mirror of
https://github.com/rightup/pyMC_Repeater.git
synced 2026-05-13 04:46:07 +02:00
Merge pull request #122 from agessaman/dev-companion-v2-cleanup
Add Companion module and multi-byte path support
+5
-1
@@ -52,9 +52,13 @@ htmlcov/

# Config
config.yaml
config.yaml.backup
identity.json

# Data
data/

# Logs
*.log
.DS_Store
syncpi.sh

@@ -30,6 +30,11 @@ The repeater daemon runs continuously as a background process, forwarding LoRa p

## Supported Hardware (Out of the Box)

The repeater supports two radio backends:

- **SX1262 (SPI)** — Direct connection to LoRa modules (HATs, etc.) as listed below.
- **KISS modem** — Serial TNC using the KISS protocol. Set `radio_type: kiss` in config and configure `kiss.port` and `kiss.baud_rate`.
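
As a quick sanity check, the snippet below reads a config and reports which backend it selects (a minimal sketch; the file path is illustrative, and `radio_type`, `kiss.port`, and `kiss.baud_rate` are the keys described above):

```python
import yaml  # pip install pyyaml

# Illustrative path; point this at your actual repeater config.
with open("config.yaml") as f:
    cfg = yaml.safe_load(f)

if cfg.get("radio_type", "sx1262") == "kiss":
    kiss = cfg.get("kiss", {})
    print(f"KISS TNC on {kiss.get('port')} at {kiss.get('baud_rate')} baud")
else:
    print("SX1262 over SPI")
```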

> [!CAUTION]
> ## Compatibility
>
@@ -48,7 +53,6 @@ The repeater daemon runs continuously as a background process, forwarding LoRa p
> - Connected via a CH341F USB–SPI adapter
> - Connected using hardware that supports Meshcore Kiss Modem firmware

The following hardware is currently supported out-of-the-box:

Waveshare LoRaWAN/GNSS HAT (SPI Version Only)
@@ -399,7 +403,3 @@ This software is intended for educational and experimental purposes. Always test
## License

This project is licensed under the MIT License - see the LICENSE file for details.

+62
-39
@@ -1,4 +1,6 @@
# Default Repeater Configuration
# radio_type: sx1262 | kiss (use kiss for serial KISS TNC modem)
radio_type: sx1262

repeater:
  # Node name for logging and identification
@@ -90,20 +92,20 @@ repeater:
security:
  # Maximum number of authenticated clients (across all identities)
  max_clients: 1

  # Admin password for full access
  admin_password: "admin123"

  # Guest password for limited access
  guest_password: "guest123"

  # Allow read-only access for clients without password/not in ACL
  allow_read_only: false

  # JWT secret key for signing tokens (auto-generated if not provided)
  # Generate with: python -c "import secrets; print(secrets.token_hex(32))"
  jwt_secret: ""

  # JWT token expiry time in minutes (default: 60 minutes / 1 hour)
  # Controls how long users stay logged in before needing to re-authenticate
  jwt_expiry_minutes: 60
@@ -130,7 +132,7 @@ identities:
  # - name: "TestBBS"
  #   identity_key: "your_room_identity_key_hex_here"
  #   type: "room_server"
  #
  #   # Room-specific settings
  #   settings:
  #     node_name: "Test BBS Room"
@@ -138,17 +140,32 @@ identities:
  #     longitude: 0.0
  #     admin_password: "room_admin_password"
  #     guest_password: "room_guest_password"

  # Add more room servers as needed
  # - name: "SocialHub"
  #   identity_key: "another_identity_key_hex_here"
  #   type: "room_server"
  #   settings:
  #     node_name: "Social Hub"
  #     latitude: 0.0
  #     longitude: 0.0
  #     admin_password: "social_admin_123"
  #     guest_password: "social_guest_123"

# Companion Identities
# Each companion exposes the MeshCore frame protocol over TCP for standard clients.
# One TCP client per companion at a time. Clients connect to repeater-ip:tcp_port.
companions:
  # - name: "RepeaterCompanion"
  #   identity_key: "your_companion_identity_key_hex_here"
  #   settings:
  #     node_name: "RepeaterCompanion"
  #     tcp_port: 5000
  #     bind_address: "0.0.0.0"
  # - name: "BotCompanion"
  #   identity_key: "another_companion_identity_key_hex"
  #   settings:
  #     node_name: "meshcore-bot"
  #     tcp_port: 5001
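
For illustration, a standard MeshCore client reaches a companion by opening a plain TCP connection to the configured port. The sketch below only checks connectivity; the host and port are placeholders, and actual frame encoding/decoding is left to a real MeshCore client library:

```python
import socket

REPEATER_HOST = "192.168.1.50"  # placeholder for your repeater's IP
COMPANION_PORT = 5000           # matches settings.tcp_port above

# Note: only one TCP client per companion at a time (see comment above).
with socket.create_connection((REPEATER_HOST, COMPANION_PORT), timeout=5) as sock:
    print(f"Connected to companion at {REPEATER_HOST}:{COMPANION_PORT}")
    # A real client would now exchange MeshCore companion frames over this socket.
```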

# Radio hardware type
# Supported:
@@ -161,7 +178,7 @@ radio_type: sx1262
ch341:
  vid: 6790 # 0x1A86
  pid: 21778 # 0x5512

radio:
  # Frequency in Hz (869.618 MHz for EU)
  frequency: 869618000
@@ -187,6 +204,11 @@ radio:
  # Use implicit header mode
  implicit_header: false

# KISS modem (when radio_type: kiss). Requires pyMC_core with KISS support.
# kiss:
#   port: "/dev/ttyUSB0"
#   baud_rate: 9600

# SX1262 Hardware Configuration
# NOTE:
# - When radio_type: sx1262, these pins are BCM GPIO numbers.
@@ -238,39 +260,39 @@ duty_cycle:
mqtt:
  # Enable/disable MQTT publishing
  enabled: false

  # MQTT broker settings
  broker: "localhost"
  port: 1883 # Use 8883 for TLS/SSL, 80/443/9001 for WebSockets

  # Use WebSocket transport instead of standard TCP
  # Typically uses ports: 80 (ws://), 443 (wss://), or 9001
  use_websockets: false

  # Authentication (optional)
  username: null
  password: null

  # TLS/SSL configuration (optional)
  # For public brokers with trusted certificates, just enable TLS:
  # tls:
  #   enabled: true
  tls:
    enabled: false

    # Advanced TLS options (usually not needed for public brokers):

    # Custom CA certificate for server verification
    # Leave null to use system default CA certificates (recommended)
    ca_cert: null # e.g., "/etc/ssl/certs/ca-certificates.crt"

    # Client certificate and key for mutual TLS (rarely needed)
    client_cert: null # e.g., "/etc/pymc/client.crt"
    client_key: null # e.g., "/etc/pymc/client.key"

    # Skip certificate verification (insecure, not recommended)
    insecure: false

  # Base topic for publishing
  # Messages will be published to: {base_topic}/{node_name}/{packet|advert}
  base_topic: "meshcore/repeater"
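
To illustrate the topic layout, here is a hedged subscriber sketch using paho-mqtt (1.x-style API, matching the `paho-mqtt>=1.6.0` dependency); broker details mirror the defaults above:

```python
import paho.mqtt.client as mqtt

def on_message(client, userdata, msg):
    # Topics arrive as meshcore/repeater/<node_name>/packet or .../advert
    print(msg.topic, msg.payload[:80])

client = mqtt.Client()
# client.username_pw_set("user", "pass")  # if the broker requires auth
# client.tls_set()                        # system CAs, for tls.enabled: true
client.on_message = on_message
client.connect("localhost", 1883)
client.subscribe("meshcore/repeater/#")
client.loop_forever()
```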

@@ -278,36 +300,37 @@ mqtt:

# Storage Configuration
storage:
  # Directory for persistent storage files (SQLite, RRD).
  # Use a writable path for local/dev (e.g. "./var/pymc_repeater" or "~/var/pymc_repeater").
  storage_dir: "/var/lib/pymc_repeater"

  # Data retention settings
  retention:
    # Clean up SQLite records older than this many days
    sqlite_cleanup_days: 31

    # RRD archives are managed automatically:
    # - 1 minute resolution for 1 week
    # - 5 minute resolution for 1 month
    # - 1 hour resolution for 1 year

letsmesh:
  enabled: false
  iata_code: "Test" # e.g., "SFO", "LHR", "Test"

  # ============================================================
  # BROKER SELECTION MODE - Choose how to connect to brokers
  # ============================================================
  #
  # EXAMPLE 1: Single built-in broker (default, most common)
  # Connect to Europe only - simple, low bandwidth
  broker_index: 0 # 0 = Europe, 1 = US West

  # EXAMPLE 2: All built-in brokers for maximum redundancy
  # Survives single broker failure, best uptime
  # broker_index: -1 # or null - connects to both EU and US

  # EXAMPLE 3: Only custom brokers (private/self-hosted)
  # Ignores built-in LetsMesh brokers completely
  # broker_index: -2
@@ -316,7 +339,7 @@ letsmesh:
  #     host: "mqtt.myserver.com"
  #     port: 443
  #     audience: "mqtt.myserver.com"

  # EXAMPLE 4: Single built-in + custom backup
  # Use EU primary with your own backup
  # broker_index: 0
@@ -325,7 +348,7 @@ letsmesh:
  #     host: "mqtt-backup.mydomain.com"
  #     port: 8883
  #     audience: "mqtt-backup.mydomain.com"

  # EXAMPLE 5: All built-in + multiple custom (maximum redundancy)
  # EU + US + your own servers - best for critical deployments
  # broker_index: -1
@@ -339,14 +362,14 @@ letsmesh:
  #     port: 443
  #     audience: "mqtt-2.mydomain.com"
  # ============================================================
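
The modes above can be summarized as a small resolver; this is an illustrative sketch only (broker names and the function are hypothetical, not the repeater's actual implementation):

```python
BUILTIN_BROKERS = ["letsmesh-eu", "letsmesh-us-west"]  # placeholders: 0 = Europe, 1 = US West

def select_brokers(broker_index, custom_brokers):
    """Illustrative resolution of the broker_index modes described above."""
    if broker_index == -2:                            # EXAMPLE 3: custom brokers only
        return list(custom_brokers)
    if broker_index is None or broker_index == -1:    # EXAMPLE 2/5: all built-in (+ custom)
        return BUILTIN_BROKERS + list(custom_brokers)
    return [BUILTIN_BROKERS[broker_index]] + list(custom_brokers)  # EXAMPLE 1/4

print(select_brokers(0, []))                          # ['letsmesh-eu']
print(select_brokers(-1, ["mqtt.myserver.com"]))      # EU + US + custom backup
```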

  status_interval: 300
  owner: ""
  email: ""

  # Block specific packet types from being published to LetsMesh
  # If not specified or empty list, all types are published
  # Available types: REQ, RESPONSE, TXT_MSG, ACK, ADVERT, GRP_TXT,
  #                  GRP_DATA, ANON_REQ, PATH, TRACE, RAW_CUSTOM
  disallowed_packet_types: []
  # - REQ # Don't publish requests

@@ -126,17 +126,17 @@ output_format = "$OUTPUT_FORMAT"
# Verify with pyMC if available
try:
    from nacl.bindings import crypto_scalarmult_ed25519_base_noclamp

    scalar = key_bytes[:32]
    pubkey = crypto_scalarmult_ed25519_base_noclamp(scalar)

    print(f"Derived public key: {pubkey.hex()}")

    # Calculate address (MeshCore uses first byte of pubkey directly, not SHA256)
    address = pubkey[0]
    print(f"Node address: 0x{address:02x}")
    print()

except ImportError:
    print("Warning: PyNaCl not available, skipping verification")
    print()

Vendored
+2
-2
@@ -38,13 +38,13 @@ case "$1" in
echo "Installing pymc_core[hardware] from PyPI..."
python3 -m pip install --break-system-packages 'pymc_core[hardware]>=1.0.7' || true
fi

# Install packages not available in Debian repos
if ! python3 -c "import cherrypy_cors" 2>/dev/null; then
    echo "Installing cherrypy-cors from PyPI..."
    python3 -m pip install --break-system-packages 'cherrypy-cors==1.7.0' || true
fi

if ! python3 -c "import ws4py" 2>/dev/null; then
    echo "Installing ws4py from PyPI..."
    python3 -m pip install --break-system-packages 'ws4py>=0.5.1' || true

@@ -120,7 +120,7 @@ get_status_display() {
# Main menu
show_main_menu() {
    local status=$(get_status_display)

    CHOICE=$($DIALOG --backtitle "pyMC Repeater Management" --title "pyMC Repeater Management" --menu "\nCurrent Status: $status\n\nChoose an action:" 18 70 9 \
        "install" "Install pyMC Repeater" \
        "upgrade" "Upgrade existing installation" \
@@ -133,7 +133,7 @@ show_main_menu() {
        "logs" "View live logs" \
        "status" "Show detailed status" \
        "exit" "Exit" 3>&1 1>&2 2>&3)

    case $CHOICE in
        "install")
            if is_installed; then
@@ -200,11 +200,11 @@ install_repeater() {
        show_error "Installation requires root privileges.\n\nPlease run: sudo $0"
        return
    fi

    # Welcome screen
    $DIALOG --backtitle "pyMC Repeater Management" --title "Welcome" --msgbox "\nWelcome to pyMC Repeater Setup\n\nThis installer will configure your Linux system as a LoRa mesh network repeater.\n\nPress OK to continue..." 12 70

    # SPI Check - skip for CH341 USB-SPI adapter (handles SPI over USB)
    # SPI Check - Universal approach that works on all boards (skip for CH341 USB-SPI adapter)
    SPI_MISSING=0
    USES_CH341=0
    if [ -f "$CONFIG_DIR/config.yaml" ]; then
@@ -221,7 +221,7 @@ install_repeater() {
    elif [ -f "/boot/config.txt" ]; then
        CONFIG_FILE="/boot/config.txt"
    fi

    if [ -n "$CONFIG_FILE" ]; then
        # Raspberry Pi detected - offer to enable SPI
        if ask_yes_no "SPI Not Enabled" "\nSPI interface is required but not detected (/dev/spidev* not found)!\n\nWould you like to enable it now?\n(This will require a reboot)"; then
@@ -250,10 +250,10 @@ install_repeater() {
    if [ "$SPI_MISSING" -eq 1 ]; then
        show_info "Warning" "\nContinuing without SPI enabled.\n\nLoRa radio will not work until SPI is enabled and /dev/spidev* is available."
    fi

    # Get script directory for file copying during installation
    SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"

    # Installation progress
    echo ""
    echo "═══════════════════════════════════════════════════════════════"
@@ -265,24 +265,24 @@ install_repeater() {
    if ! id "$SERVICE_USER" &>/dev/null; then
        useradd --system --home /var/lib/pymc_repeater --shell /sbin/nologin "$SERVICE_USER"
    fi

    echo ">>> Adding user to hardware groups..."
    echo "10"; echo "# Adding user to hardware groups..."
    for grp in plugdev dialout gpio i2c spi; do
        getent group "$grp" >/dev/null 2>&1 && usermod -a -G "$grp" "$SERVICE_USER" 2>/dev/null || true
    done

    echo ">>> Creating directories..."
    echo "20"; echo "# Creating directories..."
    mkdir -p "$INSTALL_DIR" "$CONFIG_DIR" "$LOG_DIR" /var/lib/pymc_repeater

    echo ">>> Installing system dependencies..."
    echo "25"; echo "# Installing system dependencies..."
    apt-get update -qq
    DEBIAN_FRONTEND=noninteractive apt-get install -y libffi-dev libusb-1.0-0 sudo jq pip python3-rrdtool wget swig build-essential python3-dev
    # Install polkit (package name varies by distro version)
    DEBIAN_FRONTEND=noninteractive apt-get install -y policykit-1 2>/dev/null \
        || DEBIAN_FRONTEND=noninteractive apt-get install -y polkitd pkexec 2>/dev/null \
        || echo " Warning: Could not install polkit (sudo fallback will be used)"
    pip install --break-system-packages setuptools_scm 2>&1 || true
    pip install --break-system-packages setuptools_scm >/dev/null 2>&1 || true

    # Install mikefarah yq v4 if not already installed
    if ! command -v yq &> /dev/null || [[ "$(yq --version 2>&1)" != *"mikefarah/yq"* ]]; then
        echo ">>> Installing yq..."
@@ -295,8 +295,8 @@ install_repeater() {
        fi
        wget -qO /usr/local/bin/yq "https://github.com/mikefarah/yq/releases/download/${YQ_VERSION}/${YQ_BINARY}" 2>/dev/null && chmod +x /usr/local/bin/yq
    fi

    echo ">>> Generating version file..."
    echo "28"; echo "# Generating version file..."
    cd "$SCRIPT_DIR"
    # Generate version file using setuptools_scm before copying
    if [ -d .git ]; then
@@ -305,19 +305,19 @@ install_repeater() {
        python3 -m setuptools_scm >/dev/null 2>&1 || true
        python3 -c "from setuptools_scm import get_version; get_version(write_to='repeater/_version.py')" >/dev/null 2>&1 || true
    fi

    # Clean up stale bytecode in source directory before copying
    find "$SCRIPT_DIR/repeater" -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
    find "$SCRIPT_DIR/repeater" -type f -name '*.pyc' -delete 2>/dev/null || true

    echo ">>> Cleaning old installation files..."
    echo "29"; echo "# Cleaning old installation files..."
    # Remove old repeater directory to ensure clean install
    rm -rf "$INSTALL_DIR/repeater" 2>/dev/null || true
    # Clean up old Python bytecode
    find "$INSTALL_DIR" -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
    find "$INSTALL_DIR" -type f -name '*.pyc' -delete 2>/dev/null || true

    echo ">>> Installing files..."
    echo "30"; echo "# Installing files..."
    cp -r "$SCRIPT_DIR/repeater" "$INSTALL_DIR/"
    cp "$SCRIPT_DIR/pyproject.toml" "$INSTALL_DIR/"
    cp "$SCRIPT_DIR/README.md" "$INSTALL_DIR/"
@@ -325,25 +325,25 @@ install_repeater() {
    cp "$SCRIPT_DIR/pymc-repeater.service" "$INSTALL_DIR/" 2>/dev/null || true
    cp "$SCRIPT_DIR/radio-settings.json" /var/lib/pymc_repeater/ 2>/dev/null || true
    cp "$SCRIPT_DIR/radio-presets.json" /var/lib/pymc_repeater/ 2>/dev/null || true

    echo ">>> Installing configuration..."
    echo "45"; echo "# Installing configuration..."
    cp "$SCRIPT_DIR/config.yaml.example" "$CONFIG_DIR/config.yaml.example"
    if [ ! -f "$CONFIG_DIR/config.yaml" ]; then
        cp "$SCRIPT_DIR/config.yaml.example" "$CONFIG_DIR/config.yaml"
    fi

    echo ">>> Installing systemd service..."
    echo "55"; echo "# Installing systemd service..."
    cp "$SCRIPT_DIR/pymc-repeater.service" /etc/systemd/system/
    systemctl daemon-reload

    echo ">>> Installing udev rules for CH341..."
    echo "58"; echo "# Installing udev rules for CH341..."
    if [ -f "$SCRIPT_DIR/../pyMC_core/99-ch341.rules" ]; then
        cp "$SCRIPT_DIR/../pyMC_core/99-ch341.rules" /etc/udev/rules.d/99-ch341.rules
        udevadm control --reload-rules 2>/dev/null || true
        udevadm trigger 2>/dev/null || true
    fi

    echo ">>> Setting permissions..."
    echo "65"; echo "# Setting permissions..."
    chown -R "$SERVICE_USER:$SERVICE_USER" "$INSTALL_DIR" "$CONFIG_DIR" "$LOG_DIR" /var/lib/pymc_repeater
    chmod 750 "$CONFIG_DIR" "$LOG_DIR" /var/lib/pymc_repeater
    # Ensure the service user can create subdirectories in their home directory
@@ -351,7 +351,7 @@ install_repeater() {
    # Pre-create the .config directory that the service will need
    mkdir -p /var/lib/pymc_repeater/.config/pymc_repeater
    chown -R "$SERVICE_USER:$SERVICE_USER" /var/lib/pymc_repeater/.config

    # Configure polkit for passwordless service restart
    echo ">>> Configuring polkit for service management..."
    mkdir -p /etc/polkit-1/rules.d
@@ -365,7 +365,7 @@ polkit.addRule(function(action, subject) {
});
EOF
    chmod 0644 /etc/polkit-1/rules.d/10-pymc-repeater.rules

    # Also configure sudoers as fallback for service restart
    echo ">>> Configuring sudoers for service management..."
    mkdir -p /etc/sudoers.d
@@ -374,12 +374,13 @@ EOF
repeater ALL=(root) NOPASSWD: /usr/bin/systemctl restart pymc-repeater, /usr/bin/systemctl stop pymc-repeater, /usr/bin/systemctl start pymc-repeater, /usr/bin/systemctl status pymc-repeater
EOF
    chmod 0440 /etc/sudoers.d/pymc-repeater

    echo ">>> Enabling service..."
    echo "75"; echo "# Starting service..."
    systemctl enable "$SERVICE_NAME"

    echo ">>> Installation files complete."
    echo "90"; echo "# Installation files complete..."
    ) | $DIALOG --backtitle "pyMC Repeater Management" --title "Installing" --gauge "Setting up pyMC Repeater..." 8 70 0

    # Install Python package outside of progress gauge for better error handling
    clear
    echo "=== Installing Python Dependencies ==="
@@ -387,13 +388,13 @@ EOF
    echo "Installing pymc_repeater and dependencies (including pymc_core from GitHub)..."
    echo "This may take a few minutes..."
    echo ""

    SCRIPT_DIR="$(dirname "$0")"
    cd "$SCRIPT_DIR"

    # Suppress pip root user warnings
    export PIP_ROOT_USER_ACTION=ignore

    # Calculate version from git for setuptools_scm
    if [ -d .git ]; then
        git fetch --tags 2>/dev/null || true
@@ -403,16 +404,16 @@ EOF
    else
        export SETUPTOOLS_SCM_PRETEND_VERSION="1.0.5"
    fi

    # Force binary wheels for slow-to-compile packages (much faster on Raspberry Pi)
    export PIP_ONLY_BINARY=pycryptodome,cffi,PyNaCl,psutil
    echo "Note: Using optimized binary wheels for faster installation"
    echo ""

    if pip install --break-system-packages --no-build-isolation --ignore-installed --no-cache-dir .; then
    if pip install --break-system-packages --no-cache-dir .[hardware]; then
        echo ""
        echo "✓ Python package installation completed successfully!"

        # Reload systemd and start the service
        systemctl daemon-reload
        systemctl start "$SERVICE_NAME"
@@ -422,7 +423,7 @@ EOF
        echo "Please check the error messages above and try again."
        read -p "Press Enter to continue..." || true
    fi

    # Show final results
    sleep 2
    local ip_address=$(hostname -I | awk '{print $1}')
@@ -483,18 +484,18 @@ reset_repeater() {
    show_error "Upgrade requires root privileges.\n\nPlease run: sudo $0"
    return
    fi

    local current_version=$(get_version)

if ask_yes_no "Confirm Reset of pyMC Repeater restoring to default configuration.\n\nContinue?"; then
|
||||
|
||||
|
||||
        # Show info that reset is starting
        show_info "Resetting" "Starting reset process...\n\nProgress will be shown in the terminal."

echo "=== Reset Progress ==="
|
||||
echo "[1/4] Stopping service..."
|
||||
systemctl stop "$SERVICE_NAME" 2>/dev/null || true
|
||||
|
||||
|
||||
echo "[2/4] Backing up configuration..."
|
||||
if [ -d "$CONFIG_DIR" ]; then
|
||||
cp -r "$CONFIG_DIR" "$CONFIG_DIR.backup.$(date +%Y%m%d_%H%M%S)" 2>/dev/null || true
|
||||
@@ -541,7 +542,7 @@ reset_repeater() {
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
# Upgrade function
|
||||
upgrade_repeater() {
|
||||
local silent="${1:-false}"
|
||||
@@ -553,31 +554,31 @@ upgrade_repeater() {
|
||||
fi
|
||||
return 1
|
||||
fi
|
||||
|
||||
|
||||
local current_version=$(get_version)
|
||||
|
||||
|
||||
if [[ "$silent" != "true" ]]; then
|
||||
if ! ask_yes_no "Confirm Upgrade" "Current version: $current_version\n\nThis will upgrade pyMC Repeater while preserving your configuration.\n\nContinue?"; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
|
||||
# Show info that upgrade is starting
|
||||
show_info "Upgrading" "Starting upgrade process...\n\nThis may take a few minutes.\nProgress will be shown in the terminal."
|
||||
else
|
||||
echo "Starting upgrade process..."
|
||||
echo "Current version: $current_version"
|
||||
fi
|
||||
|
||||
|
||||
echo "=== Upgrade Progress ==="
|
||||
echo "[1/9] Stopping service..."
|
||||
systemctl stop "$SERVICE_NAME" 2>/dev/null || true
|
||||
|
||||
|
||||
echo "[2/9] Backing up configuration..."
|
||||
if [ -d "$CONFIG_DIR" ]; then
|
||||
cp -r "$CONFIG_DIR" "$CONFIG_DIR.backup.$(date +%Y%m%d_%H%M%S)" 2>/dev/null || true
|
||||
echo " ✓ Configuration backed up"
|
||||
fi
|
||||
|
||||
|
||||
echo "[3/9] Updating system dependencies..."
|
||||
apt-get update -qq
|
||||
|
||||
@@ -587,7 +588,7 @@ upgrade_repeater() {
|
||||
|| apt-get install -y polkitd pkexec 2>/dev/null \
|
||||
|| echo " Warning: Could not install polkit (sudo fallback will be used)"
|
||||
pip install --break-system-packages setuptools_scm >/dev/null 2>&1 || true
|
||||
|
||||
|
||||
# Install mikefarah yq v4 if not already installed
|
||||
if ! command -v yq &> /dev/null || [[ "$(yq --version 2>&1)" != *"mikefarah/yq"* ]]; then
|
||||
YQ_VERSION="v4.40.5"
|
||||
@@ -600,7 +601,7 @@ upgrade_repeater() {
|
||||
wget -qO /usr/local/bin/yq "https://github.com/mikefarah/yq/releases/download/${YQ_VERSION}/${YQ_BINARY}" && chmod +x /usr/local/bin/yq
|
||||
fi
|
||||
echo " ✓ Dependencies updated"
|
||||
|
||||
|
||||
echo "[3.5/9] Generating version file..."
|
||||
SCRIPT_DIR="$(dirname "$0")"
|
||||
cd "$SCRIPT_DIR"
|
||||
@@ -616,7 +617,7 @@ upgrade_repeater() {
|
||||
find "$SCRIPT_DIR/repeater" -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
|
||||
find "$SCRIPT_DIR/repeater" -type f -name '*.pyc' -delete 2>/dev/null || true
|
||||
echo " ✓ Version file generated and bytecode cleaned"
|
||||
|
||||
|
||||
echo "[3.8/9] Cleaning old installation files..."
|
||||
# Remove old repeater directory to ensure clean upgrade
|
||||
rm -rf "$INSTALL_DIR/repeater" 2>/dev/null || true
|
||||
@@ -624,7 +625,7 @@ upgrade_repeater() {
|
||||
find "$INSTALL_DIR" -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
|
||||
find "$INSTALL_DIR" -type f -name '*.pyc' -delete 2>/dev/null || true
|
||||
echo " ✓ Old files cleaned"
|
||||
|
||||
|
||||
echo "[4/9] Installing new files..."
|
||||
cp -r repeater "$INSTALL_DIR/" 2>/dev/null || true
|
||||
cp pyproject.toml "$INSTALL_DIR/" 2>/dev/null || true
|
||||
@@ -633,14 +634,14 @@ upgrade_repeater() {
|
||||
cp radio-settings.json /var/lib/pymc_repeater/ 2>/dev/null || true
|
||||
cp radio-presets.json /var/lib/pymc_repeater/ 2>/dev/null || true
|
||||
echo " ✓ Files updated"
|
||||
|
||||
|
||||
echo "[5/9] Validating and updating configuration..."
|
||||
if validate_and_update_config; then
|
||||
echo " ✓ Configuration validated and updated"
|
||||
else
|
||||
echo " ⚠ Configuration validation failed, keeping existing config"
|
||||
fi
|
||||
|
||||
|
||||
echo "[5.5/9] Ensuring user groups and udev rules..."
|
||||
for grp in plugdev dialout gpio i2c spi; do
|
||||
getent group "$grp" >/dev/null 2>&1 && usermod -a -G "$grp" "$SERVICE_USER" 2>/dev/null || true
|
||||
@@ -684,24 +685,24 @@ repeater ALL=(root) NOPASSWD: /usr/bin/systemctl restart pymc-repeater, /usr/bin
|
||||
EOF
|
||||
chmod 0440 /etc/sudoers.d/pymc-repeater
|
||||
echo " ✓ Permissions updated"
|
||||
|
||||
|
||||
echo "[7/9] Reloading systemd..."
|
||||
systemctl daemon-reload
|
||||
echo " ✓ Systemd reloaded"
|
||||
|
||||
|
||||
echo "=== Installing Python Dependencies ==="
|
||||
echo ""
|
||||
echo "Updating pymc_repeater and dependencies (including pymc_core from GitHub)..."
|
||||
echo "This may take a few minutes..."
|
||||
echo ""
|
||||
|
||||
|
||||
# Install from source directory to properly resolve Git dependencies
|
||||
SCRIPT_DIR="$(dirname "$0")"
|
||||
cd "$SCRIPT_DIR"
|
||||
|
||||
|
||||
# Suppress pip root user warnings
|
||||
export PIP_ROOT_USER_ACTION=ignore
|
||||
|
||||
|
||||
# Calculate version from git for setuptools_scm
|
||||
if [ -d .git ]; then
|
||||
git fetch --tags 2>/dev/null || true
|
||||
@@ -711,36 +712,36 @@ EOF
|
||||
else
|
||||
export SETUPTOOLS_SCM_PRETEND_VERSION="1.0.5"
|
||||
fi
|
||||
|
||||
|
||||
# Force binary wheels for slow-to-compile packages (much faster on Raspberry Pi)
|
||||
export PIP_ONLY_BINARY=pycryptodome,cffi,PyNaCl,psutil
|
||||
echo "Note: Using optimized binary wheels and cached packages for faster installation"
|
||||
echo ""
|
||||
|
||||
|
||||
# Upgrade packages (uses cache for unchanged dependencies - much faster)
|
||||
if python3 -m pip install --break-system-packages --no-build-isolation --ignore-installed --upgrade --upgrade-strategy eager .; then
|
||||
if python3 -m pip install --break-system-packages --upgrade --upgrade-strategy eager .[hardware]; then
|
||||
echo ""
|
||||
echo "✓ Package and dependencies updated successfully!"
|
||||
else
|
||||
echo ""
|
||||
echo "⚠ Package update failed, but continuing..."
|
||||
fi
|
||||
|
||||
|
||||
|
||||
echo ""
|
||||
echo "✓ All packages including pymc_core reinstalled successfully"
|
||||
|
||||
|
||||
|
||||
echo "[8/9] Starting service..."
|
||||
systemctl daemon-reload
|
||||
systemctl start "$SERVICE_NAME"
|
||||
echo " ✓ Service started"
|
||||
|
||||
|
||||
echo "[9/9] Verifying installation..."
|
||||
sleep 3 # Give service time to start
|
||||
|
||||
|
||||
local new_version=$(get_version)
|
||||
|
||||
|
||||
if is_running; then
|
||||
echo " ✓ Service is running"
|
||||
# Container detection: warn about host-side udev rules
|
||||
@@ -779,10 +780,10 @@ configure_radio() {
|
||||
show_error "Service is not running!\n\nPlease start the service first from the main menu."
|
||||
return
|
||||
fi
|
||||
|
||||
|
||||
# Get IP address
|
||||
local ip_address=$(hostname -I | awk '{print $1}')
|
||||
|
||||
|
||||
# Show info about web-based configuration
|
||||
if ask_yes_no "Configure Radio Settings" "Radio configuration is now done through the web interface.\n\nThe web-based setup wizard provides an easy way to:\n\n• Change repeater name\n• Select hardware board\n• Configure radio frequency and settings\n• Update admin password\n\nWeb Dashboard: http://$ip_address:8000/setup\n\nWould you like to open this information?"; then
|
||||
clear
|
||||
@@ -818,7 +819,7 @@ uninstall_repeater() {
|
||||
show_error "Uninstall requires root privileges.\n\nPlease run: sudo $0"
|
||||
return
|
||||
fi
|
||||
|
||||
|
||||
if ask_yes_no "Confirm Uninstall" "This will completely remove pyMC Repeater including:\n\n- Service and files\n- Configuration (backup will be created)\n- Logs and data\n\nThis action cannot be undone!\n\nContinue?"; then
|
||||
echo ""
|
||||
echo "═══════════════════════════════════════════════════════════════"
|
||||
@@ -829,33 +830,34 @@ uninstall_repeater() {
|
||||
echo ">>> Stopping and disabling service..."
|
||||
systemctl stop "$SERVICE_NAME" 2>/dev/null || true
|
||||
systemctl disable "$SERVICE_NAME" 2>/dev/null || true
|
||||
|
||||
echo ">>> Backing up configuration..."
|
||||
|
||||
echo "20"; echo "# Backing up configuration..."
|
||||
if [ -d "$CONFIG_DIR" ]; then
|
||||
cp -r "$CONFIG_DIR" "/tmp/pymc_repeater_config_backup_$(date +%Y%m%d_%H%M%S)" 2>/dev/null || true
|
||||
fi
|
||||
|
||||
echo ">>> Removing service files..."
|
||||
|
||||
echo "40"; echo "# Removing service files..."
|
||||
rm -f /etc/systemd/system/pymc-repeater.service
|
||||
systemctl daemon-reload
|
||||
|
||||
echo ">>> Removing installation..."
|
||||
|
||||
echo "50"; echo "# Removing polkit and sudoers rules..."
|
||||
rm -f /etc/polkit-1/rules.d/10-pymc-repeater.rules
|
||||
rm -f /etc/sudoers.d/pymc-repeater
|
||||
|
||||
echo "60"; echo "# Removing installation..."
|
||||
rm -rf "$INSTALL_DIR"
|
||||
rm -rf "$CONFIG_DIR"
|
||||
rm -rf "$LOG_DIR"
|
||||
rm -rf /var/lib/pymc_repeater
|
||||
|
||||
echo ">>> Removing service user..."
|
||||
|
||||
echo "80"; echo "# Removing service user..."
|
||||
if id "$SERVICE_USER" &>/dev/null; then
|
||||
userdel "$SERVICE_USER" 2>/dev/null || true
|
||||
fi
|
||||
|
||||
echo ">>> Removing polkit and sudoers rules..."
|
||||
rm -f /etc/polkit-1/rules.d/10-pymc-repeater.rules
|
||||
rm -f /etc/sudoers.d/pymc-repeater
|
||||
|
||||
echo ">>> Uninstall complete!"
|
||||
|
||||
|
||||
echo "100"; echo "# Uninstall complete!"
|
||||
) | $DIALOG --backtitle "pyMC Repeater Management" --title "Uninstalling" --gauge "Removing pyMC Repeater..." 8 70 0
|
||||
|
||||
show_info "Uninstall Complete" "\npyMC Repeater has been completely removed.\n\nConfiguration backup saved to /tmp/\n\nThank you for using pyMC Repeater!"
|
||||
fi
|
||||
}
|
||||
@@ -864,7 +866,7 @@ uninstall_repeater() {
|
||||
manage_service() {
|
||||
local action=$1
|
||||
local silent="${2:-false}"
|
||||
|
||||
|
||||
if [ "$EUID" -ne 0 ]; then
|
||||
if [[ "$silent" == "true" ]]; then
|
||||
echo "Service management requires root privileges. Please run: sudo $0 $action"
|
||||
@@ -873,7 +875,7 @@ manage_service() {
|
||||
fi
|
||||
return 1
|
||||
fi
|
||||
|
||||
|
||||
if ! service_exists; then
|
||||
if [[ "$silent" == "true" ]]; then
|
||||
echo "Service is not installed."
|
||||
@@ -882,7 +884,7 @@ manage_service() {
|
||||
fi
|
||||
return 1
|
||||
fi
|
||||
|
||||
|
||||
case $action in
|
||||
"start")
|
||||
if ! is_enabled; then
|
||||
@@ -937,14 +939,14 @@ show_detailed_status() {
|
||||
local status_info=""
|
||||
local version=$(get_version)
|
||||
local ip_address=$(hostname -I | awk '{print $1}')
|
||||
|
||||
|
||||
status_info="Installation Status: "
|
||||
if is_installed; then
|
||||
status_info="${status_info}Installed\n"
|
||||
status_info="${status_info}Version: $version\n"
|
||||
status_info="${status_info}Install Directory: $INSTALL_DIR\n"
|
||||
status_info="${status_info}Config Directory: $CONFIG_DIR\n\n"
|
||||
|
||||
|
||||
status_info="${status_info}Service Status: "
|
||||
if is_running; then
|
||||
status_info="${status_info}Running ✓\n"
|
||||
@@ -952,7 +954,7 @@ show_detailed_status() {
|
||||
else
|
||||
status_info="${status_info}Stopped ✗\n\n"
|
||||
fi
|
||||
|
||||
|
||||
# Add system info
|
||||
status_info="${status_info}System Info:\n"
|
||||
status_info="${status_info}- SPI: "
|
||||
@@ -961,14 +963,14 @@ show_detailed_status() {
|
||||
else
|
||||
status_info="${status_info}Disabled ✗\n"
|
||||
fi
|
||||
|
||||
|
||||
status_info="${status_info}- IP Address: $ip_address\n"
|
||||
status_info="${status_info}- Hostname: $(hostname)\n"
|
||||
|
||||
|
||||
else
|
||||
status_info="${status_info}Not Installed"
|
||||
fi
|
||||
|
||||
|
||||
show_info "System Status" "$status_info"
|
||||
}
|
||||
|
||||
@@ -977,7 +979,7 @@ validate_and_update_config() {
|
||||
local config_file="$CONFIG_DIR/config.yaml"
|
||||
local example_file="config.yaml.example"
|
||||
local updated_example="$CONFIG_DIR/config.yaml.example"
|
||||
|
||||
|
||||
# Copy the new example file
|
||||
if [ -f "$example_file" ]; then
|
||||
cp "$example_file" "$updated_example"
|
||||
@@ -985,46 +987,46 @@ validate_and_update_config() {
|
||||
echo " ⚠ config.yaml.example not found in source directory"
|
||||
return 1
|
||||
fi
|
||||
|
||||
|
||||
# Check if user config exists
|
||||
if [ ! -f "$config_file" ]; then
|
||||
echo " ⚠ No existing config.yaml found, copying example"
|
||||
cp "$updated_example" "$config_file"
|
||||
return 0
|
||||
fi
|
||||
|
||||
|
||||
# Check if yq is available
|
||||
YQ_CMD="/usr/local/bin/yq"
|
||||
if ! command -v "$YQ_CMD" &> /dev/null; then
|
||||
echo " ⚠ mikefarah yq not found at $YQ_CMD, skipping config merge"
|
||||
return 0
|
||||
fi
|
||||
|
||||
|
||||
# Verify it's the correct yq version
|
||||
if [[ "$($YQ_CMD --version 2>&1)" != *"mikefarah/yq"* ]]; then
|
||||
echo " ⚠ Wrong yq version detected at $YQ_CMD, skipping config merge"
|
||||
return 0
|
||||
fi
|
||||
|
||||
|
||||
echo " Merging configuration..."
|
||||
|
||||
|
||||
# Create backup of user config
|
||||
local backup_file="${config_file}.backup.$(date +%Y%m%d_%H%M%S)"
|
||||
cp "$config_file" "$backup_file"
|
||||
echo " ✓ Backup created: $backup_file"
|
||||
|
||||
|
||||
# Merge strategy: user config takes precedence, add missing keys from example
|
||||
# This uses yq's multiply merge operator (*) which:
|
||||
# - Keeps all values from the right operand (user config)
|
||||
# - Adds missing keys from the left operand (example config)
|
||||
local temp_merged="${config_file}.merged"
|
||||
|
||||
|
||||
# Strip comments from user config before merge to prevent comment accumulation.
|
||||
# yq preserves comments from both files, so each upgrade cycle would duplicate
|
||||
# the header and inline comments. We keep only the example's comments.
|
||||
local stripped_user="${config_file}.stripped"
|
||||
"$YQ_CMD" eval '... comments=""' "$config_file" > "$stripped_user" 2>/dev/null || cp "$config_file" "$stripped_user"
|
||||
|
||||
|
||||
if "$YQ_CMD" eval-all '. as $item ireduce ({}; . * $item)' "$updated_example" "$stripped_user" > "$temp_merged" 2>/dev/null; then
|
||||
rm -f "$stripped_user"
|
||||
# Verify the merged file is valid YAML
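
The yq merge semantics used here (the example supplies missing keys, user values win) can be illustrated with a small Python deep-merge sketch; this is illustrative only, since the script itself shells out to yq:

```python
def deep_merge(example: dict, user: dict) -> dict:
    """Right operand wins on conflicts, like yq's '*' merge above."""
    out = dict(example)
    for key, value in user.items():
        if isinstance(value, dict) and isinstance(out.get(key), dict):
            out[key] = deep_merge(out[key], value)
        else:
            out[key] = value
    return out

example = {"radio": {"frequency": 869618000, "tx_power": 14}, "new_key": True}
user = {"radio": {"frequency": 868000000}}
print(deep_merge(example, user))
# {'radio': {'frequency': 868000000, 'tx_power': 14}, 'new_key': True}
```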
+9
-2
@@ -31,7 +31,7 @@ keywords = ["mesh", "networking", "lora", "repeater", "daemon", "iot"]

dependencies = [
    "pymc_core[hardware] @ git+https://github.com/rightup/pyMC_core.git@feat/newRadios",
    "pymc_core",
    "pyyaml>=6.0.0",
    "cherrypy>=18.0.0",
    "paho-mqtt>=1.6.0",
@@ -44,6 +44,14 @@ dependencies = [

[project.optional-dependencies]
# SX1262/SPI support (Linux only; required for Raspberry Pi HATs)
hardware = [
    "pymc_core[hardware]",
]
# RRD metrics (Performance Metrics chart); system librrd required (e.g. apt install rrdtool)
rrd = [
    "rrdtool",
]
dev = [
    "pytest>=7.4.0",
    "pytest-asyncio>=0.21.0",
@@ -78,4 +86,3 @@ line_length = 100
[tool.setuptools_scm]
version_scheme = "guess-next-dev"
local_scheme = "no-local-version"

@@ -3,6 +3,7 @@ try:
except ImportError:
    try:
        from importlib.metadata import version

        __version__ = version("pymc_repeater")
    except Exception:
        __version__ = "unknown"
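
For context, the full fallback chain presumably looks like the following; the first import of a setuptools_scm-generated `_version.py` is inferred from the truncated hunk above, not shown in the diff:

```python
try:
    from ._version import version as __version__  # written by setuptools_scm
except ImportError:
    try:
        from importlib.metadata import version

        __version__ = version("pymc_repeater")
    except Exception:
        __version__ = "unknown"
```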
+9
-9
@@ -37,9 +37,9 @@ class AirtimeManager:
    ) -> float:
        """
        Calculate LoRa packet airtime using the Semtech reference formula.

        Reference: https://www.semtech.com/design-support/lora-calculator

        Args:
            payload_len: Payload length in bytes
            spreading_factor: SF7-SF12 (uses config value if None)
@@ -48,7 +48,7 @@ class AirtimeManager:
            preamble_len: Preamble symbols (uses config value if None)
            crc_enabled: Whether CRC is enabled (default: True)
            explicit_header: Whether explicit header mode is used (default: True)

        Returns:
            Airtime in milliseconds
        """
@@ -58,25 +58,25 @@ class AirtimeManager:
        preamble_len = preamble_len or self.preamble_length
        crc = 1 if crc_enabled else 0
        h = 0 if explicit_header else 1  # H=0 for explicit, H=1 for implicit

        # Low data rate optimization: required for SF11/SF12 at 125kHz
        de = 1 if (sf >= 11 and bw_hz <= 125000) else 0

        # Symbol time in milliseconds: T_sym = 2^SF / BW_kHz
        t_sym = (2 ** sf) / (bw_hz / 1000)

        # Preamble time: T_preamble = (n_preamble + 4.25) * T_sym
        t_preamble = (preamble_len + 4.25) * t_sym

        # Payload symbol calculation (Semtech formula):
        # n_payload = 8 + ceil(max(8*PL - 4*SF + 28 + 16*CRC - 20*H, 0) / (4*(SF - 2*DE))) * CR
        numerator = max(8 * payload_len - 4 * sf + 28 + 16 * crc - 20 * h, 0)
        denominator = 4 * (sf - 2 * de)
        n_payload = 8 + math.ceil(numerator / denominator) * cr

        # Payload time
        t_payload = n_payload * t_sym

        # Total packet airtime
        return t_preamble + t_payload
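
A worked example of the same formula, using the example config's radio defaults (SF8, BW 62.5 kHz, CR 4/8, 32-byte payload) and an assumed 8-symbol preamble:

```python
import math

sf, bw_hz, cr, preamble_len, payload_len = 8, 62_500, 8, 8, 32
crc, h = 1, 0                                     # CRC on, explicit header
de = 1 if (sf >= 11 and bw_hz <= 125_000) else 0  # LDRO off for SF8

t_sym = (2 ** sf) / (bw_hz / 1000)                # 4.096 ms per symbol
t_preamble = (preamble_len + 4.25) * t_sym        # about 50.18 ms
numerator = max(8 * payload_len - 4 * sf + 28 + 16 * crc - 20 * h, 0)   # 268
n_payload = 8 + math.ceil(numerator / (4 * (sf - 2 * de))) * cr         # 80 symbols
print(round(t_preamble + n_payload * t_sym, 2))   # 377.86 ms total airtime
```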
@@ -0,0 +1,30 @@
"""Companion identity support for pyMC Repeater.

Exposes the MeshCore companion frame protocol over TCP for standard clients.
"""

from .bridge import RepeaterCompanionBridge
from .constants import (
    CMD_APP_START,
    CMD_GET_CONTACTS,
    CMD_SEND_LOGIN,
    CMD_SEND_TXT_MSG,
    CMD_SYNC_NEXT_MESSAGE,
    PUSH_CODE_MSG_WAITING,
    RESP_CODE_ERR,
    RESP_CODE_OK,
)
from .frame_server import CompanionFrameServer

__all__ = [
    "CompanionFrameServer",
    "RepeaterCompanionBridge",
    "CMD_APP_START",
    "CMD_GET_CONTACTS",
    "CMD_SEND_TXT_MSG",
    "CMD_SYNC_NEXT_MESSAGE",
    "CMD_SEND_LOGIN",
    "RESP_CODE_OK",
    "RESP_CODE_ERR",
    "PUSH_CODE_MSG_WAITING",
]
@@ -0,0 +1,122 @@
"""
Repeater CompanionBridge with SQLite-backed preference persistence.

Persists full NodePrefs as a JSON blob so companion settings (including
auto-add config) survive repeater restarts. Merge-on-load supports
schema evolution when NodePrefs gains or loses fields.
"""

from __future__ import annotations

import dataclasses
import logging
from enum import Enum
from typing import Any, Callable, Optional

from pymc_core.companion import CompanionBridge

logger = logging.getLogger("RepeaterCompanionBridge")


def _to_json_safe(value: Any) -> Any:
    """Convert a value to a JSON-serializable form (avoids TypeError from enums, bytes, etc.)."""
    if value is None or isinstance(value, (bool, int, float, str)):
        return value
    if isinstance(value, Enum):
        return value.value
    if isinstance(value, bytes):
        return value.hex()
    if isinstance(value, (list, tuple)):
        return [_to_json_safe(v) for v in value]
    if isinstance(value, dict):
        return {k: _to_json_safe(v) for k, v in value.items()}
    if dataclasses.is_dataclass(value) and not isinstance(value, type):
        return {f.name: _to_json_safe(getattr(value, f.name)) for f in dataclasses.fields(value)}
    return value
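
A quick illustration of what this helper produces (hypothetical values):

```python
from enum import Enum

class Mode(Enum):
    FAST = 1

# bytes become hex strings, Enums collapse to their values, nesting recurses
print(_to_json_safe({"key": b"\x01\x02", "mode": Mode.FAST, "n": (1, [2, 3])}))
# {'key': '0102', 'mode': 1, 'n': [1, [2, 3]]}
```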


class RepeaterCompanionBridge(CompanionBridge):
    """CompanionBridge that persists and loads prefs (full NodePrefs) via SQLite JSON blob."""

    def __init__(
        self,
        identity,
        packet_injector: Callable[..., Any],
        node_name: str = "pyMC",
        adv_type: int = 1,
        max_contacts: int = 1000,
        max_channels: int = 40,
        offline_queue_size: int = 512,
        radio_config: Optional[dict] = None,
        authenticate_callback: Optional[Callable[..., tuple[bool, int]]] = None,
        initial_contacts: Optional[Any] = None,
        *,
        sqlite_handler=None,
        companion_hash: str = "",
        on_prefs_saved: Optional[Callable[[str], None]] = None,
    ) -> None:
        self._sqlite_handler = sqlite_handler
        self._companion_hash = companion_hash
        self._on_prefs_saved = on_prefs_saved
        super().__init__(
            identity=identity,
            packet_injector=packet_injector,
            node_name=node_name,
            adv_type=adv_type,
            max_contacts=max_contacts,
            max_channels=max_channels,
            offline_queue_size=offline_queue_size,
            radio_config=radio_config,
            authenticate_callback=authenticate_callback,
            initial_contacts=initial_contacts,
        )
        # Load persisted prefs (e.g. node_name) from SQLite so matching uses last-saved name
        self._load_prefs()

    def _save_prefs(self) -> None:
        """Persist full NodePrefs as JSON to SQLite."""
        if not self._sqlite_handler or not self._companion_hash:
            return
        try:
            prefs_dict = dataclasses.asdict(self.prefs)
            prefs_safe = _to_json_safe(prefs_dict)
            self._sqlite_handler.companion_save_prefs(
                str(self._companion_hash), prefs_safe
            )
            if self._on_prefs_saved:
                try:
                    self._on_prefs_saved(self.prefs.node_name)
                except Exception as e:
                    logger.warning("Failed to sync node_name to config: %s", e)
        except Exception as e:
            logger.warning("Failed to persist companion prefs: %s", e)

    def _load_prefs(self) -> None:
        """Load prefs from SQLite JSON and merge into self.prefs (only known keys)."""
        if not self._sqlite_handler or not self._companion_hash:
            return
        try:
            stored = self._sqlite_handler.companion_load_prefs(self._companion_hash)
            if not stored or not isinstance(stored, dict):
                return
            for key, value in stored.items():
                if not hasattr(self.prefs, key):
                    continue
                current = getattr(self.prefs, key)
                try:
                    if value is None:
                        continue
                    if isinstance(current, bool):
                        setattr(self.prefs, key, bool(value))
                    elif isinstance(current, int):
                        setattr(self.prefs, key, int(value))
                    elif isinstance(current, float):
                        setattr(self.prefs, key, float(value))
                    elif isinstance(current, str):
                        setattr(self.prefs, key, str(value))
                    else:
                        setattr(self.prefs, key, value)
                except (TypeError, ValueError) as e:
                    logger.debug("Skip prefs key %r: %s", key, e)
        except Exception as e:
            logger.warning("Failed to load companion prefs: %s", e)
@@ -0,0 +1,150 @@
"""Companion frame protocol constants — re-exported from pyMC_core.

All protocol constants now live in :mod:`pymc_core.companion.constants`.
This module re-exports them so existing repeater imports continue to work.
"""

# Re-exports; F401 ignored for re-exported names.
from pymc_core.companion.constants import (  # noqa: F401
    ADV_TYPE_CHAT,
    ADV_TYPE_REPEATER,
    ADV_TYPE_ROOM,
    ADV_TYPE_SENSOR,
    ADVERT_LOC_NONE,
    ADVERT_LOC_SHARE,
    AUTOADD_CHAT,
    AUTOADD_OVERWRITE_OLDEST,
    AUTOADD_REPEATER,
    AUTOADD_ROOM,
    AUTOADD_SENSOR,
    CMD_ADD_UPDATE_CONTACT,
    CMD_APP_START,
    CMD_DEVICE_QUERY,
    CMD_EXPORT_CONTACT,
    CMD_EXPORT_PRIVATE_KEY,
    CMD_FACTORY_RESET,
    CMD_GET_ADVERT_PATH,
    CMD_GET_AUTOADD_CONFIG,
    CMD_GET_BATT_AND_STORAGE,
    CMD_GET_CHANNEL,
    CMD_GET_CONTACT_BY_KEY,
    CMD_GET_CONTACTS,
    CMD_GET_CUSTOM_VARS,
    CMD_GET_DEVICE_TIME,
    CMD_GET_STATS,
    CMD_GET_TUNING_PARAMS,
    CMD_HAS_CONNECTION,
    CMD_IMPORT_CONTACT,
    CMD_IMPORT_PRIVATE_KEY,
    CMD_LOGOUT,
    CMD_REBOOT,
    CMD_REMOVE_CONTACT,
    CMD_RESET_PATH,
    CMD_SEND_ANON_REQ,
    CMD_SEND_BINARY_REQ,
    CMD_SEND_CHANNEL_TXT_MSG,
    CMD_SEND_CONTROL_DATA,
    CMD_SEND_LOGIN,
    CMD_SEND_PATH_DISCOVERY_REQ,
    CMD_SEND_RAW_DATA,
    CMD_SEND_SELF_ADVERT,
    CMD_SEND_STATUS_REQ,
    CMD_SEND_TELEMETRY_REQ,
    CMD_SEND_TRACE_PATH,
    CMD_SEND_TXT_MSG,
    CMD_SET_ADVERT_LATLON,
    CMD_SET_ADVERT_NAME,
    CMD_SET_AUTOADD_CONFIG,
    CMD_SET_CHANNEL,
    CMD_SET_CUSTOM_VAR,
    CMD_SET_DEVICE_PIN,
    CMD_SET_DEVICE_TIME,
    CMD_SET_FLOOD_SCOPE,
    CMD_SET_OTHER_PARAMS,
    CMD_SET_RADIO_PARAMS,
    CMD_SET_RADIO_TX_POWER,
    CMD_SET_TUNING_PARAMS,
    CMD_SHARE_CONTACT,
    CMD_SIGN_DATA,
    CMD_SIGN_FINISH,
    CMD_SIGN_START,
    CMD_SYNC_NEXT_MESSAGE,
    CONTACT_NAME_SIZE,
    DEFAULT_MAX_CHANNELS,
    DEFAULT_MAX_CONTACTS,
    DEFAULT_OFFLINE_QUEUE_SIZE,
    DEFAULT_PUBLIC_CHANNEL_SECRET,
    DEFAULT_RESPONSE_TIMEOUT_MS,
    ERR_CODE_BAD_STATE,
    ERR_CODE_FILE_IO_ERROR,
    ERR_CODE_ILLEGAL_ARG,
    ERR_CODE_NOT_FOUND,
    ERR_CODE_TABLE_FULL,
    ERR_CODE_UNSUPPORTED_CMD,
    FRAME_INBOUND_PREFIX,
    FRAME_OUTBOUND_PREFIX,
    MAX_FRAME_SIZE,
    MAX_PATH_SIZE,
    MAX_SIGN_DATA_SIZE,
    MSG_SEND_FAILED,
    MSG_SEND_SENT_DIRECT,
    MSG_SEND_SENT_FLOOD,
    PROTOCOL_CODE_ANON_REQ,
    PROTOCOL_CODE_BINARY_REQ,
    PROTOCOL_CODE_RAW_DATA,
    PUB_KEY_SIZE,
    PUBLIC_GROUP_PSK,
    PUSH_CODE_ADVERT,
    PUSH_CODE_BINARY_RESPONSE,
    PUSH_CODE_CONTACT_DELETED,
    PUSH_CODE_CONTACTS_FULL,
    PUSH_CODE_CONTROL_DATA,
    PUSH_CODE_LOG_RX_DATA,
    PUSH_CODE_LOGIN_FAIL,
    PUSH_CODE_LOGIN_SUCCESS,
    PUSH_CODE_MSG_WAITING,
    PUSH_CODE_NEW_ADVERT,
    PUSH_CODE_PATH_DISCOVERY_RESPONSE,
    PUSH_CODE_PATH_UPDATED,
    PUSH_CODE_RAW_DATA,
    PUSH_CODE_SEND_CONFIRMED,
    PUSH_CODE_STATUS_RESPONSE,
    PUSH_CODE_TELEMETRY_RESPONSE,
    PUSH_CODE_TRACE_DATA,
    RESP_CODE_ADVERT_PATH,
    RESP_CODE_AUTOADD_CONFIG,
    RESP_CODE_BATT_AND_STORAGE,
    RESP_CODE_CHANNEL_INFO,
    RESP_CODE_CHANNEL_MSG_RECV,
    RESP_CODE_CHANNEL_MSG_RECV_V3,
    RESP_CODE_CONTACT,
    RESP_CODE_CONTACT_MSG_RECV,
    RESP_CODE_CONTACT_MSG_RECV_V3,
    RESP_CODE_CONTACTS_START,
    RESP_CODE_CURR_TIME,
    RESP_CODE_CUSTOM_VARS,
    RESP_CODE_DEVICE_INFO,
    RESP_CODE_DISABLED,
    RESP_CODE_END_OF_CONTACTS,
    RESP_CODE_ERR,
    RESP_CODE_EXPORT_CONTACT,
    RESP_CODE_NO_MORE_MESSAGES,
    RESP_CODE_OK,
    RESP_CODE_PRIVATE_KEY,
    RESP_CODE_SELF_INFO,
    RESP_CODE_SENT,
    RESP_CODE_SIGN_START,
    RESP_CODE_SIGNATURE,
    RESP_CODE_STATS,
    RESP_CODE_TUNING_PARAMS,
    STATS_TYPE_CORE,
    STATS_TYPE_PACKETS,
    STATS_TYPE_RADIO,
    TELEM_MODE_ALLOW_ALL,
    TELEM_MODE_ALLOW_FLAGS,
    TELEM_MODE_DENY,
    TXT_TYPE_CLI_DATA,
    TXT_TYPE_PLAIN,
    TXT_TYPE_SIGNED_PLAIN,
    BinaryReqType,
)
@@ -0,0 +1,166 @@
"""
Repeater-specific CompanionFrameServer with SQLite persistence.

Thin subclass of :class:`pymc_core.companion.frame_server.CompanionFrameServer`
that adds SQLite-backed message, contact, and channel persistence via a
``sqlite_handler`` dependency.
"""

from __future__ import annotations

import asyncio
import logging
from typing import Optional

from pymc_core.companion.constants import RESP_CODE_NO_MORE_MESSAGES
from pymc_core.companion.frame_server import CompanionFrameServer as _BaseFrameServer
from pymc_core.companion.models import QueuedMessage

logger = logging.getLogger("CompanionFrameServer")


class CompanionFrameServer(_BaseFrameServer):
    """Adds SQLite persistence for messages, contacts, and channels.

    Constructor signature is intentionally kept compatible with the
    previous monolithic implementation so ``main.py`` call-sites need
    zero changes.
    """

    def __init__(
        self,
        bridge,
        companion_hash: str,
        port: int = 5000,
        bind_address: str = "0.0.0.0",
        sqlite_handler=None,
        local_hash: Optional[int] = None,
        stats_getter=None,
        control_handler=None,
    ):
        super().__init__(
            bridge=bridge,
            companion_hash=companion_hash,
            port=port,
            bind_address=bind_address,
            device_model="pyMC-Repeater-Companion",
            device_version=None,  # use FIRMWARE_VER_CODE from pyMC_core
            build_date="13 Feb 2026",
            local_hash=local_hash,
            stats_getter=stats_getter,
            control_handler=control_handler,
        )
        self.sqlite_handler = sqlite_handler

    # -----------------------------------------------------------------
    # Persistence hook overrides
    # -----------------------------------------------------------------

    async def _persist_companion_message(self, msg_dict: dict) -> None:
        """Persist message to SQLite and pop from bridge queue."""
        if not self.sqlite_handler:
            return
        await asyncio.to_thread(
            self.sqlite_handler.companion_push_message,
            self.companion_hash,
            msg_dict,
        )
        self.bridge.message_queue.pop_last()

    def _sync_next_from_persistence(self) -> Optional[QueuedMessage]:
        """Retrieve next message from SQLite when bridge queue is empty."""
        if not self.sqlite_handler:
            return None
        msg_dict = self.sqlite_handler.companion_pop_message(self.companion_hash)
        if not msg_dict:
            return None
        return QueuedMessage(
            sender_key=msg_dict.get("sender_key", b""),
            txt_type=msg_dict.get("txt_type", 0),
            timestamp=msg_dict.get("timestamp", 0),
            text=msg_dict.get("text", ""),
            is_channel=bool(msg_dict.get("is_channel", False)),
            channel_idx=msg_dict.get("channel_idx", 0),
            path_len=msg_dict.get("path_len", 0),
        )

    # -----------------------------------------------------------------
    # Non-blocking command overrides (keep event loop responsive)
    # -----------------------------------------------------------------

    async def _cmd_sync_next_message(self, data: bytes) -> None:
        """Sync next message; run persistence read in thread so SQLite does not block."""
        msg = self.bridge.sync_next_message()
        if msg is None:
            msg = await asyncio.to_thread(self._sync_next_from_persistence)
        if msg is None:
            self._write_frame(bytes([RESP_CODE_NO_MORE_MESSAGES]))
            return
        self._write_frame(self._build_message_frame(msg))

    @staticmethod
    def _contact_to_dict(c) -> dict:
        """Convert a Contact object to a persistence dict."""
        pk = c.public_key if isinstance(c.public_key, bytes) else bytes.fromhex(c.public_key)
        return {
            "pubkey": pk,
            "name": c.name,
            "adv_type": c.adv_type,
            "flags": c.flags,
            "out_path_len": c.out_path_len,
            "out_path": (
                c.out_path
                if isinstance(c.out_path, bytes)
                else (bytes.fromhex(c.out_path) if c.out_path else b"")
            ),
            "last_advert_timestamp": c.last_advert_timestamp,
            "lastmod": c.lastmod,
            "gps_lat": c.gps_lat,
            "gps_lon": c.gps_lon,
            "sync_since": c.sync_since,
        }

    async def _persist_contact(self, contact) -> None:
        """Upsert a single contact to SQLite (non-blocking)."""
        if not self.sqlite_handler:
            return
        contact_dict = self._contact_to_dict(contact)
        await asyncio.to_thread(
            self.sqlite_handler.companion_upsert_contact,
            self.companion_hash,
            contact_dict,
        )

    async def _save_contacts(self) -> None:
        """Persist all contacts to SQLite (non-blocking)."""
        if not self.sqlite_handler:
            return
        contacts = self.bridge.get_contacts()
        dicts = [self._contact_to_dict(c) for c in contacts]
        await asyncio.to_thread(
            self.sqlite_handler.companion_save_contacts,
            self.companion_hash,
            dicts,
        )

    async def _save_channels(self) -> None:
        """Persist channels to SQLite (non-blocking)."""
        if not self.sqlite_handler:
            return
        channels = []
        max_ch = getattr(getattr(self.bridge, "channels", None), "max_channels", 40)
        for idx in range(max_ch):
            ch = self.bridge.get_channel(idx)
            if ch is not None:
                channels.append(
                    {
                        "channel_idx": idx,
                        "name": ch.name,
                        "secret": ch.secret,
                    }
                )
        await asyncio.to_thread(
            self.sqlite_handler.companion_save_channels,
            self.companion_hash,
            channels,
        )
@@ -0,0 +1,17 @@
"""Shared utilities for Companion (e.g. validation for config sync)."""

_INVALID_NODE_NAME_CHARS = "\n\r\x00"


def validate_companion_node_name(value: str) -> str:
    """Validate node_name for config sync: non-empty, max 31 bytes UTF-8, no control chars."""
    if not isinstance(value, str):
        raise ValueError("node_name must be a string")
    s = value.strip()
    if not s:
        raise ValueError("node_name cannot be empty")
    if len(s.encode("utf-8")) > 31:
        raise ValueError("node_name too long (max 31 bytes UTF-8)")
    if any(c in s for c in _INVALID_NODE_NAME_CHARS):
        raise ValueError("node_name contains invalid characters")
    return s
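
Usage is straightforward: valid names come back stripped, invalid ones raise `ValueError`:

```python
print(validate_companion_node_name("  RepeaterCompanion "))  # 'RepeaterCompanion'

try:
    validate_companion_node_name("x" * 32)  # 32 ASCII bytes exceeds the 31-byte cap
except ValueError as e:
    print(e)  # node_name too long (max 31 bytes UTF-8)
```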
+64
-9
@@ -49,7 +49,7 @@ def get_node_info(config: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"model": letsmesh_config.get("model", "PyMC-Repeater"),
|
||||
"disallowed_packet_types": disallowed_hex,
|
||||
"email": letsmesh_config.get("email", ""),
|
||||
"owner": letsmesh_config.get("owner", "")
|
||||
"owner": letsmesh_config.get("owner", ""),
|
||||
}
|
||||
|
||||
|
||||
@@ -107,14 +107,21 @@ def save_config(config_data: Dict[str, Any], config_path: Optional[str] = None)
|
||||
# Create backup of existing config
|
||||
config_file = Path(config_path)
|
||||
if config_file.exists():
|
||||
backup_path = config_file.with_suffix('.yaml.backup')
|
||||
backup_path = config_file.with_suffix(".yaml.backup")
|
||||
config_file.rename(backup_path)
|
||||
logger.info(f"Created backup at {backup_path}")
|
||||
|
||||
# Save new config
|
||||
with open(config_path, 'w') as f:
|
||||
yaml.safe_dump(config_data, f, default_flow_style=False, sort_keys=False)
|
||||
|
||||
|
||||
# Save new config (allow_unicode=True so emojis etc. are not escaped as \U0001F47E)
|
||||
with open(config_path, "w", encoding="utf-8") as f:
|
||||
yaml.safe_dump(
|
||||
config_data,
|
||||
f,
|
||||
default_flow_style=False,
|
||||
sort_keys=False,
|
||||
allow_unicode=True,
|
||||
width=1000000,
|
||||
)
|
||||
|
||||
logger.info(f"Saved configuration to {config_path}")
|
||||
return True

@@ -211,7 +218,9 @@ def get_radio_for_board(board_config: dict):
            return int(value.strip().rstrip(','), 0)
        raise ValueError(f"Invalid int value type: {type(value)}")

    radio_type = board_config.get("radio_type", "sx1262").lower()
    radio_type = board_config.get("radio_type", "sx1262").lower().strip()
    if radio_type == "kiss-modem":
        radio_type = "kiss"

    if radio_type in ("sx1262", "sx1262_ch341"):
        from pymc_core.hardware.sx1262_wrapper import SX1262Radio
@@ -283,6 +292,52 @@ def get_radio_for_board(board_config: dict):

        return radio

    elif radio_type == "kiss":
        try:
            from pymc_core.hardware.kiss_modem_wrapper import KissModemWrapper
        except ImportError:
            try:
                from pymc_core.hardware.kiss_serial_wrapper import (
                    KissSerialWrapper as KissModemWrapper,
                )
            except ImportError:
                raise RuntimeError(
                    "KISS modem support requires pyMC_core with KISS support. "
                    "Install your fork with: pip install -e /path/to/pyMC_core"
                ) from None

        kiss_config = board_config.get("kiss")
        if not kiss_config:
            raise ValueError("Missing 'kiss' section in configuration file for radio_type: kiss")

        port = kiss_config.get("port")
        if not port:
            raise ValueError("Missing 'port' in 'kiss' section (e.g. /dev/ttyUSB0)")

        baudrate = int(kiss_config.get("baud_rate", 115200))
        radio_cfg = board_config.get("radio") or {}
        radio_config = {
            "frequency": int(radio_cfg.get("frequency", 869618000)),
            "bandwidth": int(radio_cfg.get("bandwidth", 62500)),
            "spreading_factor": int(radio_cfg.get("spreading_factor", 8)),
            "coding_rate": int(radio_cfg.get("coding_rate", 8)),
            "tx_power": int(radio_cfg.get("tx_power", 14)),
        }
        radio = KissModemWrapper(
            port=port,
            baudrate=baudrate,
            radio_config=radio_config,
            auto_configure=True,
        )

        if hasattr(radio, "begin"):
            try:
                radio.begin()
            except Exception as e:
                raise RuntimeError(f"Failed to initialize KISS modem: {e}") from e

        return radio

    raise RuntimeError(
        f"Unknown radio type: {radio_type}. Supported: sx1262, sx1262_ch341"
        f"Unknown radio type: {radio_type}. Supported: sx1262, sx1262_ch341, kiss (or kiss-modem)"
    )
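# Illustrative only: the minimal board_config shape the kiss branch above
# expects, expressed as the parsed-YAML dict. Only "port" is required;
# everything else falls back to the literal defaults shown in the code.
board_config = {
    "radio_type": "kiss",
    "kiss": {"port": "/dev/ttyUSB0", "baud_rate": 115200},
    "radio": {"frequency": 869618000, "bandwidth": 62500,
              "spreading_factor": 8, "coding_rate": 8, "tx_power": 14},
}
radio = get_radio_for_board(board_config)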

@@ -1,6 +1,6 @@
from .sqlite_handler import SQLiteHandler
from .rrdtool_handler import RRDToolHandler
from .mqtt_handler import MQTTHandler
from .rrdtool_handler import RRDToolHandler
from .sqlite_handler import SQLiteHandler
from .storage_collector import StorageCollector

__all__ = ['SQLiteHandler', 'RRDToolHandler', 'MQTTHandler', 'StorageCollector']
__all__ = ["SQLiteHandler", "RRDToolHandler", "MQTTHandler", "StorageCollector"]

@@ -5,13 +5,14 @@ KISS - Keep It Simple Stupid approach.

try:
    import psutil

    PSUTIL_AVAILABLE = True
except ImportError:
    PSUTIL_AVAILABLE = False
    psutil = None

import time
import logging
import time

logger = logging.getLogger("HardwareStats")

@@ -26,10 +27,8 @@ class HardwareStatsCollector:

        if not PSUTIL_AVAILABLE:
            logger.error("psutil not available - cannot collect hardware stats")
            return {
                "error": "psutil library not available - cannot collect hardware statistics"
            }
            return {"error": "psutil library not available - cannot collect hardware statistics"}

        try:
            # Get current timestamp
            now = time.time()
@@ -42,10 +41,10 @@ class HardwareStatsCollector:

            # Memory stats
            memory = psutil.virtual_memory()

            # Disk stats
            disk = psutil.disk_usage('/')
            disk = psutil.disk_usage("/")

            # Network stats (total across all interfaces)
            net_io = psutil.net_io_counters()

@@ -79,48 +78,39 @@ class HardwareStatsCollector:
                    "usage_percent": cpu_percent,
                    "count": cpu_count,
                    "frequency": cpu_freq.current if cpu_freq else 0,
                    "load_avg": {
                        "1min": load_avg[0],
                        "5min": load_avg[1],
                        "15min": load_avg[2]
                    }
                    "load_avg": {"1min": load_avg[0], "5min": load_avg[1], "15min": load_avg[2]},
                },
                "memory": {
                    "total": memory.total,
                    "available": memory.available,
                    "used": memory.used,
                    "usage_percent": memory.percent
                    "usage_percent": memory.percent,
                },
                "disk": {
                    "total": disk.total,
                    "used": disk.used,
                    "free": disk.free,
                    "usage_percent": round((disk.used / disk.total) * 100, 1)
                    "usage_percent": round((disk.used / disk.total) * 100, 1),
                },
                "network": {
                    "bytes_sent": net_io.bytes_sent,
                    "bytes_recv": net_io.bytes_recv,
                    "packets_sent": net_io.packets_sent,
                    "packets_recv": net_io.packets_recv
                    "packets_recv": net_io.packets_recv,
                },
                "system": {
                    "uptime": system_uptime,
                    "boot_time": boot_time
                }
                "system": {"uptime": system_uptime, "boot_time": boot_time},
            }

            # Add temperatures if available
            if temperatures:
                stats["temperatures"] = temperatures

            return stats

        except Exception as e:
            logger.error(f"Error collecting hardware stats: {e}")
            return {
                "error": str(e)
            }
            return {"error": str(e)}

    def get_processes_summary(self, limit=10):
        """
        Get top processes by CPU and memory usage.
@@ -131,44 +121,39 @@ class HardwareStatsCollector:
            return {
                "processes": [],
                "total_processes": 0,
                "error": "psutil library not available - cannot collect process statistics"
                "error": "psutil library not available - cannot collect process statistics",
            }

        try:
            processes = []

            # Get all processes
            for proc in psutil.process_iter(['pid', 'name', 'cpu_percent', 'memory_percent', 'memory_info']):
            for proc in psutil.process_iter(
                ["pid", "name", "cpu_percent", "memory_percent", "memory_info"]
            ):
                try:
                    pinfo = proc.info
                    # Calculate memory in MB
                    memory_mb = 0
                    if pinfo['memory_info']:
                        memory_mb = pinfo['memory_info'].rss / 1024 / 1024  # RSS in MB

                    if pinfo["memory_info"]:
                        memory_mb = pinfo["memory_info"].rss / 1024 / 1024  # RSS in MB

                    process_data = {
                        "pid": pinfo['pid'],
                        "name": pinfo['name'] or 'Unknown',
                        "cpu_percent": pinfo['cpu_percent'] or 0.0,
                        "memory_percent": pinfo['memory_percent'] or 0.0,
                        "memory_mb": round(memory_mb, 1)
                        "pid": pinfo["pid"],
                        "name": pinfo["name"] or "Unknown",
                        "cpu_percent": pinfo["cpu_percent"] or 0.0,
                        "memory_percent": pinfo["memory_percent"] or 0.0,
                        "memory_mb": round(memory_mb, 1),
                    }
                    processes.append(process_data)
                except (psutil.NoSuchProcess, psutil.AccessDenied):
                    pass

            # Sort by CPU usage and get top processes
            top_processes = sorted(processes, key=lambda x: x['cpu_percent'], reverse=True)[:limit]

            return {
                "processes": top_processes,
                "total_processes": len(processes)
            }

            top_processes = sorted(processes, key=lambda x: x["cpu_percent"], reverse=True)[:limit]

            return {"processes": top_processes, "total_processes": len(processes)}

        except Exception as e:
            logger.error(f"Error collecting process stats: {e}")
            return {
                "processes": [],
                "total_processes": 0,
                "error": str(e)
            }
            return {"processes": [], "total_processes": 0, "error": str(e)}

@@ -1,14 +1,13 @@
import base64
import binascii
import json
import logging
import binascii
import base64
import paho.mqtt.client as mqtt
import threading

from datetime import datetime, timedelta
from typing import Callable, Dict, List, Optional

import paho.mqtt.client as mqtt
from nacl.signing import SigningKey
from typing import Callable, Optional, List, Dict
from .. import __version__

# Try to import datetime.UTC (Python 3.11+) otherwise fallback to timezone.utc
try:
@@ -17,15 +16,19 @@ except Exception:
    from datetime import timezone
    UTC = timezone.utc

from repeater import __version__

# Try to import paho-mqtt error code mappings
try:
    from paho.mqtt.reasoncodes import ReasonCode

    HAS_REASON_CODES = True
except ImportError:
    HAS_REASON_CODES = False

logger = logging.getLogger("LetsMeshHandler")


# --------------------------------------------------------------------
# Helper: Base64URL without padding
# --------------------------------------------------------------------
@@ -124,7 +127,7 @@ class _BrokerConnection:
        payload_b64 = b64url(json.dumps(payload, separators=(",", ":")).encode())

        signing_input = f"{header_b64}.{payload_b64}".encode()

        # Sign using LocalIdentity (supports both standard and firmware keys)
        try:
            signature = self.local_identity.sign(signing_input)
@@ -133,10 +136,10 @@ class _BrokerConnection:
            logging.error(f" - public_key: {self.public_key}")
            logging.error(f" - signing_input length: {len(signing_input)}")
            raise

        signature_hex = binascii.hexlify(signature).decode()
        token = f"{header_b64}.{payload_b64}.{signature_hex}"

        logging.debug(f"JWT token generated for {self.broker['name']}: {token[:50]}...")

        return token
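# Illustrative only: the token shape the method above produces —
# b64url(header).b64url(payload).hex(signature), where b64url strips the '='
# padding (per the helper section header). The header/payload claims shown
# here are placeholders, and SigningKey.generate() stands in for LocalIdentity.
import base64, binascii, json
from nacl.signing import SigningKey

def b64url(data: bytes) -> str:
    return base64.urlsafe_b64encode(data).rstrip(b"=").decode()

sk = SigningKey.generate()
header_b64 = b64url(json.dumps({"typ": "JWT"}, separators=(",", ":")).encode())
payload_b64 = b64url(json.dumps({"sub": "node"}, separators=(",", ":")).encode())
signing_input = f"{header_b64}.{payload_b64}".encode()
signature = sk.sign(signing_input).signature  # 64-byte Ed25519 signature
token = f"{header_b64}.{payload_b64}.{binascii.hexlify(signature).decode()}"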

@@ -159,7 +162,7 @@ class _BrokerConnection:
        """MQTT disconnection callback"""
        was_running = self._running
        self._running = False

        if rc != 0:  # Unexpected disconnect
            error_msg = get_mqtt_error_message(rc, is_disconnect=True)
            logging.warning(f"Disconnected from {self.broker['name']} (rc={rc}): {error_msg}")
@@ -167,7 +170,7 @@ class _BrokerConnection:
            self._schedule_reconnect(reason=error_msg)
        else:
            logging.info(f"Clean disconnect from {self.broker['name']}")

        if self._on_disconnect_callback:
            self._on_disconnect_callback(self.broker["name"])

@@ -175,29 +178,31 @@ class _BrokerConnection:
        """Schedule reconnection with exponential backoff"""
        if self._reconnect_timer:
            self._reconnect_timer.cancel()

        # Exponential backoff: 5s, 10s, 20s, 40s, 80s, up to max
        delay = min(5 * (2 ** self._reconnect_attempts), self._max_reconnect_delay)
        delay = min(5 * (2**self._reconnect_attempts), self._max_reconnect_delay)
        self._reconnect_attempts += 1

        logging.info(f"Scheduling reconnect to {self.broker['name']} in {delay}s (attempt {self._reconnect_attempts}, reason: {reason})")

        logging.info(
            f"Scheduling reconnect to {self.broker['name']} in {delay}s (attempt {self._reconnect_attempts}, reason: {reason})"
        )
        self._reconnect_timer = threading.Timer(delay, lambda: self._attempt_reconnect(reason))
        self._reconnect_timer.daemon = True
        self._reconnect_timer.start()
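# Illustrative only: the backoff schedule min(5 * 2**n, max) yields, assuming
# a hypothetical _max_reconnect_delay of 300s.
[min(5 * 2**n, 300) for n in range(8)]
# -> [5, 10, 20, 40, 80, 160, 300, 300]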

    def _attempt_reconnect(self, reason: str = "connection lost"):
        """Attempt to reconnect to broker with fresh JWT"""
        try:
            logging.info(f"Attempting reconnection to {self.broker['name']} (reason: {reason})...")

            # Stop the loop if it's still running (websocket mode requires clean restart)
            try:
                self.client.loop_stop()
            except:
                pass

            self._set_jwt_credentials()

            # Reconnect and restart loop
            self.client.connect(self.broker["host"], self.broker["port"], keepalive=60)
            self.client.loop_start()
@@ -205,7 +210,7 @@ class _BrokerConnection:
        except Exception as e:
            logging.error(f"Reconnection failed for {self.broker['name']}: {e}")
            self._schedule_reconnect()  # Try again later

    def _set_jwt_credentials(self):
        """Set JWT token credentials before connecting (CONNECT handshake only)"""
        try:
@@ -249,7 +254,7 @@ class _BrokerConnection:
        """Disconnect from broker"""
        self._running = False
        self._loop_running = False

        # Cancel any pending timers
        if self._reconnect_timer:
            self._reconnect_timer.cancel()
@@ -257,7 +262,7 @@ class _BrokerConnection:
        if self._jwt_refresh_timer:
            self._jwt_refresh_timer.cancel()
            self._jwt_refresh_timer = None

        self.client.loop_stop()
        self.client.disconnect()
        logging.info(f"Disconnected from {self.broker['name']}")
@@ -272,7 +277,7 @@ class _BrokerConnection:
    def is_connected(self) -> bool:
        """Check if connection is active"""
        return self._running

    def has_pending_reconnect(self) -> bool:
        """Check if a reconnection is scheduled"""
        return self._reconnect_timer is not None and self._reconnect_timer.is_alive()

@@ -288,19 +293,19 @@ class _BrokerConnection:
        stagger_offset = self.broker_index * 0.05
        refresh_threshold = 0.80 + stagger_offset
        return elapsed >= expiry_seconds * refresh_threshold

    def _schedule_jwt_refresh(self):
        """Schedule proactive JWT refresh before token expires"""
        if self._jwt_refresh_timer:
            self._jwt_refresh_timer.cancel()

        expiry_seconds = self.jwt_expiry_minutes * 60
        # Stagger refresh by 5% per broker to prevent simultaneous disconnects
        # Broker 0: 80%, Broker 1: 85%, Broker 2: 90%, etc.
        stagger_offset = self.broker_index * 0.05
        refresh_threshold = 0.80 + stagger_offset
        refresh_delay = expiry_seconds * refresh_threshold

        logging.info(
            f"JWT refresh scheduled for {self.broker['name']} in {refresh_delay:.0f}s "
            f"({refresh_threshold*100:.0f}% of {self.jwt_expiry_minutes}min token lifetime)"
@@ -308,12 +313,12 @@ class _BrokerConnection:
        self._jwt_refresh_timer = threading.Timer(refresh_delay, self.reconnect_for_token_expiry)
        self._jwt_refresh_timer.daemon = True
        self._jwt_refresh_timer.start()
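# Illustrative only: the staggered refresh points the math above produces for
# a 60-minute token (jwt_expiry_minutes=60 is an assumption here).
for broker_index in range(3):
    threshold = 0.80 + broker_index * 0.05
    print(broker_index, int(60 * 60 * threshold))  # 0: 2880s, 1: 3060s, 2: 3240s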

    def reconnect_for_token_expiry(self):
        """Proactively reconnect with new JWT before current one expires"""
        if not self._running:
            return

        logging.info(f"JWT token expiring soon for {self.broker['name']}, refreshing...")
        self._running = False
        self._jwt_refresh_timer = None
@@ -337,7 +342,7 @@ class MeshCoreToMqttJwtPusher:
        # Store local identity and get public key
        self.local_identity = local_identity
        public_key = local_identity.get_public_key().hex().upper()

        # Extract values from config
        from ..config import get_node_info

@@ -363,9 +368,11 @@ class MeshCoreToMqttJwtPusher:
        elif broker_index is None or broker_index == -1:
            # Connect to all built-in brokers + additional ones
            self.brokers = LETSMESH_BROKERS.copy()
            logging.info(f"Multi-broker mode: connecting to all {len(LETSMESH_BROKERS)} built-in brokers")
            logging.info(
                f"Multi-broker mode: connecting to all {len(LETSMESH_BROKERS)} built-in brokers"
            )
        else:

            if broker_index >= len(LETSMESH_BROKERS):
                raise ValueError(f"Invalid broker_index {broker_index}")
            self.brokers = [LETSMESH_BROKERS[broker_index]]
@@ -379,7 +386,7 @@ class MeshCoreToMqttJwtPusher:
                    logging.info(f"Added custom broker: {broker_config['name']}")
                else:
                    logging.warning(f"Skipping invalid broker config: {broker_config}")

        # Validate that we have at least one broker
        if not self.brokers:
            raise ValueError(
@@ -439,7 +446,7 @@ class MeshCoreToMqttJwtPusher:
        # Check if all connections are down AND none have pending reconnects
        all_down = all(not conn.is_connected() for conn in self.connections)
        any_reconnecting = any(conn.has_pending_reconnect() for conn in self.connections)

        if all_down and not any_reconnecting:
            logging.warning("All broker connections lost with no pending reconnects")
        elif all_down:
@@ -461,7 +468,7 @@ class MeshCoreToMqttJwtPusher:
                timer.start()
            except Exception as e:
                logging.error(f"Failed to connect to {conn.broker['name']}: {e}")

    def _delayed_connect(self, conn):
        """Connect a broker after a delay (called by timer)"""
        try:
@@ -478,6 +485,7 @@ class MeshCoreToMqttJwtPusher:
        self.publish_status(state="offline", origin=self.node_name, radio_config=self.radio_config)

        import time

        time.sleep(0.5)  # Give time for messages to be sent

        # Disconnect all brokers
@@ -500,7 +508,7 @@ class MeshCoreToMqttJwtPusher:
                    state="online", origin=self.node_name, radio_config=self.radio_config
                )
                logging.debug(f"Status heartbeat sent (next in {self.status_interval}s)")

                time.sleep(self.status_interval)
            except Exception as e:
                logging.error(f"Status heartbeat error: {e}")

@@ -586,28 +594,28 @@ class MeshCoreToMqttJwtPusher:
# Helper Functions
# ====================================================================


def get_mqtt_error_message(rc: int, is_disconnect: bool = False) -> str:
    """
    Get human-readable MQTT error message.

    Args:
        rc: Return code from paho-mqtt
        is_disconnect: True if from on_disconnect, False if from on_connect

    Returns:
        Human-readable error message
    """
    if HAS_REASON_CODES:
        try:
            # ReasonCode object has getName() method and value property
            from paho.mqtt.reasoncodes import ReasonCode

            reason = ReasonCode(mqtt.CONNACK if not is_disconnect else mqtt.DISCONNECT, identifier=rc)
            name = reason.getName() if hasattr(reason, 'getName') else str(reason)
            return f"{name} (code {rc})"
        except Exception as e:
            # Log the exception for debugging
            logger.debug(f"Could not decode reason code {rc}: {e}")

    # Fallback to manual mappings - Extended with MQTT v5 codes
    connect_errors = {
        0: "Connection accepted",
@@ -639,7 +647,7 @@ def get_mqtt_error_message(rc: int, is_disconnect: bool = False) -> str:
        157: "Server moved",
        159: "Connection rate exceeded",
    }

    disconnect_errors = {
        0: "Normal disconnect",
        1: "Unacceptable protocol version",
@@ -680,7 +688,6 @@ def get_mqtt_error_message(rc: int, is_disconnect: bool = False) -> str:
        161: "Subscription identifiers not supported",
        162: "Wildcard subscriptions not supported",
    }

    error_dict = disconnect_errors if is_disconnect else connect_errors
    return error_dict.get(rc, f"Unknown error code {rc}")
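# Illustrative only: exercising the fallback mapping above (when paho's
# ReasonCode path is taken instead, the exact text differs).
get_mqtt_error_message(0)                      # -> "Connection accepted"
get_mqtt_error_message(1, is_disconnect=True)  # -> "Unacceptable protocol version"
get_mqtt_error_message(9999)                   # -> "Unknown error code 9999"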

@@ -1,10 +1,11 @@
import json
import logging
import ssl
from typing import Dict, Any, Optional
from typing import Any, Dict, Optional

try:
    import paho.mqtt.client as mqtt

    MQTT_AVAILABLE = True
except ImportError:
    MQTT_AVAILABLE = False
@@ -102,17 +103,17 @@ class MQTTHandler:
        try:
            base_topic = self.mqtt_config.get("base_topic", "meshcore/repeater")
            topic = f"{base_topic}/{self.node_name}/{record_type}"

            if record_type == "packet":
                packet_record = PacketRecord.from_packet_record(
                    record,
                    origin=self.node_name,
                    origin_id=self.node_id
                    record, origin=self.node_name, origin_id=self.node_id
                )
                if not packet_record:
                    logger.debug("Skipping MQTT publish: packet missing required data for PacketRecord")
                    logger.debug(
                        "Skipping MQTT publish: packet missing required data for PacketRecord"
                    )
                    return

                payload = packet_record.to_dict()
                logger.debug("Publishing packet using PacketRecord format")
            else:
@@ -1,10 +1,11 @@
|
||||
import logging
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, Any
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
try:
|
||||
import rrdtool
|
||||
|
||||
RRDTOOL_AVAILABLE = True
|
||||
except ImportError:
|
||||
RRDTOOL_AVAILABLE = False
|
||||
@@ -23,17 +24,18 @@ class RRDToolHandler:
|
||||
if not self.available:
|
||||
logger.warning("RRDTool not available - skipping RRD initialization")
|
||||
return
|
||||
|
||||
|
||||
if self.rrd_path.exists():
|
||||
logger.info(f"RRD database exists: {self.rrd_path}")
|
||||
return
|
||||
|
||||
|
||||
try:
|
||||
rrdtool.create(
|
||||
str(self.rrd_path),
|
||||
"--step", "60",
|
||||
"--start", str(int(time.time() - 60)),
|
||||
|
||||
"--step",
|
||||
"60",
|
||||
"--start",
|
||||
str(int(time.time() - 60)),
|
||||
"DS:rx_count:COUNTER:120:0:U",
|
||||
"DS:tx_count:COUNTER:120:0:U",
|
||||
"DS:drop_count:COUNTER:120:0:U",
|
||||
@@ -42,7 +44,6 @@ class RRDToolHandler:
|
||||
"DS:avg_length:GAUGE:120:0:256",
|
||||
"DS:avg_score:GAUGE:120:0:1",
|
||||
"DS:neighbor_count:GAUGE:120:0:U",
|
||||
|
||||
"DS:type_0:COUNTER:120:0:U",
|
||||
"DS:type_1:COUNTER:120:0:U",
|
||||
"DS:type_2:COUNTER:120:0:U",
|
||||
@@ -60,25 +61,24 @@ class RRDToolHandler:
|
||||
"DS:type_14:COUNTER:120:0:U",
|
||||
"DS:type_15:COUNTER:120:0:U",
|
||||
"DS:type_other:COUNTER:120:0:U",
|
||||
|
||||
"RRA:AVERAGE:0.5:1:10080",
|
||||
"RRA:AVERAGE:0.5:5:8640",
|
||||
"RRA:AVERAGE:0.5:60:8760",
|
||||
"RRA:MAX:0.5:1:10080",
|
||||
"RRA:MIN:0.5:1:10080"
|
||||
"RRA:MIN:0.5:1:10080",
|
||||
)
|
||||
logger.info(f"RRD database created: {self.rrd_path}")
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to create RRD database: {e}")

    def update_packet_metrics(self, record: dict, cumulative_counts: dict):
        if not self.available or not self.rrd_path.exists():
            return

        try:
            timestamp = int(record.get("timestamp", time.time()))

            try:
                info = rrdtool.info(str(self.rrd_path))
                last_update = int(info.get("last_update", timestamp - 60))
@@ -86,104 +86,114 @@ class RRDToolHandler:
                    return
            except Exception as e:
                logger.debug(f"Failed to get RRD info for packet update: {e}")

            rx_total = cumulative_counts.get("rx_total", 0)
            tx_total = cumulative_counts.get("tx_total", 0)
            drop_total = cumulative_counts.get("drop_total", 0)
            type_counts = cumulative_counts.get("type_counts", {})

            type_values = []
            for i in range(16):
                type_values.append(str(type_counts.get(f"type_{i}", 0)))
            type_values.append(str(type_counts.get("type_other", 0)))

            # Handle None values for TX packets - use 'U' (unknown) for RRD
            rssi = record.get('rssi')
            snr = record.get('snr')
            score = record.get('score')

            rssi_val = 'U' if rssi is None else str(rssi)
            snr_val = 'U' if snr is None else str(snr)
            score_val = 'U' if score is None else str(score)
            length_val = str(record.get('length', 0))

            basic_values = f"{timestamp}:{rx_total}:{tx_total}:{drop_total}:" \
                           f"{rssi_val}:{snr_val}:{length_val}:{score_val}:" \
                           f"U"

            rssi = record.get("rssi")
            snr = record.get("snr")
            score = record.get("score")

            rssi_val = "U" if rssi is None else str(rssi)
            snr_val = "U" if snr is None else str(snr)
            score_val = "U" if score is None else str(score)
            length_val = str(record.get("length", 0))

            basic_values = (
                f"{timestamp}:{rx_total}:{tx_total}:{drop_total}:"
                f"{rssi_val}:{snr_val}:{length_val}:{score_val}:"
                f"U"
            )

            type_values_str = ":".join(type_values)
            values = f"{basic_values}:{type_values_str}"

            rrdtool.update(str(self.rrd_path), values)

        except Exception as e:
            logger.error(f"Failed to update RRD packet metrics: {e}")
            logger.debug(f"RRD packet update failed - record: {record}")

    def get_data(self, start_time: Optional[int] = None, end_time: Optional[int] = None,
                 resolution: str = "average") -> Optional[dict]:
    def get_data(
        self,
        start_time: Optional[int] = None,
        end_time: Optional[int] = None,
        resolution: str = "average",
    ) -> Optional[dict]:
        if not self.available or not self.rrd_path.exists():
            logger.error(f"RRD not available: available={self.available}, rrd_path exists={self.rrd_path.exists()}")
            logger.error(
                f"RRD not available: available={self.available}, rrd_path exists={self.rrd_path.exists()}"
            )
            return None

        try:
            if end_time is None:
                end_time = int(time.time())
            if start_time is None:
                start_time = end_time - (24 * 3600)

            fetch_result = rrdtool.fetch(
                str(self.rrd_path),
                resolution.upper(),
                "--start", str(start_time),
                "--end", str(end_time)
                "--start",
                str(start_time),
                "--end",
                str(end_time),
            )

            if not fetch_result:
                logger.error("RRD fetch returned None")
                return None

            (start, end, step), data_sources, data_points = fetch_result

            if not data_points:
                logger.warning("No data points returned from RRD fetch")

            result = {
                "start_time": start,
                "end_time": end,
                "step": step,
                "data_sources": data_sources,
                "packet_types": {},
                "metrics": {}
                "metrics": {},
            }

            timestamps = []
            current_time = start

            for ds in data_sources:
                if ds.startswith('type_'):
                    if 'packet_types' not in result:
                        result['packet_types'] = {}
                    result['packet_types'][ds] = []
                if ds.startswith("type_"):
                    if "packet_types" not in result:
                        result["packet_types"] = {}
                    result["packet_types"][ds] = []
                else:
                    result['metrics'][ds] = []

                    result["metrics"][ds] = []

            for point in data_points:
                timestamps.append(current_time)

                for i, value in enumerate(point):
                    ds_name = data_sources[i]
                    if ds_name.startswith('type_'):
                        result['packet_types'][ds_name].append(value)
                    if ds_name.startswith("type_"):
                        result["packet_types"][ds_name].append(value)
                    else:
                        result['metrics'][ds_name].append(value)

                        result["metrics"][ds_name].append(value)

                current_time += step

            result['timestamps'] = timestamps

            result["timestamps"] = timestamps

            return result

        except Exception as e:
            logger.error(f"Failed to get RRD data: {e}")
            return None
@@ -192,65 +202,65 @@ class RRDToolHandler:
        try:
            end_time = int(time.time())
            start_time = end_time - (hours * 3600)

            rrd_data = self.get_data(start_time, end_time)
            if not rrd_data or 'packet_types' not in rrd_data:
            if not rrd_data or "packet_types" not in rrd_data:
                logger.warning(f"No RRD data available")
                return None

            type_totals = {}
            packet_type_names = {
                'type_0': 'Request (REQ)',
                'type_1': 'Response (RESPONSE)',
                'type_2': 'Plain Text Message (TXT_MSG)',
                'type_3': 'Acknowledgment (ACK)',
                'type_4': 'Node Advertisement (ADVERT)',
                'type_5': 'Group Text Message (GRP_TXT)',
                'type_6': 'Group Datagram (GRP_DATA)',
                'type_7': 'Anonymous Request (ANON_REQ)',
                'type_8': 'Returned Path (PATH)',
                'type_9': 'Trace (TRACE)',
                'type_10': 'Multi-part Packet',
                'type_11': 'Control Packet Data',
                'type_12': 'Reserved Type 12',
                'type_13': 'Reserved Type 13',
                'type_14': 'Reserved Type 14',
                'type_15': 'Custom Packet (RAW_CUSTOM)',
                'type_other': 'Other Types (>15)'
                "type_0": "Request (REQ)",
                "type_1": "Response (RESPONSE)",
                "type_2": "Plain Text Message (TXT_MSG)",
                "type_3": "Acknowledgment (ACK)",
                "type_4": "Node Advertisement (ADVERT)",
                "type_5": "Group Text Message (GRP_TXT)",
                "type_6": "Group Datagram (GRP_DATA)",
                "type_7": "Anonymous Request (ANON_REQ)",
                "type_8": "Returned Path (PATH)",
                "type_9": "Trace (TRACE)",
                "type_10": "Multi-part Packet (MULTIPART)",
                "type_11": "Control (CONTROL)",
                "type_12": "Reserved Type 12",
                "type_13": "Reserved Type 13",
                "type_14": "Reserved Type 14",
                "type_15": "Custom Packet (RAW_CUSTOM)",
                "type_other": "Other Types (>15)",
            }

            total_valid_points = 0
            for type_key, data_points in rrd_data['packet_types'].items():
            for type_key, data_points in rrd_data["packet_types"].items():
                valid_points = [p for p in data_points if p is not None]
                total_valid_points += len(valid_points)

            if total_valid_points < 10:
                logger.warning(f"RRD data too sparse ({total_valid_points} valid points)")
                return None

            for type_key, data_points in rrd_data['packet_types'].items():

            for type_key, data_points in rrd_data["packet_types"].items():
                valid_points = [p for p in data_points if p is not None]

                if len(valid_points) >= 2:
                    total = max(valid_points) - min(valid_points)
                elif len(valid_points) == 1:
                    total = valid_points[0]
                else:
                    total = 0

                type_name = packet_type_names.get(type_key, type_key)
                type_totals[type_name] = max(0, total or 0)

            result = {
                "hours": hours,
                "packet_type_totals": type_totals,
                "total_packets": sum(type_totals.values()),
                "period": f"{hours} hours",
                "data_source": "rrd"
                "data_source": "rrd",
            }

            return result

        except Exception as e:
            logger.error(f"Failed to get packet type stats from RRD: {e}")
            return None
            return None

File diff suppressed because it is too large
@@ -3,15 +3,14 @@ import logging
import time
from datetime import datetime
from pathlib import Path
from typing import Optional, Dict, Any
from typing import Any, Dict, Optional

from .sqlite_handler import SQLiteHandler
from .rrdtool_handler import RRDToolHandler
from .mqtt_handler import MQTTHandler
from .letsmesh_handler import MeshCoreToMqttJwtPusher
from .mqtt_handler import MQTTHandler
from .rrdtool_handler import RRDToolHandler
from .sqlite_handler import SQLiteHandler
from .storage_utils import PacketRecord

logger = logging.getLogger("StorageCollector")

@@ -67,16 +66,18 @@ class StorageCollector:
            self.disallowed_packet_types = set()
        else:
            self.disallowed_packet_types = set()

        # Initialize hardware stats collector
        from .hardware_stats import HardwareStatsCollector

        self.hardware_stats = HardwareStatsCollector()
        logger.info("Hardware stats collector initialized")

        # Initialize WebSocket handler for real-time updates
        self.websocket_available = False
        try:
            from .websocket_handler import broadcast_packet, broadcast_stats

            self.websocket_broadcast_packet = broadcast_packet
            self.websocket_broadcast_stats = broadcast_stats
            self.websocket_available = True
@@ -92,23 +93,23 @@ class StorageCollector:
            "packets_sent": 0,
            "packets_received": 0,
            "errors": 0,
            "queue_len": 0
            "queue_len": 0,
        }

        uptime_secs = int(time.time() - self.repeater_handler.start_time)

        # Get airtime stats
        airtime_stats = self.repeater_handler.airtime_mgr.get_stats()

        # Get latest noise floor from database
        noise_floor = None
        try:
            recent_noise = self.sqlite_handler.get_noise_floor_history(hours=0.5, limit=1)
            if recent_noise and len(recent_noise) > 0:
                noise_floor = recent_noise[-1].get('noise_floor_dbm')
                noise_floor = recent_noise[-1].get("noise_floor_dbm")
        except Exception as e:
            logger.debug(f"Could not fetch noise floor: {e}")

        stats = {
            "uptime_secs": uptime_secs,
            "packets_sent": self.repeater_handler.forwarded_count,
@@ -116,22 +117,22 @@ class StorageCollector:
            "errors": 0,
            "queue_len": 0,  # N/A for Python repeater
        }

        # Add airtime stats
        if airtime_stats:
            stats["tx_air_secs"] = airtime_stats["total_airtime_ms"] / 1000
            stats["current_airtime_ms"] = airtime_stats["current_airtime_ms"]
            stats["utilization_percent"] = airtime_stats["utilization_percent"]

        # Add noise floor if available
        if noise_floor is not None:
            stats["noise_floor"] = noise_floor

        return stats

    def record_packet(self, packet_record: dict, skip_letsmesh_if_invalid: bool = True):
        """Record packet to storage and publish to MQTT/LetsMesh

        Args:
            packet_record: Dictionary containing packet information
            skip_letsmesh_if_invalid: If True, don't publish packets with drop_reason to LetsMesh
@@ -146,28 +147,34 @@ class StorageCollector:
            cumulative_counts = self.sqlite_handler.get_cumulative_counts()
            self.rrd_handler.update_packet_metrics(packet_record, cumulative_counts)
            self.mqtt_handler.publish(packet_record, "packet")

            # Broadcast to WebSocket clients for real-time updates
            if self.websocket_available:
                try:
                    self.websocket_broadcast_packet(packet_record)

                    # Broadcast 24-hour packet stats (same as /api/packet_stats?hours=24)
                    packet_stats_24h = self.sqlite_handler.get_packet_stats(hours=24)
                    uptime_seconds = time.time() - self.repeater_handler.start_time if self.repeater_handler else 0

                    self.websocket_broadcast_stats({
                        "packet_stats": packet_stats_24h,
                        "system_stats": {
                            "uptime_seconds": uptime_seconds,
                    uptime_seconds = (
                        time.time() - self.repeater_handler.start_time if self.repeater_handler else 0
                    )

                    self.websocket_broadcast_stats(
                        {
                            "packet_stats": packet_stats_24h,
                            "system_stats": {
                                "uptime_seconds": uptime_seconds,
                            },
                        }
                    })
                    )
                except Exception as e:
                    logger.debug(f"WebSocket broadcast failed: {e}")

        # Publish to LetsMesh if enabled (skip invalid packets if requested)
        if skip_letsmesh_if_invalid and packet_record.get('drop_reason'):
            logger.debug(f"Skipping LetsMesh publish for packet with drop_reason: {packet_record.get('drop_reason')}")
        if skip_letsmesh_if_invalid and packet_record.get("drop_reason"):
            logger.debug(
                f"Skipping LetsMesh publish for packet with drop_reason: {packet_record.get('drop_reason')}"
            )
        else:
            self._publish_to_letsmesh(packet_record)

@@ -264,23 +271,24 @@ class StorageCollector:

    def get_neighbors(self) -> dict:
        return self.sqlite_handler.get_neighbors()

    def get_node_name_by_pubkey(self, pubkey: str) -> Optional[str]:
        """
        Lookup node name from adverts table by public key.

        Args:
            pubkey: Public key in hex string format

        Returns:
            Node name if found, None otherwise
        """
        try:
            import sqlite3

            with sqlite3.connect(self.sqlite_handler.sqlite_path) as conn:
                result = conn.execute(
                    "SELECT node_name FROM adverts WHERE pubkey = ? AND node_name IS NOT NULL ORDER BY last_seen DESC LIMIT 1",
                    (pubkey,)
                    (pubkey,),
                ).fetchone()
                return result[0] if result else None
        except Exception as e:

@@ -1,6 +1,6 @@
"""Storage utility classes and functions for data acquisition."""

from dataclasses import dataclass, asdict
from dataclasses import asdict, dataclass
from datetime import datetime
from typing import Optional

@@ -1,19 +1,21 @@
"""
WebSocket handler for real-time packet updates - simple ws4py implementation
"""

import json
import logging
import threading
import time
import cherrypy
from urllib.parse import parse_qs
from ws4py.websocket import WebSocket

import cherrypy
from ws4py.server.cherrypyserver import WebSocketPlugin, WebSocketTool
from ws4py.websocket import WebSocket

logger = logging.getLogger("WebSocket")

# Suppress noisy ws4py error logs for normal disconnections (ConnectionResetError, etc.)
logging.getLogger('ws4py').setLevel(logging.CRITICAL)
logging.getLogger("ws4py").setLevel(logging.CRITICAL)

# Global set of connected clients
_connected_clients = set()
@@ -69,14 +71,18 @@ class PacketWebSocket(WebSocket):
        # Auth success - store user and add to connected clients
        self.user = payload.get("sub")  # type: ignore[attr-defined]
        _connected_clients.add(self)
        logger.info(f"WebSocket connected ({self.user or 'unknown user'}). Total clients: {len(_connected_clients)}")

        logger.info(
            f"WebSocket connected ({self.user or 'unknown user'}). Total clients: {len(_connected_clients)}"
        )

    def closed(self, code, reason=None):
        """Called when a WebSocket connection is closed"""
        _connected_clients.discard(self)
        user = getattr(self, 'user', 'unknown')
        logger.info(f"WebSocket disconnected (user: {user}, code: {code}, reason: {reason}). Total clients: {len(_connected_clients)}")

        user = getattr(self, "user", "unknown")
        logger.info(
            f"WebSocket disconnected (user: {user}, code: {code}, reason: {reason}). Total clients: {len(_connected_clients)}"
        )

    def received_message(self, message):
        """Handle messages from client"""
        try:
+217
-101
@@ -16,11 +16,10 @@ from pymc_core.protocol.constants import (
    PH_TYPE_SHIFT,
    ROUTE_TYPE_DIRECT,
    ROUTE_TYPE_FLOOD,
    ROUTE_TYPE_TRANSPORT_FLOOD,
    ROUTE_TYPE_TRANSPORT_DIRECT,

    ROUTE_TYPE_TRANSPORT_FLOOD,
)
from pymc_core.protocol.packet_utils import PacketHeaderUtils, PacketTimingUtils
from pymc_core.protocol.packet_utils import PacketHeaderUtils, PacketTimingUtils, PathUtils

from repeater.airtime import AirtimeManager
from repeater.data_acquisition import StorageCollector
@@ -51,15 +50,18 @@ class RepeaterHandler(BaseHandler):

        return 0xFF  # Special marker (not a real payload type)

    def __init__(self, config: dict, dispatcher, local_hash: int, send_advert_func=None):
    def __init__(self, config: dict, dispatcher, local_hash: int, *, local_hash_bytes=None, send_advert_func=None):

        self.config = config
        self.dispatcher = dispatcher
        self.local_hash = local_hash
        self.local_hash_bytes = local_hash_bytes or bytes([local_hash])
        self.send_advert_func = send_advert_func
        self.airtime_mgr = AirtimeManager(config)
        self.seen_packets = OrderedDict()
        self.cache_ttl = max(300, config.get("repeater", {}).get("cache_ttl", 3600))  # Min 5 min, default 1 hour
        self.cache_ttl = max(
            300, config.get("repeater", {}).get("cache_ttl", 3600)
        )  # Min 5 min, default 1 hour
        self.max_cache_size = 1000
        self.tx_delay_factor = config.get("delays", {}).get("tx_delay_factor", 1.0)
        self.direct_tx_delay_factor = config.get("delays", {}).get("direct_tx_delay_factor", 0.5)
@@ -118,10 +120,12 @@ class RepeaterHandler(BaseHandler):
        self._transport_keys_cache = None
        self._transport_keys_cache_time = 0
        self._transport_keys_cache_ttl = 60  # Cache for 60 seconds

        self._start_background_tasks()

    async def __call__(self, packet: Packet, metadata: Optional[dict] = None, local_transmission: bool = False) -> None:
    async def __call__(
        self, packet: Packet, metadata: Optional[dict] = None, local_transmission: bool = False
    ) -> None:

        if metadata is None:
            metadata = {}
@@ -147,12 +151,17 @@ class RepeaterHandler(BaseHandler):
        tx_delay_ms = 0.0
        drop_reason = None

        original_path = list(packet.path) if packet.path else []
        original_path_hashes = packet.get_path_hashes_hex()
        path_hash_size = packet.get_path_hash_size()

        # Process for forwarding (skip if in monitor mode or if this is a local transmission)
        result = None if (monitor_mode or local_transmission) else self.process_packet(processed_packet, snr)
        forwarded_path = None

        result = (
            None
            if (monitor_mode or local_transmission)
            else self.process_packet(processed_packet, snr)
        )
        forwarded_path_hashes = None

        # For local transmissions, create a direct transmission result
        if local_transmission and not monitor_mode:
            # Mark local packet as seen to prevent duplicate processing when received back
@@ -160,48 +169,89 @@ class RepeaterHandler(BaseHandler):
            # Calculate transmission delay for local packets
            delay = self._calculate_tx_delay(packet, snr)
            result = (packet, delay)
            forwarded_path = list(packet.path) if packet.path else []
            forwarded_path_hashes = packet.get_path_hashes_hex()
            logger.debug(f"Local transmission: calculated delay {delay:.3f}s")

        if result:
            fwd_pkt, delay = result
            tx_delay_ms = delay * 1000.0

            # Capture the forwarded path (after modification)
            forwarded_path = list(fwd_pkt.path) if fwd_pkt.path else []
            forwarded_path_hashes = fwd_pkt.get_path_hashes_hex()

            # Check duty-cycle before scheduling TX
            airtime_ms = self.airtime_mgr.calculate_airtime(fwd_pkt.get_raw_length())

            can_tx, wait_time = self.airtime_mgr.can_transmit(airtime_ms)

            # LBT metadata (set after any TX path that awaits send)
            tx_metadata = None
            lbt_attempts = 0
            lbt_backoff_delays_ms = None
            lbt_channel_busy = False

            if not can_tx:
                logger.warning(
                    f"Duty-cycle limit exceeded. Airtime={airtime_ms:.1f}ms, "
                    f"wait={wait_time:.1f}s before retry"
                )
                self.dropped_count += 1
                drop_reason = "Duty cycle limit"
                if local_transmission:
                    # Defer local TX until duty cycle allows instead of dropping
                    deferred_delay = delay + wait_time
                    logger.info(
                        f"Duty-cycle limit: deferring local TX by {wait_time:.1f}s "
                        f"(airtime={airtime_ms:.1f}ms)"
                    )
                    self.forwarded_count += 1
                    transmitted = True
                    tx_task = await self.schedule_retransmit(
                        fwd_pkt, deferred_delay, airtime_ms, local_transmission=True
                    )
                    try:
                        await tx_task
                    except Exception as e:
                        self.forwarded_count -= 1
                        transmitted = False
                        drop_reason = "TX failed (deferred)"
                        logger.warning(f"Deferred local TX failed: {e}")
                        raise
                    tx_metadata = getattr(fwd_pkt, "_tx_metadata", None)
                    if tx_metadata:
                        lbt_attempts = tx_metadata.get("lbt_attempts", 0)
                        lbt_backoff_delays_ms = tx_metadata.get(
                            "lbt_backoff_delays_ms", []
                        )
                        lbt_channel_busy = tx_metadata.get("lbt_channel_busy", False)
                    if lbt_attempts > 0:
                        total_lbt_delay = sum(lbt_backoff_delays_ms)
                        logger.info(
                            f"LBT: {lbt_attempts} attempts, "
                            f"{total_lbt_delay:.0f}ms delay, "
                            f"backoffs={lbt_backoff_delays_ms}"
                        )
                else:
                    logger.warning(
                        f"Duty-cycle limit exceeded. Airtime={airtime_ms:.1f}ms, "
                        f"wait={wait_time:.1f}s before retry"
                    )
                    self.dropped_count += 1
                    drop_reason = "Duty cycle limit"
            else:
                self.forwarded_count += 1
                transmitted = True
                # Schedule retransmit with delay (returns task)
                tx_task = await self.schedule_retransmit(fwd_pkt, delay, airtime_ms)

                # Wait for transmission to complete to get LBT metadata
                await tx_task

                # Extract LBT metadata after transmission
                tx_metadata = getattr(fwd_pkt, '_tx_metadata', None)
                lbt_attempts = 0
                lbt_backoff_delays_ms = None
                lbt_channel_busy = False

                tx_task = await self.schedule_retransmit(
                    fwd_pkt, delay, airtime_ms, local_transmission=local_transmission
                )
                try:
                    await tx_task
                except Exception as e:
                    self.forwarded_count -= 1
                    transmitted = False
                    drop_reason = "TX failed"
                    logger.warning(f"Local TX failed: {e}")
                    raise
                tx_metadata = getattr(fwd_pkt, "_tx_metadata", None)
                if tx_metadata:
                    lbt_attempts = tx_metadata.get('lbt_attempts', 0)
                    lbt_backoff_delays_ms = tx_metadata.get('lbt_backoff_delays_ms', [])
                    lbt_channel_busy = tx_metadata.get('lbt_channel_busy', False)

                    lbt_attempts = tx_metadata.get("lbt_attempts", 0)
                    lbt_backoff_delays_ms = tx_metadata.get("lbt_backoff_delays_ms", [])
                    lbt_channel_busy = tx_metadata.get("lbt_channel_busy", False)

                if lbt_attempts > 0:
                    total_lbt_delay = sum(lbt_backoff_delays_ms)
                    logger.info(
@@ -215,7 +265,9 @@ class RepeaterHandler(BaseHandler):
            drop_reason = "Monitor mode"
        else:
            # Check if packet has a specific drop reason set by handlers
            drop_reason = processed_packet.drop_reason or self._get_drop_reason(processed_packet)
            drop_reason = processed_packet.drop_reason or self._get_drop_reason(
                processed_packet
            )
            logger.debug(f"Packet not forwarded: {drop_reason}")

        # Extract packet type and route from header
@@ -240,15 +292,14 @@ class RepeaterHandler(BaseHandler):
            drop_reason = "Duplicate"

        path_hash = None
        display_path = (
            original_path if original_path else (list(packet.path) if packet.path else [])
        display_hashes = (
            original_path_hashes if original_path_hashes else packet.get_path_hashes_hex()
        )
        if display_path and len(display_path) > 0:
            # Format path as array of uppercase hex bytes
            path_bytes = [f"{b:02X}" for b in display_path[:8]]  # First 8 bytes max
            if len(display_path) > 8:
                path_bytes.append("...")
            path_hash = "[" + ", ".join(path_bytes) + "]"
        if display_hashes:
            display = display_hashes[:8]
            if len(display_hashes) > 8:
                display = list(display) + ["..."]
            path_hash = "[" + ", ".join(display) + "]"

        src_hash = None
        dst_hash = None
@@ -294,13 +345,14 @@ class RepeaterHandler(BaseHandler):
            "path_hash": path_hash,
            "src_hash": src_hash,
            "dst_hash": dst_hash,
            "original_path": ([f"{b:02X}" for b in original_path] if original_path else None),
            "forwarded_path": (
                [f"{b:02X}" for b in forwarded_path] if forwarded_path is not None else None
            ),
            "original_path": original_path_hashes or None,
            "forwarded_path": forwarded_path_hashes,
            "path_hash_size": path_hash_size,
            "raw_packet": packet.write_to().hex() if hasattr(packet, "write_to") else None,
            "lbt_attempts": lbt_attempts if transmitted else 0,
            "lbt_backoff_delays_ms": lbt_backoff_delays_ms if transmitted and lbt_backoff_delays_ms else None,
            "lbt_backoff_delays_ms": (
                lbt_backoff_delays_ms if transmitted and lbt_backoff_delays_ms else None
            ),
            "lbt_channel_busy": lbt_channel_busy if transmitted else False,
        }

@@ -384,10 +436,11 @@ class RepeaterHandler(BaseHandler):
            return "Global flood policy disabled"

        if route_type == ROUTE_TYPE_DIRECT:
            if not packet.path or len(packet.path) == 0:
            hash_size = packet.get_path_hash_size()
            if not packet.path or len(packet.path) < hash_size:
                return "Direct: no path"
            next_hop = packet.path[0]
            if next_hop != self.local_hash:
            next_hop = bytes(packet.path[:hash_size])
            if next_hop != self.local_hash_bytes[:hash_size]:
                return "Direct: not for us"

        # Default reason
@@ -414,7 +467,10 @@ class RepeaterHandler(BaseHandler):
            return False, "Empty payload"

        if len(packet.path or []) >= MAX_PATH_SIZE:
            return False, f"Path length {len(packet.path or [])} exceeds MAX_PATH_SIZE ({MAX_PATH_SIZE})"
            return (
                False,
                f"Path length {len(packet.path or [])} exceeds MAX_PATH_SIZE ({MAX_PATH_SIZE})",
            )

        return True, ""

@@ -454,11 +510,13 @@ class RepeaterHandler(BaseHandler):

        try:
            from pymc_core.protocol.transport_keys import calc_transport_code

            # Check cache validity
            current_time = time.time()
            if (self._transport_keys_cache is None or
                    current_time - self._transport_keys_cache_time > self._transport_keys_cache_ttl):
            if (
                self._transport_keys_cache is None
                or current_time - self._transport_keys_cache_time > self._transport_keys_cache_ttl
            ):
                # Refresh cache
                self._transport_keys_cache = self.storage.get_transport_keys()
                self._transport_keys_cache_time = current_time
@@ -471,14 +529,16 @@ class RepeaterHandler(BaseHandler):
            # Check if packet has transport codes
            if not packet.has_transport_codes():
                return False, "No transport codes present"

            transport_code_0 = packet.transport_codes[0]  # First transport code

            payload = packet.get_payload()
            payload_type = packet.get_payload_type() if hasattr(packet, 'get_payload_type') else ((packet.header & 0x3C) >> 2)

            payload_type = (
                packet.get_payload_type()
                if hasattr(packet, "get_payload_type")
                else ((packet.header & 0x3C) >> 2)
            )

            # Check packet against each transport key
            for key_record in transport_keys:
                transport_key_encoded = key_record.get("transport_key")
@@ -487,41 +547,48 @@ class RepeaterHandler(BaseHandler):

                if not transport_key_encoded:
                    continue

                try:
                    import base64

                    transport_key = base64.b64decode(transport_key_encoded)
                    expected_code = calc_transport_code(transport_key, packet)
                    if transport_code_0 == expected_code:
                        logger.debug(f"Transport code validated for key '{key_name}' with policy '{flood_policy}'")

                        logger.debug(
                            f"Transport code validated for key '{key_name}' with policy '{flood_policy}'"
                        )

                        # Update last_used timestamp for this key
                        try:
                            key_id = key_record.get("id")
                            if key_id:
                                self.storage.update_transport_key(
                                    key_id=key_id,
                                    last_used=time.time()
                                    key_id=key_id, last_used=time.time()
                                )
                                logger.debug(
                                    f"Updated last_used timestamp for transport key '{key_name}'"
                                )
                                logger.debug(f"Updated last_used timestamp for transport key '{key_name}'")
                        except Exception as e:
                            logger.warning(f"Failed to update last_used for transport key '{key_name}': {e}")

                            logger.warning(
                                f"Failed to update last_used for transport key '{key_name}': {e}"
                            )

                        # Check flood policy for this key
                        if flood_policy == "allow":
                            return True, ""
                        else:
                            return False, f"Transport key '{key_name}' flood policy denied"

                except Exception as e:
                    logger.warning(f"Error checking transport key '{key_name}': {e}")
                    continue

            # No matching transport code found
            logger.debug(f"Transport code 0x{transport_code_0:04X} denied (checked {len(transport_keys)} keys)")
            logger.debug(
                f"Transport code 0x{transport_code_0:04X} denied (checked {len(transport_keys)} keys)"
            )
            return False, "No matching transport code"

        except Exception as e:
            logger.error(f"Transport code validation error: {e}")
            return False, f"Transport code validation error: {e}"
@@ -564,16 +631,30 @@ class RepeaterHandler(BaseHandler):
        if self.is_duplicate(packet):
            packet.drop_reason = "Duplicate"
            return None

        self.mark_seen(packet)

        if packet.path is None:
            packet.path = bytearray()
        elif not isinstance(packet.path, bytearray):
            packet.path = bytearray(packet.path)

        packet.path.append(self.local_hash)
        packet.path_len = len(packet.path)
        hash_size = packet.get_path_hash_size()
        hop_count = packet.get_path_hash_count()

        # path_len encodes hop count in 6 bits (0-63); adding ourselves must not exceed 63
        if hop_count >= 63:
            packet.drop_reason = "Path hop count at maximum (63), cannot append"
            return None

        # Check path won't exceed MAX_PATH_SIZE after append
        if (hop_count + 1) * hash_size > MAX_PATH_SIZE:
            packet.drop_reason = "Path would exceed MAX_PATH_SIZE"
            return None

        self.mark_seen(packet)

        # Append hash_size bytes from our public key prefix
        packet.path.extend(self.local_hash_bytes[:hash_size])
        packet.path_len = PathUtils.encode_path_len(hash_size, hop_count + 1)

        return packet
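# Illustrative only: one packing consistent with the comment above ("path_len
# encodes hop count in 6 bits (0-63)"); the real PathUtils.encode_path_len in
# pymc_core may place the hash-size bits differently.
def encode_path_len_sketch(hash_size: int, hop_count: int) -> int:
    assert 0 <= hop_count <= 63, "hop count must fit in 6 bits"
    size_code = {1: 0, 2: 1}.get(hash_size, 0)  # hypothetical size encoding
    return (size_code << 6) | hop_count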
|
||||
|
||||
@@ -591,13 +672,16 @@ class RepeaterHandler(BaseHandler):
|
||||
packet.drop_reason = "Marked do not retransmit"
|
||||
return None
|
||||
|
||||
hash_size = packet.get_path_hash_size()
|
||||
hop_count = packet.get_path_hash_count()
|
||||
|
||||
# Check if we're the next hop
|
||||
if not packet.path or len(packet.path) == 0:
|
||||
if not packet.path or len(packet.path) < hash_size:
|
||||
packet.drop_reason = "Direct: no path"
|
||||
return None
|
||||
|
||||
next_hop = packet.path[0]
|
||||
if next_hop != self.local_hash:
|
||||
next_hop = bytes(packet.path[:hash_size])
|
||||
if next_hop != self.local_hash_bytes[:hash_size]:
|
||||
packet.drop_reason = "Direct: not for us"
|
||||
return None
|
||||
|
||||
@@ -608,8 +692,10 @@ class RepeaterHandler(BaseHandler):
|
||||
|
||||
self.mark_seen(packet)
|
||||
|
||||
packet.path = bytearray(packet.path[1:])
|
||||
packet.path_len = len(packet.path)
|
||||
original_path = list(packet.path)
|
||||
# Remove first hash entry (hash_size bytes)
|
||||
packet.path = bytearray(packet.path[hash_size:])
|
||||
packet.path_len = PathUtils.encode_path_len(hash_size, hop_count - 1)
|
||||
|
||||
return packet
|
||||
|
||||
@@ -709,22 +795,43 @@ class RepeaterHandler(BaseHandler):
            packet.drop_reason = f"Unknown route type: {route_type}"
            return None

    async def schedule_retransmit(self, fwd_pkt: Packet, delay: float, airtime_ms: float = 0.0):
        """Schedule a packet retransmission with delay and return the task."""
    async def schedule_retransmit(
        self,
        fwd_pkt: Packet,
        delay: float,
        airtime_ms: float = 0.0,
        local_transmission: bool = False,
    ):
        """Schedule a packet retransmission with delay and return the task.

        If local_transmission is True and the first send fails, retry once after
        a short delay (handles transient radio/LBT failures).
        """

        async def delayed_send():
            await asyncio.sleep(delay)
            try:
                await self.dispatcher.send_packet(fwd_pkt, wait_for_ack=False)

                # Record airtime after successful TX
                if airtime_ms > 0:
                    self.airtime_mgr.record_tx(airtime_ms)
                packet_size = fwd_pkt.get_raw_length()
                logger.info(
                    f"Retransmitted packet ({packet_size} bytes, {airtime_ms:.1f}ms airtime)"
                )
            except Exception as e:
                logger.error(f"Retransmit failed: {e}")
            last_error = None
            for attempt in range(2 if local_transmission else 1):
                try:
                    await self.dispatcher.send_packet(fwd_pkt, wait_for_ack=False)
                    if airtime_ms > 0:
                        self.airtime_mgr.record_tx(airtime_ms)
                    packet_size = fwd_pkt.get_raw_length()
                    logger.info(
                        f"Retransmitted packet ({packet_size} bytes, "
                        f"{airtime_ms:.1f}ms airtime)"
                    )
                    return
                except Exception as e:
                    last_error = e
                    logger.error(f"Retransmit failed: {e}")
                    if local_transmission and attempt == 0:
                        logger.info("Retrying local TX in 1s...")
                        await asyncio.sleep(1.0)
                    else:
                        raise
            if last_error is not None:
                raise last_error

        return asyncio.create_task(delayed_send())

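The retry logic is a bounded-attempts loop: one extra try, a fixed backoff, and the last exception re-raised if every send fails. The same pattern reduced to a generic helper (illustrative only, not part of the codebase):

import asyncio

async def send_with_retry(send, retries: int = 1, backoff_s: float = 1.0):
    # Try once, then up to `retries` more times after a fixed backoff.
    last_error = None
    for attempt in range(1 + retries):
        try:
            return await send()
        except Exception as e:
            last_error = e
            if attempt < retries:
                await asyncio.sleep(backoff_s)
    raise last_error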
@@ -787,7 +894,9 @@ class RepeaterHandler(BaseHandler):
                "mode": repeater_config.get("mode", "forward"),
                "use_score_for_tx": repeater_config.get("use_score_for_tx", False),
                "score_threshold": repeater_config.get("score_threshold", 0.3),
                "send_advert_interval_hours": repeater_config.get("send_advert_interval_hours", 10),
                "send_advert_interval_hours": repeater_config.get(
                    "send_advert_interval_hours", 10
                ),
                "latitude": repeater_config.get("latitude", 0.0),
                "longitude": repeater_config.get("longitude", 0.0),
                "max_flood_hops": repeater_config.get("max_flood_hops", 3),
@@ -796,7 +905,9 @@ class RepeaterHandler(BaseHandler):
                "advert_penalty_box": repeater_config.get("advert_penalty_box", {}),
                "advert_adaptive": repeater_config.get("advert_adaptive", {}),
            },
            "radio": self.config.get("radio", {}),  # Read from live config, not cached radio_config
            "radio": self.config.get(
                "radio", {}
            ),  # Read from live config, not cached radio_config
            "duty_cycle": {
                "max_airtime_percent": max_duty_cycle_percent,
                "enforcement_enabled": duty_cycle_config.get("enforcement_enabled", True),
@@ -854,7 +965,10 @@ class RepeaterHandler(BaseHandler):
            return

        try:
            noise_floor = self.get_noise_floor()
            # Run in executor so KISS modem's blocking _send_command (up to 5s timeout)
            # does not block the event loop and hang the process / delay Ctrl+C.
            loop = asyncio.get_running_loop()
            noise_floor = await loop.run_in_executor(None, self.get_noise_floor)
            if noise_floor is not None:
                self.storage.record_noise_floor(noise_floor)
                logger.debug(f"Recorded noise floor: {noise_floor} dBm")
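run_in_executor is the standard way to keep a blocking call (here, the KISS modem's serial command with its 5 s timeout) off the event loop. A minimal runnable sketch with a stand-in for the slow read:

import asyncio
import time

def read_noise_floor_blocking() -> int:
    time.sleep(2)  # stand-in for a slow serial command
    return -118

async def main():
    loop = asyncio.get_running_loop()
    # The blocking call runs on the default thread pool; the loop stays responsive.
    noise_floor = await loop.run_in_executor(None, read_noise_floor_blocking)
    print(f"noise floor: {noise_floor} dBm")

asyncio.run(main())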
@@ -900,8 +1014,10 @@ class RepeaterHandler(BaseHandler):
        try:
            # Refresh delay factors
            self.tx_delay_factor = self.config.get("delays", {}).get("tx_delay_factor", 1.0)
            self.direct_tx_delay_factor = self.config.get("delays", {}).get("direct_tx_delay_factor", 0.5)

            self.direct_tx_delay_factor = self.config.get("delays", {}).get(
                "direct_tx_delay_factor", 0.5
            )

            # Refresh repeater settings
            repeater_config = self.config.get("repeater", {})
            self.use_score_for_tx = repeater_config.get("use_score_for_tx", False)

@@ -1,11 +1,19 @@
"""Handler helper modules for pyMC Repeater."""

from .trace import TraceHelper
from .discovery import DiscoveryHelper
from .advert import AdvertHelper
from .discovery import DiscoveryHelper
from .login import LoginHelper
from .text import TextHelper
from .path import PathHelper
from .protocol_request import ProtocolRequestHelper
from .text import TextHelper
from .trace import TraceHelper

__all__ = ["TraceHelper", "DiscoveryHelper", "AdvertHelper", "LoginHelper", "TextHelper", "PathHelper", "ProtocolRequestHelper"]
__all__ = [
    "TraceHelper",
    "DiscoveryHelper",
    "AdvertHelper",
    "LoginHelper",
    "TextHelper",
    "PathHelper",
    "ProtocolRequestHelper",
]

@@ -58,7 +58,7 @@ class ACL:
        sync_since: int = None,
        target_identity_hash: int = None,
        target_identity_name: str = None,
        target_identity_config: dict = None
        target_identity_config: dict = None,
    ) -> tuple[bool, int]:

        target_identity_config = target_identity_config or {}
@@ -79,9 +79,11 @@ class ACL:
            # Empty strings are treated as "not set"
            admin_pwd = identity_settings.get("admin_password") or None
            guest_pwd = identity_settings.get("guest_password") or None

            if not admin_pwd and not guest_pwd:
                logger.error(f"Room server '{target_identity_name}' has no passwords configured! Set admin_password and/or guest_password in settings.")
                logger.error(
                    f"Room server '{target_identity_name}' has no passwords configured! Set admin_password and/or guest_password in settings."
                )
                return False, 0
        else:
            # Repeater uses global passwords from its own security section
@@ -91,10 +93,12 @@ class ACL:
                f"Repeater passwords - admin: {'SET' if admin_pwd else 'NONE'}, "
                f"guest: {'SET' if guest_pwd else 'NONE'}"
            )

        if target_identity_name:
            logger.debug(f"Authenticating for identity '{target_identity_name}' (room_server={is_room_server})")

            logger.debug(
                f"Authenticating for identity '{target_identity_name}' (room_server={is_room_server})"
            )

        pub_key = client_identity.get_public_key()[:PUB_KEY_SIZE]

        if not password:
@@ -111,8 +115,12 @@ class ACL:

        permissions = 0
        logger.debug(f"Comparing password (len={len(password)}) against admin/guest")
        logger.debug(f"Admin pwd len={len(admin_pwd) if admin_pwd else 0}, Guest pwd len={len(guest_pwd) if guest_pwd else 0}")
        logger.debug(f"Password comparison: '{password}' vs admin='{admin_pwd[:4]}...' ({len(admin_pwd)} chars)")
        logger.debug(
            f"Admin pwd len={len(admin_pwd) if admin_pwd else 0}, Guest pwd len={len(guest_pwd) if guest_pwd else 0}"
        )
        logger.debug(
            f"Password comparison: '{password}' vs admin='{admin_pwd[:4]}...' ({len(admin_pwd)} chars)"
        )
        if admin_pwd and password == admin_pwd:
            permissions = PERM_ACL_ADMIN
            logger.info(f"Admin password validated for '{target_identity_name or 'unknown'}'")

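Worth flagging for operators reading these debug lines: they print password material, and the == comparison is not constant-time. The commit changes neither; the usual standard-library alternative, shown purely as a sketch and not as what this code does, is hmac.compare_digest:

import hmac

def passwords_match(supplied: str, expected: str) -> bool:
    # compare_digest takes time independent of where the inputs first differ
    return hmac.compare_digest(supplied.encode(), expected.encode())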
@@ -5,6 +5,7 @@ This module processes advertisement packets for neighbor tracking and discovery.
Includes adaptive rate limiting based on mesh activity.
"""

import asyncio
import logging
import time
from enum import Enum
@@ -534,16 +535,22 @@ class AdvertHelper:
        if pubkey == local_pubkey:
            logger.debug("Ignoring own advert in neighbor tracking")
            return

        # Get route type from packet header
        from pymc_core.protocol.constants import PH_ROUTE_MASK

        route_type = packet.header & PH_ROUTE_MASK

        # Check if this is a new neighbor

        # Check if this is a new neighbor (run DB read in thread to avoid blocking event loop)
        current_time = now
        if pubkey not in self._known_neighbors:
            # Only check database if not in cache
            current_neighbors = self.storage.get_neighbors() if self.storage else {}
            if self.storage:
                current_neighbors = await asyncio.to_thread(
                    self.storage.get_neighbors
                )
            else:
                current_neighbors = {}
            is_new_neighbor = pubkey not in current_neighbors

            if is_new_neighbor:
@@ -573,10 +580,14 @@ class AdvertHelper:
            "zero_hop": zero_hop,
        }

        # Store to database
        # Store to database (run in thread so event loop stays responsive;
        # blocking here can cause companion TCP clients to disconnect)
        if self.storage:
            try:
                self.storage.record_advert(advert_record)
                await asyncio.to_thread(
                    self.storage.record_advert,
                    advert_record,
                )
            except Exception as e:
                logger.error(f"Failed to store advert record: {e}")

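asyncio.to_thread (Python 3.9+) is shorthand for the executor pattern used earlier; positional arguments are forwarded to the blocking callable on a worker thread. A compact sketch with a fake storage layer:

import asyncio
import time

class FakeStorage:
    def record_advert(self, record: dict) -> None:
        time.sleep(0.5)  # stand-in for a blocking SQLite write

async def main():
    storage = FakeStorage()
    # The write happens off the event loop, so companion TCP clients keep being serviced.
    await asyncio.to_thread(storage.record_advert, {"pubkey": "ab" * 32})

asyncio.run(main())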
@@ -7,6 +7,7 @@ allowing other nodes to discover repeaters on the mesh network.

import asyncio
import logging

from pymc_core.node.handlers.control import ControlHandler

logger = logging.getLogger("DiscoveryHelper")
@@ -21,6 +22,7 @@ class DiscoveryHelper:
        packet_injector=None,
        node_type: int = 2,
        log_fn=None,
        debug_log_fn=None,
    ):
        """
        Initialize the discovery helper.
@@ -30,13 +32,18 @@ class DiscoveryHelper:
            packet_injector: Callable to inject new packets into the router for sending
            node_type: Node type identifier (2 = Repeater)
            log_fn: Optional logging function for ControlHandler
            debug_log_fn: Optional logging for verbose ControlHandler messages (e.g. callback
                presence). Pass logger.debug to avoid INFO noise when forwarding to companions.
        """
        self.local_identity = local_identity
        self.packet_injector = packet_injector  # Function to inject packets into router
        self.node_type = node_type

        # Create ControlHandler internally as a parsing utility
        self.control_handler = ControlHandler(log_fn=log_fn or logger.info)
        self.control_handler = ControlHandler(
            log_fn=log_fn or logger.info,
            debug_log_fn=debug_log_fn,
        )

        # Set up the request callback
        self.control_handler.set_request_callback(self._on_discovery_request)

@@ -8,6 +8,7 @@ import asyncio
import logging

from pymc_core.node.handlers.login_server import LoginServerHandler
from pymc_core.protocol.constants import PAYLOAD_TYPE_ANON_REQ

logger = logging.getLogger("LoginHelper")

@@ -22,9 +23,11 @@ class LoginHelper:
        self.handlers = {}
        self.acls = {}  # Per-identity ACLs keyed by hash_byte

    def register_identity(self, name: str, identity, identity_type: str = "room_server", config: dict = None):
    def register_identity(
        self, name: str, identity, identity_type: str = "room_server", config: dict = None
    ):
        config = config or {}

        hash_byte = identity.get_public_key()[0]

        # Create ACL for this identity
@@ -79,9 +82,11 @@ class LoginHelper:

        self.acls[hash_byte] = identity_acl
        logger.info(f"Created ACL for {identity_type} '{name}': hash=0x{hash_byte:02X}")

        # Create auth callback that uses this identity's ACL
        def auth_callback_with_context(client_identity, shared_secret, password, timestamp, sync_since=None):
        def auth_callback_with_context(
            client_identity, shared_secret, password, timestamp, sync_since=None
        ):
            return identity_acl.authenticate_client(
                client_identity=client_identity,
                shared_secret=shared_secret,
@@ -90,9 +95,9 @@ class LoginHelper:
                sync_since=sync_since,
                target_identity_hash=hash_byte,
                target_identity_name=name,
                target_identity_config=config
                target_identity_config=config,
            )

        handler = LoginServerHandler(
            local_identity=identity,
            log_fn=self.log_fn,
@@ -103,11 +108,9 @@ class LoginHelper:
        handler.set_send_packet_callback(self._send_packet_with_delay)

        self.handlers[hash_byte] = handler

        logger.info(f"Registered {identity_type} '{name}' login handler: hash=0x{hash_byte:02X}")

    async def process_login_packet(self, packet):

        try:
@@ -123,9 +126,14 @@ class LoginHelper:
            packet.mark_do_not_retransmit()
            return True
        else:
            logger.debug(f"No login handler registered for hash 0x{dest_hash:02X}, allowing forward")
            # ANON_REQ to other nodes (e.g. owner-info to firmware) is normal; skip log to avoid spam
            ptype = getattr(packet, "get_payload_type", lambda: None)()
            if ptype != PAYLOAD_TYPE_ANON_REQ:
                logger.debug(
                    f"No login handler registered for hash 0x{dest_hash:02X}, allowing forward"
                )
            return False

        except Exception as e:
            logger.error(f"Error processing login packet: {e}")
            return False

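The ptype lookup above uses a defensive getattr-with-callable-fallback, so packet objects lacking get_payload_type quietly yield None instead of raising. The idiom in isolation:

class Bare:
    pass

class Typed:
    def get_payload_type(self) -> int:
        return 7

for packet in (Bare(), Typed()):
    ptype = getattr(packet, "get_payload_type", lambda: None)()
    print(ptype)  # None, then 7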
@@ -1,8 +1,9 @@
import logging
from typing import Optional, Dict, Any, Callable
import yaml
from pathlib import Path
import time
from pathlib import Path
from typing import Any, Callable, Dict, Optional

import yaml

logger = logging.getLogger(__name__)

@@ -10,15 +11,15 @@ logger = logging.getLogger(__name__)
class MeshCLI:

    def __init__(
        self,
        config_path: str,
        config: Dict[str, Any],
        self,
        config_path: str,
        config: Dict[str, Any],
        config_manager,  # ConfigManager instance for save & live updates
        identity_type: str = "repeater",
        enable_regions: bool = True,
        send_advert_callback: Optional[Callable] = None,
        identity = None,
        storage_handler = None
        identity=None,
        storage_handler=None,
    ):

        self.config_path = Path(config_path)
@@ -29,39 +30,39 @@ class MeshCLI:
        self.send_advert_callback = send_advert_callback
        self.identity = identity
        self.storage_handler = storage_handler

        # Get repeater config shortcut
        self.repeater_config = config.get('repeater', {})

        self.repeater_config = config.get("repeater", {})

    def handle_command(self, sender_pubkey: bytes, command: str, is_admin: bool) -> str:

        # Check admin permission first
        if not is_admin:
            return "Error: Admin permission required"

        logger.debug(f"handle_command received: '{command}' (len={len(command)})")

        # Extract optional sequence prefix (XX|)
        prefix = ""
        if len(command) > 4 and command[2] == '|':
        if len(command) > 4 and command[2] == "|":
            prefix = command[:3]
            command = command[3:]
            logger.debug(f"Extracted prefix: '{prefix}', remaining command: '{command}'")

        # Strip leading/trailing whitespace
        command = command.strip()
        logger.debug(f"After strip: '{command}'")

        # Route to appropriate handler
        reply = self._route_command(command)

        # Add prefix back to reply if present
        if prefix:
            return prefix + reply
        return reply

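The XX| sequence prefix is peeled off before routing and glued back onto the reply so clients can correlate responses. A self-contained round trip of the same parsing, with a made-up echo handler standing in for _route_command:

def with_prefix(command: str) -> str:
    prefix = ""
    if len(command) > 4 and command[2] == "|":
        prefix, command = command[:3], command[3:]
    reply = f"echo: {command.strip()}"  # stand-in for _route_command
    return prefix + reply

print(with_prefix("07|get name"))  # 07|echo: get name
print(with_prefix("get name"))     # echo: get name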
    def _route_command(self, command: str) -> str:

        # System commands
        if command == "reboot":
            return self._cmd_reboot()
@@ -79,97 +80,98 @@ class MeshCLI:
            return self._cmd_clear_stats()
        elif command == "ver":
            return self._cmd_version()

        # Get commands
        elif command.startswith("get "):
            return self._cmd_get(command[4:])

        # Set commands
        elif command.startswith("set "):
            return self._cmd_set(command[4:])

        # ACL commands
        elif command.startswith("setperm "):
            return self._cmd_setperm(command)
        elif command == "get acl":
            return "Error: Use 'get acl' via serial console only"

        # Region commands (repeaters only)
        elif command.startswith("region"):
            if self.enable_regions:
                return self._cmd_region(command)
            else:
                return "Error: Region commands not available for room servers"

        # Neighbor commands
        elif command == "neighbors":
            return self._cmd_neighbors()
        elif command.startswith("neighbor.remove "):
            return self._cmd_neighbor_remove(command)

        # Temporary radio params
        elif command.startswith("tempradio "):
            return self._cmd_tempradio(command)

        # Sensor commands
        elif command.startswith("sensor "):
            return "Error: Sensor commands not implemented in Python repeater"

        # GPS commands
        elif command.startswith("gps"):
            return "Error: GPS commands not implemented in Python repeater"

        # Logging commands
        elif command.startswith("log "):
            return self._cmd_log(command)

        # Statistics commands
        elif command.startswith("stats-"):
            return "Error: Stats commands not fully implemented yet"

        else:
            return "Unknown command"

    # ==================== System Commands ====================

    def _cmd_reboot(self) -> str:
        """Reboot the repeater process."""
        from repeater.service_utils import restart_service

        logger.warning("Reboot command received via mesh CLI")
        success, message = restart_service()

        if success:
            return f"OK - {message}"
        else:
            return f"Error: {message}"

    def _cmd_advert(self) -> str:
        """Send self advertisement."""
        if not self.send_advert_callback:
            logger.warning("Advert command received but no callback configured")
            return "Error: Advert functionality not configured"

        try:
            import asyncio

            async def delayed_advert():
                """Delay advert to let CLI response send first (matches C++ 1500ms delay)."""
                await asyncio.sleep(1.5)
                await self.send_advert_callback()

            asyncio.create_task(delayed_advert())
            logger.info("Advert scheduled for sending (1.5s delay)")
            return "OK - Advert sent"
        except Exception as e:
            logger.error(f"Failed to schedule advert: {e}", exc_info=True)
            return f"Error: {e}"

    def _cmd_clock(self, command: str) -> str:
        """Handle clock commands."""
        if command == "clock":
            # Display current time
            import datetime

            dt = datetime.datetime.utcnow()
            return f"{dt.hour:02d}:{dt.minute:02d} - {dt.day}/{dt.month}/{dt.year} UTC"
        elif command == "clock sync":
@@ -177,91 +179,94 @@ class MeshCLI:
            return "OK - clock sync not needed (system time used)"
        else:
            return "Unknown clock command"

    def _cmd_time(self, command: str) -> str:
        """Set time - not supported in Python (use system time)."""
        return "Error: Time setting not supported (system time is used)"

    def _cmd_password(self, command: str) -> str:
        """Change admin password."""
        new_password = command[9:].strip()

        if not new_password:
            return "Error: Password cannot be empty"

        # Update security config
        if 'security' not in self.config:
            self.config['security'] = {}

        self.config['security']['password'] = new_password

        if "security" not in self.config:
            self.config["security"] = {}

        self.config["security"]["password"] = new_password

        # Save config and live update
        try:
            self.config_manager.save_to_file()
            self.config_manager.live_update_daemon(['security'])
            saved, err = self.config_manager.save_to_file()
            if not saved:
                logger.error(f"Failed to save password: {err}")
                return f"Error: Failed to save config: {err}"
            self.config_manager.live_update_daemon(["security"])
            return f"password now: {new_password}"
        except Exception as e:
            logger.error(f"Failed to save password: {e}")
            return "Error: Failed to save password"

    def _cmd_clear_stats(self) -> str:
        """Clear statistics."""
        # TODO: Implement stats clearing
        return "Error: Not yet implemented"

    def _cmd_version(self) -> str:
        """Get version information."""
        role = "room_server" if self.identity_type == "room_server" else "repeater"
        version = self.config.get('version', '1.0.0')
        version = self.config.get("version", "1.0.0")
        return f"pyMC_{role} v{version}"

    # ==================== Get Commands ====================

    def _cmd_get(self, param: str) -> str:
        """Handle get commands."""
        param = param.strip()
        logger.debug(f"_cmd_get called with param: '{param}' (len={len(param)})")

        if param == "af":
            af = self.repeater_config.get('airtime_factor', 1.0)
            af = self.repeater_config.get("airtime_factor", 1.0)
            return f"> {af}"

        elif param == "name":
            name = self.repeater_config.get('name', 'Unknown')
            name = self.repeater_config.get("name", "Unknown")
            return f"> {name}"

        elif param == "repeat":
            disabled = self.repeater_config.get('disable_forward', False)
            disabled = self.repeater_config.get("disable_forward", False)
            return f"> {'off' if disabled else 'on'}"

        elif param == "lat":
            lat = self.repeater_config.get('latitude', 0.0)
            lat = self.repeater_config.get("latitude", 0.0)
            return f"> {lat}"

        elif param == "lon":
            lon = self.repeater_config.get('longitude', 0.0)
            lon = self.repeater_config.get("longitude", 0.0)
            return f"> {lon}"

        elif param == "radio":
            radio = self.config.get('radio', {})
            freq_hz = radio.get('frequency', 915000000)
            bw_hz = radio.get('bandwidth', 125000)
            sf = radio.get('spreading_factor', 7)
            cr = radio.get('coding_rate', 5)
            radio = self.config.get("radio", {})
            freq_hz = radio.get("frequency", 915000000)
            bw_hz = radio.get("bandwidth", 125000)
            sf = radio.get("spreading_factor", 7)
            cr = radio.get("coding_rate", 5)
            # Convert Hz to MHz for freq, Hz to kHz for bandwidth (match C++ ftoa output)
            freq_mhz = freq_hz / 1_000_000.0
            bw_khz = bw_hz / 1_000.0
            return f"> {freq_mhz},{bw_khz},{sf},{cr}"

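With the defaults, the Hz-to-display conversion yields the familiar firmware-style tuple:

freq_hz, bw_hz = 915000000, 125000
print(f"> {freq_hz / 1_000_000.0},{bw_hz / 1_000.0},7,5")  # > 915.0,125.0,7,5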
elif param == "freq":
|
||||
freq_hz = self.config.get('radio', {}).get('frequency', 915000000)
|
||||
freq_hz = self.config.get("radio", {}).get("frequency", 915000000)
|
||||
freq_mhz = freq_hz / 1_000_000.0
|
||||
return f"> {freq_mhz}"
|
||||
|
||||
|
||||
elif param == "tx":
|
||||
power = self.config.get('radio', {}).get('tx_power', 20)
|
||||
power = self.config.get("radio", {}).get("tx_power", 20)
|
||||
return f"> {power}"
|
||||
|
||||
|
||||
elif param == "public.key":
|
||||
if not self.identity:
|
||||
return "Error: Identity not available"
|
||||
@@ -272,263 +277,263 @@ class MeshCLI:
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get public key: {e}")
|
||||
return f"Error: {e}"
|
||||
|
||||
|
||||
elif param == "role":
|
||||
role = "room_server" if self.identity_type == "room_server" else "repeater"
|
||||
return f"> {role}"
|
||||
|
||||
|
||||
elif param == "guest.password":
|
||||
guest_pw = self.config.get('security', {}).get('guest_password', '')
|
||||
guest_pw = self.config.get("security", {}).get("guest_password", "")
|
||||
return f"> {guest_pw}"
|
||||
|
||||
|
||||
elif param == "allow.read.only":
|
||||
allow = self.config.get('security', {}).get('allow_read_only', False)
|
||||
allow = self.config.get("security", {}).get("allow_read_only", False)
|
||||
return f"> {'on' if allow else 'off'}"
|
||||
|
||||
|
||||
elif param == "advert.interval":
|
||||
interval = self.repeater_config.get('advert_interval_minutes', 120)
|
||||
interval = self.repeater_config.get("advert_interval_minutes", 120)
|
||||
return f"> {interval}"
|
||||
|
||||
|
||||
elif param == "flood.advert.interval":
|
||||
interval = self.repeater_config.get('flood_advert_interval_hours', 24)
|
||||
interval = self.repeater_config.get("flood_advert_interval_hours", 24)
|
||||
return f"> {interval}"
|
||||
|
||||
|
||||
elif param == "flood.max":
|
||||
max_flood = self.repeater_config.get('max_flood_hops', 3)
|
||||
max_flood = self.repeater_config.get("max_flood_hops", 3)
|
||||
return f"> {max_flood}"
|
||||
|
||||
|
||||
elif param == "rxdelay":
|
||||
delay = self.repeater_config.get('rx_delay_base', 0.0)
|
||||
delay = self.repeater_config.get("rx_delay_base", 0.0)
|
||||
return f"> {delay}"
|
||||
|
||||
|
||||
elif param == "txdelay":
|
||||
delay = self.repeater_config.get('tx_delay_factor', 1.0)
|
||||
delay = self.repeater_config.get("tx_delay_factor", 1.0)
|
||||
return f"> {delay}"
|
||||
|
||||
|
||||
elif param == "direct.txdelay":
|
||||
delay = self.repeater_config.get('direct_tx_delay_factor', 0.5)
|
||||
delay = self.repeater_config.get("direct_tx_delay_factor", 0.5)
|
||||
return f"> {delay}"
|
||||
|
||||
|
||||
elif param == "multi.acks":
|
||||
acks = self.repeater_config.get('multi_acks', 0)
|
||||
acks = self.repeater_config.get("multi_acks", 0)
|
||||
return f"> {acks}"
|
||||
|
||||
|
||||
elif param == "int.thresh":
|
||||
thresh = self.repeater_config.get('interference_threshold', -120)
|
||||
thresh = self.repeater_config.get("interference_threshold", -120)
|
||||
return f"> {thresh}"
|
||||
|
||||
|
||||
elif param == "agc.reset.interval":
|
||||
interval = self.repeater_config.get('agc_reset_interval', 0)
|
||||
interval = self.repeater_config.get("agc_reset_interval", 0)
|
||||
return f"> {interval}"
|
||||
|
||||
|
||||
else:
|
||||
return f"??: {param}"
|
||||
|
||||
|
||||
# ==================== Set Commands ====================
|
||||
|
||||
|
||||
def _cmd_set(self, param: str) -> str:
|
||||
"""Handle set commands."""
|
||||
parts = param.split(None, 1)
|
||||
if len(parts) < 2:
|
||||
return "Error: Missing value"
|
||||
|
||||
|
||||
key, value = parts[0], parts[1]
|
||||
|
||||
|
||||
try:
|
||||
if key == "af":
|
||||
self.repeater_config['airtime_factor'] = float(value)
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['repeater'])
|
||||
self.repeater_config["airtime_factor"] = float(value)
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["repeater"])
|
||||
return "OK"
|
||||
|
||||
|
||||
elif key == "name":
|
||||
self.repeater_config['node_name'] = value
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['repeater'])
|
||||
self.repeater_config["node_name"] = value
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["repeater"])
|
||||
return "OK"
|
||||
|
||||
|
||||
elif key == "repeat":
|
||||
disabled = value.lower() == "off"
|
||||
self.repeater_config['disable_forward'] = disabled
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['repeater'])
|
||||
self.repeater_config["disable_forward"] = disabled
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["repeater"])
|
||||
return f"OK - repeat is now {'OFF' if disabled else 'ON'}"
|
||||
|
||||
|
||||
elif key == "lat":
|
||||
self.repeater_config['latitude'] = float(value)
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['repeater'])
|
||||
self.repeater_config["latitude"] = float(value)
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["repeater"])
|
||||
return "OK"
|
||||
|
||||
|
||||
elif key == "lon":
|
||||
self.repeater_config['longitude'] = float(value)
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['repeater'])
|
||||
self.repeater_config["longitude"] = float(value)
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["repeater"])
|
||||
return "OK"
|
||||
|
||||
|
||||
elif key == "radio":
|
||||
# Format: freq bw sf cr
|
||||
radio_parts = value.split()
|
||||
if len(radio_parts) != 4:
|
||||
return "Error: Expected freq bw sf cr"
|
||||
|
||||
if 'radio' not in self.config:
|
||||
self.config['radio'] = {}
|
||||
|
||||
self.config['radio']['frequency'] = float(radio_parts[0])
|
||||
self.config['radio']['bandwidth'] = float(radio_parts[1])
|
||||
self.config['radio']['spreading_factor'] = int(radio_parts[2])
|
||||
self.config['radio']['coding_rate'] = int(radio_parts[3])
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['radio'])
|
||||
|
||||
if "radio" not in self.config:
|
||||
self.config["radio"] = {}
|
||||
|
||||
self.config["radio"]["frequency"] = float(radio_parts[0])
|
||||
self.config["radio"]["bandwidth"] = float(radio_parts[1])
|
||||
self.config["radio"]["spreading_factor"] = int(radio_parts[2])
|
||||
self.config["radio"]["coding_rate"] = int(radio_parts[3])
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["radio"])
|
||||
return "OK - restart repeater to apply"
|
||||
|
||||
|
||||
elif key == "freq":
|
||||
if 'radio' not in self.config:
|
||||
self.config['radio'] = {}
|
||||
self.config['radio']['frequency'] = float(value)
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['radio'])
|
||||
if "radio" not in self.config:
|
||||
self.config["radio"] = {}
|
||||
self.config["radio"]["frequency"] = float(value)
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["radio"])
|
||||
return "OK - restart repeater to apply"
|
||||
|
||||
|
||||
elif key == "tx":
|
||||
if 'radio' not in self.config:
|
||||
self.config['radio'] = {}
|
||||
self.config['radio']['tx_power'] = int(value)
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['radio'])
|
||||
if "radio" not in self.config:
|
||||
self.config["radio"] = {}
|
||||
self.config["radio"]["tx_power"] = int(value)
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["radio"])
|
||||
return "OK"
|
||||
|
||||
|
||||
elif key == "guest.password":
|
||||
if 'security' not in self.config:
|
||||
self.config['security'] = {}
|
||||
self.config['security']['guest_password'] = value
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['security'])
|
||||
if "security" not in self.config:
|
||||
self.config["security"] = {}
|
||||
self.config["security"]["guest_password"] = value
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["security"])
|
||||
return "OK"
|
||||
|
||||
|
||||
elif key == "allow.read.only":
|
||||
if 'security' not in self.config:
|
||||
self.config['security'] = {}
|
||||
self.config['security']['allow_read_only'] = value.lower() == "on"
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['security'])
|
||||
if "security" not in self.config:
|
||||
self.config["security"] = {}
|
||||
self.config["security"]["allow_read_only"] = value.lower() == "on"
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["security"])
|
||||
return "OK"
|
||||
|
||||
|
||||
elif key == "advert.interval":
|
||||
mins = int(value)
|
||||
if mins > 0 and (mins < 60 or mins > 240):
|
||||
return "Error: interval range is 60-240 minutes"
|
||||
self.repeater_config['advert_interval_minutes'] = mins
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['repeater'])
|
||||
self.repeater_config["advert_interval_minutes"] = mins
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["repeater"])
|
||||
return "OK"
|
||||
|
||||
|
||||
elif key == "flood.advert.interval":
|
||||
hours = int(value)
|
||||
if (hours > 0 and hours < 3) or hours > 48:
|
||||
return "Error: interval range is 3-48 hours"
|
||||
self.repeater_config['flood_advert_interval_hours'] = hours
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['repeater'])
|
||||
self.repeater_config["flood_advert_interval_hours"] = hours
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["repeater"])
|
||||
return "OK"
|
||||
|
||||
|
||||
elif key == "flood.max":
|
||||
max_val = int(value)
|
||||
if max_val > 64:
|
||||
return "Error: max 64"
|
||||
self.repeater_config['max_flood_hops'] = max_val
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['repeater'])
|
||||
self.repeater_config["max_flood_hops"] = max_val
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["repeater"])
|
||||
return "OK"
|
||||
|
||||
|
||||
elif key == "rxdelay":
|
||||
delay = float(value)
|
||||
if delay < 0:
|
||||
return "Error: cannot be negative"
|
||||
self.repeater_config['rx_delay_base'] = delay
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['repeater', 'delays'])
|
||||
self.repeater_config["rx_delay_base"] = delay
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["repeater", "delays"])
|
||||
return "OK"
|
||||
|
||||
|
||||
elif key == "txdelay":
|
||||
delay = float(value)
|
||||
if delay < 0:
|
||||
return "Error: cannot be negative"
|
||||
self.repeater_config['tx_delay_factor'] = delay
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['repeater', 'delays'])
|
||||
self.repeater_config["tx_delay_factor"] = delay
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["repeater", "delays"])
|
||||
return "OK"
|
||||
|
||||
|
||||
elif key == "direct.txdelay":
|
||||
delay = float(value)
|
||||
if delay < 0:
|
||||
return "Error: cannot be negative"
|
||||
self.repeater_config['direct_tx_delay_factor'] = delay
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['repeater', 'delays'])
|
||||
self.repeater_config["direct_tx_delay_factor"] = delay
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["repeater", "delays"])
|
||||
return "OK"
|
||||
|
||||
|
||||
elif key == "multi.acks":
|
||||
self.repeater_config['multi_acks'] = int(value)
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['repeater'])
|
||||
self.repeater_config["multi_acks"] = int(value)
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["repeater"])
|
||||
return "OK"
|
||||
|
||||
|
||||
elif key == "int.thresh":
|
||||
self.repeater_config['interference_threshold'] = int(value)
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['repeater'])
|
||||
self.repeater_config["interference_threshold"] = int(value)
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["repeater"])
|
||||
return "OK"
|
||||
|
||||
|
||||
elif key == "agc.reset.interval":
|
||||
interval = int(value)
|
||||
# Round to nearest multiple of 4
|
||||
rounded = (interval // 4) * 4
|
||||
self.repeater_config['agc_reset_interval'] = rounded
|
||||
self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(['repeater'])
|
||||
self.repeater_config["agc_reset_interval"] = rounded
|
||||
saved, _ = self.config_manager.save_to_file()
|
||||
self.config_manager.live_update_daemon(["repeater"])
|
||||
return f"OK - interval rounded to {rounded}"
|
||||
|
||||
|
||||
else:
|
||||
return f"unknown config: {key}"
|
||||
|
||||
|
||||
except ValueError as e:
|
||||
return f"Error: invalid value - {e}"
|
||||
except Exception as e:
|
||||
logger.error(f"Set command error: {e}")
|
||||
return f"Error: {e}"
|
||||
|
||||
|
||||
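Every set branch follows the same write path: mutate the in-memory config, persist, then live-push the changed sections to the daemon. Condensed into a helper for illustration (hypothetical; only the (ok, err) return shape of save_to_file is taken from the diff):

def apply_setting(config_manager, section: dict, key: str, value, live_sections: list) -> str:
    section[key] = value
    saved, err = config_manager.save_to_file()  # returns (ok, error) per the diff
    if not saved:
        return f"Error: Failed to save config: {err}"
    config_manager.live_update_daemon(live_sections)
    return "OK"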
    # ==================== ACL Commands ====================

    def _cmd_setperm(self, command: str) -> str:
        """Set permissions for a public key."""
        # Format: setperm {pubkey-hex} {permissions-int}
        parts = command[8:].split()
        if len(parts) < 2:
            return "Err - bad params"

        pubkey_hex = parts[0]
        try:
            permissions = int(parts[1])
        except ValueError:
            return "Err - invalid permissions"

        # TODO: Apply permissions via ACL
        logger.info(f"setperm command: {pubkey_hex} -> {permissions}")
        return "Error: Not yet implemented - use config file"

    # ==================== Region Commands ====================

    def _cmd_region(self, command: str) -> str:
        """Handle region commands."""
        parts = command.split()

        if len(parts) == 1:
            return "Error: Region commands not implemented in Python repeater"

        subcommand = parts[1]

        if subcommand == "load":
            return "Error: Region commands not implemented"
        elif subcommand == "save":
@@ -537,80 +542,82 @@ class MeshCLI:
            return "Error: Region commands not implemented"
        else:
            return "Err - ??"

    # ==================== Neighbor Commands ====================

    def _cmd_neighbors(self) -> str:
        """List neighbors."""
        if not self.storage_handler:
            return "Error: Storage not available"

        try:
            neighbors = self.storage_handler.get_neighbors()

            if not neighbors:
                return "No neighbors discovered yet"

            # Filter to only show repeaters and zero hop nodes
            filtered_neighbors = {
                pubkey: info for pubkey, info in neighbors.items()
                if info.get('is_repeater', False) or info.get('zero_hop', False)
                pubkey: info
                for pubkey, info in neighbors.items()
                if info.get("is_repeater", False) or info.get("zero_hop", False)
            }

            if not filtered_neighbors:
                return "No repeaters or zero hop neighbors discovered yet"

            # Format output similar to C++ version
            # Format: "<pubkey_prefix> heard Xs ago"
            import time

            current_time = int(time.time())

            lines = []
            for pubkey, info in filtered_neighbors.items():
                last_seen = info.get('last_seen', 0)
                last_seen = info.get("last_seen", 0)
                seconds_ago = int(current_time - last_seen)

                # Get first 4 bytes of pubkey as hex (match C++ format)
                pubkey_short = pubkey[:8] if len(pubkey) >= 8 else pubkey
                snr = info.get('snr', 0) or 0

                snr = info.get("snr", 0) or 0

                # Format: <4byte_hex>:<seconds_ago>:<snr> (matches C++ format)
                lines.append(f"{pubkey_short}:{seconds_ago}:{int(snr)}")

            return "\n".join(lines)

        except Exception as e:
            logger.error(f"Failed to list neighbors: {e}", exc_info=True)
            return f"Error: {e}"

    def _cmd_neighbor_remove(self, command: str) -> str:
        """Remove a neighbor."""
        pubkey_hex = command[16:].strip()

        if not pubkey_hex:
            return "ERR: Missing pubkey"

        # TODO: Remove neighbor from routing table
        logger.info(f"neighbor.remove: {pubkey_hex}")
        return "Error: Not yet implemented"

    # ==================== Temporary Radio Commands ====================

    def _cmd_tempradio(self, command: str) -> str:
        """Apply temporary radio parameters."""
        # Format: tempradio {freq} {bw} {sf} {cr} {timeout_mins}
        parts = command[10:].split()

        if len(parts) < 5:
            return "Error: Expected freq bw sf cr timeout_mins"

        try:
            freq = float(parts[0])
            bw = float(parts[1])
            sf = int(parts[2])
            cr = int(parts[3])
            timeout_mins = int(parts[4])

            # Validate
            if not (300.0 <= freq <= 2500.0):
                return "Error: invalid frequency"
@@ -622,16 +629,16 @@ class MeshCLI:
                return "Error: invalid coding rate"
            if timeout_mins <= 0:
                return "Error: invalid timeout"

            # TODO: Apply temporary radio parameters
            logger.info(f"tempradio: {freq}MHz {bw}kHz SF{sf} CR4/{cr} for {timeout_mins}min")
            return "Error: Not yet implemented"

        except ValueError:
            return "Error, invalid params"

    # ==================== Logging Commands ====================

    def _cmd_log(self, command: str) -> str:
        """Handle log commands."""
        if command == "log start":

@@ -13,20 +13,20 @@ class PathHelper:
    async def process_path_packet(self, packet):

        from pymc_core.protocol.crypto import CryptoUtils

        try:
            if len(packet.payload) < 2:
                return False

            dest_hash = packet.payload[0]
            src_hash = packet.payload[1]

            # Get the ACL for this destination identity
            identity_acl = self.acl_dict.get(dest_hash)
            if not identity_acl:
                logger.debug(f"No ACL for dest 0x{dest_hash:02X}, allowing forward")
                return False

            # Find the client by source hash
            client = None
            for client_info in identity_acl.get_all_clients():
@@ -34,57 +34,59 @@ class PathHelper:
                if pubkey[0] == src_hash:
                    client = client_info
                    break

            if not client:
                logger.debug(f"PATH packet from unknown client 0x{src_hash:02X}, allowing forward")
                return False

            # Get shared secret for decryption
            shared_secret = client.shared_secret
            if not shared_secret or len(shared_secret) == 0:
                logger.debug(f"No shared secret for client 0x{src_hash:02X}, cannot decrypt PATH")
                return False

            # Decrypt the PATH packet payload
            # Payload format: dest_hash(1) + src_hash(1) + mac(2) + encrypted_data
            if len(packet.payload) < 4:
                logger.debug(f"PATH packet too short: {len(packet.payload)} bytes")
                return False

            mac_and_data = packet.payload[2:]  # Skip dest_hash and src_hash
            aes_key = shared_secret[:16]
            decrypted = CryptoUtils.mac_then_decrypt(aes_key, shared_secret, mac_and_data)

            if not decrypted:
                logger.debug(f"Failed to decrypt PATH packet from 0x{src_hash:02X}")
                return False

            # Parse decrypted PATH data
            # Format: path_len(1) + path[path_len] + extra_type(1) + extra[...]
            if len(decrypted) < 1:
                logger.debug(f"Decrypted PATH data too short")
                return False

            path_len = decrypted[0]
            if len(decrypted) < 1 + path_len:
                logger.debug(f"PATH data truncated: need {1 + path_len} bytes, got {len(decrypted)}")
                logger.debug(
                    f"PATH data truncated: need {1 + path_len} bytes, got {len(decrypted)}"
                )
                return False

            path_data = decrypted[1:1 + path_len]

            path_data = decrypted[1 : 1 + path_len]

            # Update client's out_path (same as C++ memcpy)
            client.out_path = bytearray(path_data)
            client.out_path_len = path_len
            client.last_activity = int(time.time())

            logger.info(
                f"Updated out_path for client 0x{src_hash:02X} -> 0x{dest_hash:02X}: "
                f"path_len={path_len}, path={[hex(b) for b in path_data]}"
            )

            # Don't mark as do_not_retransmit - let it forward normally
            return False

        except Exception as e:
            logger.error(f"Error processing PATH packet: {e}", exc_info=True)
            return False

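The PATH payload layout from the comments — dest(1) + src(1) + mac(2) + ciphertext, where the plaintext is a length-prefixed path — can be exercised on plain bytes; the decrypt step is faked here since CryptoUtils belongs to pymc_core:

payload = bytes([0x2A, 0x7F]) + bytes([0xBE, 0xEF]) + b"...ciphertext..."
dest_hash, src_hash = payload[0], payload[1]
mac_and_data = payload[2:]  # 2-byte MAC followed by the encrypted path record

decrypted = bytes([3, 0x11, 0x22, 0x33, 0x00])  # pretend plaintext: path_len + path + extra_type
path_len = decrypted[0]
assert len(decrypted) >= 1 + path_len
path_data = decrypted[1 : 1 + path_len]
assert list(path_data) == [0x11, 0x22, 0x33]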
@@ -10,12 +10,13 @@ import struct
import time

from pymc_core.node.handlers.protocol_request import (
    ProtocolRequestHandler,
    REQ_TYPE_GET_STATUS,
    REQ_TYPE_GET_TELEMETRY_DATA,
    REQ_TYPE_GET_ACCESS_LIST,
    REQ_TYPE_GET_NEIGHBOURS,
    SERVER_RESPONSE_DELAY_MS
    REQ_TYPE_GET_OWNER_INFO,
    REQ_TYPE_GET_STATUS,
    REQ_TYPE_GET_TELEMETRY_DATA,
    SERVER_RESPONSE_DELAY_MS,
    ProtocolRequestHandler,
)

logger = logging.getLogger("ProtocolRequestHelper")
@@ -23,8 +24,16 @@ logger = logging.getLogger("ProtocolRequestHelper")

class ProtocolRequestHelper:
    """Provides repeater-specific protocol request handlers."""

    def __init__(self, identity_manager, packet_injector=None, acl_dict=None, radio=None, engine=None, neighbor_tracker=None):

    def __init__(
        self,
        identity_manager,
        packet_injector=None,
        acl_dict=None,
        radio=None,
        engine=None,
        neighbor_tracker=None,
    ):

        self.identity_manager = identity_manager
        self.packet_injector = packet_injector
@@ -71,9 +80,10 @@ class ProtocolRequestHelper:
        }

        logger.info(f"Registered protocol request handler for '{name}': hash=0x{hash_byte:02X}")

    def _create_acl_contacts_wrapper(self, acl):
        """Create contacts wrapper from ACL."""

        class ACLContactsWrapper:
            def __init__(self, identity_acl):
                self._acl = identity_acl
@@ -138,22 +148,32 @@ class ProtocolRequestHelper:
        # uint32_t n_direct_dups;
        # uint32_t n_flood_dups;
        # uint32_t total_rx_air_time_secs;

        # Get stats from radio/engine
        noise_floor = int(self.radio.get_noise_floor() * 1.0) if self.radio else -120
        last_rssi = int(self.radio.last_rssi) if self.radio and hasattr(self.radio, 'last_rssi') else -120
        last_snr = int((self.radio.last_snr * 4.0) if self.radio and hasattr(self.radio, 'last_snr') else 0)

        last_rssi = (
            int(self.radio.last_rssi) if self.radio and hasattr(self.radio, "last_rssi") else -120
        )
        last_snr = int(
            (self.radio.last_snr * 4.0) if self.radio and hasattr(self.radio, "last_snr") else 0
        )

        # Get packet counts
        n_packets_recv = self.radio.packets_received if self.radio and hasattr(self.radio, 'packets_received') else 0
        n_packets_sent = self.radio.packets_sent if self.radio and hasattr(self.radio, 'packets_sent') else 0

        n_packets_recv = (
            self.radio.packets_received
            if self.radio and hasattr(self.radio, "packets_received")
            else 0
        )
        n_packets_sent = (
            self.radio.packets_sent if self.radio and hasattr(self.radio, "packets_sent") else 0
        )

        # Get airtime stats
        total_air_time_secs = 0
        total_rx_air_time_secs = 0
        if self.engine and hasattr(self.engine, 'airtime_manager'):
        if self.engine and hasattr(self.engine, "airtime_manager"):
            total_air_time_secs = int(self.engine.airtime_manager.total_tx_airtime_ms / 1000)

        # Get routing stats
        n_sent_flood = 0
        n_sent_direct = 0
@@ -161,18 +181,18 @@ class ProtocolRequestHelper:
        n_recv_direct = 0
        n_direct_dups = 0
        n_flood_dups = 0

        if self.engine:
            n_sent_flood = getattr(self.engine, 'sent_flood_count', 0)
            n_sent_direct = getattr(self.engine, 'sent_direct_count', 0)
            n_recv_flood = getattr(self.engine, 'recv_flood_count', 0)
            n_recv_direct = getattr(self.engine, 'recv_direct_count', 0)
            n_direct_dups = getattr(self.engine, 'direct_dup_count', 0)
            n_flood_dups = getattr(self.engine, 'flood_dup_count', 0)

            n_sent_flood = getattr(self.engine, "sent_flood_count", 0)
            n_sent_direct = getattr(self.engine, "sent_direct_count", 0)
            n_recv_flood = getattr(self.engine, "recv_flood_count", 0)
            n_recv_direct = getattr(self.engine, "recv_direct_count", 0)
            n_direct_dups = getattr(self.engine, "direct_dup_count", 0)
            n_flood_dups = getattr(self.engine, "flood_dup_count", 0)

        # Pack struct (little-endian)
        stats = struct.pack(
            '<HHhhIIIIIIIIIhIII',
            "<HHhhIIIIIIIIIhIII",
            0,  # batt_milli_volts (not available on Pi)
            0,  # curr_tx_queue_len (TODO)
            noise_floor,

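The pack format has to stay byte-compatible with the C++ stats struct it mirrors; with "<" (little-endian, no padding) the fields are 2 uint16, 2 int16, 9 uint32, 1 int16, 3 uint32. A quick size sanity check:

import struct

STATS_FMT = "<HHhhIIIIIIIIIhIII"
assert struct.calcsize(STATS_FMT) == 58  # 2*2 + 2*2 + 9*4 + 2 + 3*4 bytes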
@@ -5,10 +5,11 @@ Only users with admin permissions (via ACL) can execute these commands.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional, Dict, Any, Callable
|
||||
import yaml
|
||||
from pathlib import Path
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, Dict, Optional
|
||||
|
||||
import yaml
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -23,10 +24,10 @@ class MeshCLI:
|
||||
def __init__(
|
||||
self,
|
||||
config_path: str,
|
||||
config: Dict[str, Any],
|
||||
config: Dict[str, Any],
|
||||
save_config_callback: Callable,
|
||||
identity_type: str = "repeater",
|
||||
enable_regions: bool = True
|
||||
enable_regions: bool = True,
|
||||
):
|
||||
"""
|
||||
Initialize the CLI handler.
|
||||
@@ -43,10 +44,10 @@ class MeshCLI:
|
||||
self.save_config = save_config_callback
|
||||
self.identity_type = identity_type
|
||||
self.enable_regions = enable_regions
|
||||
|
||||
|
||||
# Get repeater config shortcut
|
||||
self.repeater_config = config.get('repeater', {})
|
||||
|
||||
self.repeater_config = config.get("repeater", {})
|
||||
|
||||
def handle_command(self, sender_pubkey: bytes, command: str, is_admin: bool) -> str:
|
||||
"""
|
||||
Handle an incoming command from a client.
|
||||
@@ -64,10 +65,10 @@ class MeshCLI:
|
||||
return "Error: Admin permission required"
|
||||
|
||||
logger.debug(f"handle_command received: '{command}' (len={len(command)})")
|
||||
|
||||
|
||||
# Extract optional sequence prefix (XX|)
|
||||
prefix = ""
|
||||
if len(command) > 4 and command[2] == '|':
|
||||
if len(command) > 4 and command[2] == "|":
|
||||
prefix = command[:3]
|
||||
command = command[3:]
|
||||
logger.debug(f"Extracted prefix: '{prefix}', remaining command: '{command}'")
|
||||
@@ -180,6 +181,7 @@ class MeshCLI:
|
||||
if command == "clock":
|
||||
# Display current time
|
||||
import datetime
|
||||
|
||||
dt = datetime.datetime.utcnow()
|
||||
return f"{dt.hour:02d}:{dt.minute:02d} - {dt.day}/{dt.month}/{dt.year} UTC"
|
||||
elif command == "clock sync":
|
||||
@@ -198,13 +200,13 @@ class MeshCLI:
|
||||
|
||||
if not new_password:
|
||||
return "Error: Password cannot be empty"
|
||||
|
||||
|
||||
# Update security config
|
||||
if 'security' not in self.config:
|
||||
self.config['security'] = {}
|
||||
|
||||
self.config['security']['password'] = new_password
|
||||
|
||||
if "security" not in self.config:
|
||||
self.config["security"] = {}
|
||||
|
||||
self.config["security"]["password"] = new_password
|
||||
|
||||
# Save config
|
||||
try:
|
||||
self.save_config()
|
||||
@@ -221,56 +223,56 @@ class MeshCLI:
|
||||
def _cmd_version(self) -> str:
|
||||
"""Get version information."""
|
||||
role = "room_server" if self.identity_type == "room_server" else "repeater"
|
||||
version = self.config.get('version', '1.0.0')
|
||||
version = self.config.get("version", "1.0.0")
|
||||
return f"pyMC_{role} v{version}"
|
||||
|
||||
|
||||
# ==================== Get Commands ====================
|
||||
|
||||
def _cmd_get(self, param: str) -> str:
|
||||
"""Handle get commands."""
|
||||
param = param.strip()
|
||||
logger.debug(f"_cmd_get called with param: '{param}' (len={len(param)})")
|
||||
|
||||
|
||||
if param == "af":
|
||||
af = self.repeater_config.get('airtime_factor', 1.0)
|
||||
af = self.repeater_config.get("airtime_factor", 1.0)
|
||||
return f"> {af}"
|
||||
|
||||
|
||||
elif param == "name":
|
||||
name = self.repeater_config.get('name', 'Unknown')
|
||||
name = self.repeater_config.get("name", "Unknown")
|
||||
return f"> {name}"
|
||||
|
||||
|
||||
elif param == "repeat":
|
||||
disabled = self.repeater_config.get('disable_forward', False)
|
||||
disabled = self.repeater_config.get("disable_forward", False)
|
||||
return f"> {'off' if disabled else 'on'}"
|
||||
|
||||
|
||||
elif param == "lat":
|
||||
lat = self.repeater_config.get('latitude', 0.0)
|
||||
lat = self.repeater_config.get("latitude", 0.0)
|
||||
return f"> {lat}"
|
||||
|
||||
|
||||
elif param == "lon":
|
||||
lon = self.repeater_config.get('longitude', 0.0)
|
||||
lon = self.repeater_config.get("longitude", 0.0)
|
||||
return f"> {lon}"
|
||||
|
||||
|
||||
elif param == "radio":
|
||||
radio = self.config.get('radio', {})
|
||||
freq_hz = radio.get('frequency', 915000000)
|
||||
bw_hz = radio.get('bandwidth', 125000)
|
||||
sf = radio.get('spreading_factor', 7)
|
||||
cr = radio.get('coding_rate', 5)
|
||||
radio = self.config.get("radio", {})
|
||||
freq_hz = radio.get("frequency", 915000000)
|
||||
bw_hz = radio.get("bandwidth", 125000)
|
||||
sf = radio.get("spreading_factor", 7)
|
||||
cr = radio.get("coding_rate", 5)
|
||||
# Convert Hz to MHz for freq, Hz to kHz for bandwidth (match C++ ftoa output)
|
||||
freq_mhz = freq_hz / 1_000_000.0
|
||||
bw_khz = bw_hz / 1_000.0
|
||||
return f"> {freq_mhz},{bw_khz},{sf},{cr}"
elif param == "freq":
freq_hz = self.config.get('radio', {}).get('frequency', 915000000)
freq_hz = self.config.get("radio", {}).get("frequency", 915000000)
freq_mhz = freq_hz / 1_000_000.0
return f"> {freq_mhz}"

elif param == "tx":
power = self.config.get('radio', {}).get('tx_power', 20)
power = self.config.get("radio", {}).get("tx_power", 20)
return f"> {power}"

elif param == "public.key":
# TODO: Get from identity
return "Error: Not yet implemented"
@@ -278,51 +280,51 @@ class MeshCLI:
elif param == "role":
role = "room_server" if self.identity_type == "room_server" else "repeater"
return f"> {role}"

elif param == "guest.password":
guest_pw = self.config.get('security', {}).get('guest_password', '')
guest_pw = self.config.get("security", {}).get("guest_password", "")
return f"> {guest_pw}"

elif param == "allow.read.only":
allow = self.config.get('security', {}).get('allow_read_only', False)
allow = self.config.get("security", {}).get("allow_read_only", False)
return f"> {'on' if allow else 'off'}"

elif param == "advert.interval":
interval = self.repeater_config.get('advert_interval_minutes', 120)
interval = self.repeater_config.get("advert_interval_minutes", 120)
return f"> {interval}"

elif param == "flood.advert.interval":
interval = self.repeater_config.get('flood_advert_interval_hours', 24)
interval = self.repeater_config.get("flood_advert_interval_hours", 24)
return f"> {interval}"

elif param == "flood.max":
max_flood = self.repeater_config.get('max_flood_hops', 3)
max_flood = self.repeater_config.get("max_flood_hops", 3)
return f"> {max_flood}"

elif param == "rxdelay":
delay = self.repeater_config.get('rx_delay_base', 0.0)
delay = self.repeater_config.get("rx_delay_base", 0.0)
return f"> {delay}"

elif param == "txdelay":
delay = self.repeater_config.get('tx_delay_factor', 1.0)
delay = self.repeater_config.get("tx_delay_factor", 1.0)
return f"> {delay}"

elif param == "direct.txdelay":
delay = self.repeater_config.get('direct_tx_delay_factor', 0.5)
delay = self.repeater_config.get("direct_tx_delay_factor", 0.5)
return f"> {delay}"

elif param == "multi.acks":
acks = self.repeater_config.get('multi_acks', 0)
acks = self.repeater_config.get("multi_acks", 0)
return f"> {acks}"

elif param == "int.thresh":
thresh = self.repeater_config.get('interference_threshold', -120)
thresh = self.repeater_config.get("interference_threshold", -120)
return f"> {thresh}"

elif param == "agc.reset.interval":
interval = self.repeater_config.get('agc_reset_interval', 0)
interval = self.repeater_config.get("agc_reset_interval", 0)
return f"> {interval}"

else:
return f"??: {param}"

@@ -335,144 +337,144 @@ class MeshCLI:
return "Error: Missing value"

key, value = parts[0], parts[1]

try:
if key == "af":
self.repeater_config['airtime_factor'] = float(value)
self.repeater_config["airtime_factor"] = float(value)
self.save_config()
return "OK"

elif key == "name":
self.repeater_config['name'] = value
self.repeater_config["name"] = value
self.save_config()
return "OK"

elif key == "repeat":
disabled = value.lower() == "off"
self.repeater_config['disable_forward'] = disabled
self.repeater_config["disable_forward"] = disabled
self.save_config()
return f"OK - repeat is now {'OFF' if disabled else 'ON'}"

elif key == "lat":
self.repeater_config['latitude'] = float(value)
self.repeater_config["latitude"] = float(value)
self.save_config()
return "OK"

elif key == "lon":
self.repeater_config['longitude'] = float(value)
self.repeater_config["longitude"] = float(value)
self.save_config()
return "OK"

elif key == "radio":
# Format: freq bw sf cr
radio_parts = value.split()
if len(radio_parts) != 4:
return "Error: Expected freq bw sf cr"
if 'radio' not in self.config:
self.config['radio'] = {}
self.config['radio']['frequency'] = float(radio_parts[0])
self.config['radio']['bandwidth'] = float(radio_parts[1])
self.config['radio']['spreading_factor'] = int(radio_parts[2])
self.config['radio']['coding_rate'] = int(radio_parts[3])
if "radio" not in self.config:
self.config["radio"] = {}
self.config["radio"]["frequency"] = float(radio_parts[0])
self.config["radio"]["bandwidth"] = float(radio_parts[1])
self.config["radio"]["spreading_factor"] = int(radio_parts[2])
self.config["radio"]["coding_rate"] = int(radio_parts[3])
self.save_config()
return "OK - restart repeater to apply"

elif key == "freq":
if 'radio' not in self.config:
self.config['radio'] = {}
self.config['radio']['frequency'] = float(value)
if "radio" not in self.config:
self.config["radio"] = {}
self.config["radio"]["frequency"] = float(value)
self.save_config()
return "OK - restart repeater to apply"

elif key == "tx":
if 'radio' not in self.config:
self.config['radio'] = {}
self.config['radio']['tx_power'] = int(value)
if "radio" not in self.config:
self.config["radio"] = {}
self.config["radio"]["tx_power"] = int(value)
self.save_config()
return "OK"

elif key == "guest.password":
if 'security' not in self.config:
self.config['security'] = {}
self.config['security']['guest_password'] = value
if "security" not in self.config:
self.config["security"] = {}
self.config["security"]["guest_password"] = value
self.save_config()
return "OK"

elif key == "allow.read.only":
if 'security' not in self.config:
self.config['security'] = {}
self.config['security']['allow_read_only'] = value.lower() == "on"
if "security" not in self.config:
self.config["security"] = {}
self.config["security"]["allow_read_only"] = value.lower() == "on"
self.save_config()
return "OK"

elif key == "advert.interval":
mins = int(value)
if mins > 0 and (mins < 60 or mins > 240):
return "Error: interval range is 60-240 minutes"
self.repeater_config['advert_interval_minutes'] = mins
self.repeater_config["advert_interval_minutes"] = mins
self.save_config()
return "OK"
elif key == "flood.advert.interval":
hours = int(value)
if (hours > 0 and hours < 3) or hours > 48:
return "Error: interval range is 3-48 hours"
self.repeater_config['flood_advert_interval_hours'] = hours
self.repeater_config["flood_advert_interval_hours"] = hours
self.save_config()
return "OK"

elif key == "flood.max":
max_val = int(value)
if max_val > 64:
return "Error: max 64"
self.repeater_config['max_flood_hops'] = max_val
self.repeater_config["max_flood_hops"] = max_val
self.save_config()
return "OK"

elif key == "rxdelay":
delay = float(value)
if delay < 0:
return "Error: cannot be negative"
self.repeater_config['rx_delay_base'] = delay
self.repeater_config["rx_delay_base"] = delay
self.save_config()
return "OK"

elif key == "txdelay":
delay = float(value)
if delay < 0:
return "Error: cannot be negative"
self.repeater_config['tx_delay_factor'] = delay
self.repeater_config["tx_delay_factor"] = delay
self.save_config()
return "OK"

elif key == "direct.txdelay":
delay = float(value)
if delay < 0:
return "Error: cannot be negative"
self.repeater_config['direct_tx_delay_factor'] = delay
self.repeater_config["direct_tx_delay_factor"] = delay
self.save_config()
return "OK"

elif key == "multi.acks":
self.repeater_config['multi_acks'] = int(value)
self.repeater_config["multi_acks"] = int(value)
self.save_config()
return "OK"

elif key == "int.thresh":
self.repeater_config['interference_threshold'] = int(value)
self.repeater_config["interference_threshold"] = int(value)
self.save_config()
return "OK"

elif key == "agc.reset.interval":
interval = int(value)
# Round down to a multiple of 4
rounded = (interval // 4) * 4
self.repeater_config['agc_reset_interval'] = rounded
self.repeater_config["agc_reset_interval"] = rounded
self.save_config()
return f"OK - interval rounded to {rounded}"
else:
return f"unknown config: {key}"

@@ -1,9 +1,9 @@
import asyncio
import logging
import time
from typing import Optional, Dict
from typing import Dict, Optional

from pymc_core.protocol import PacketBuilder, CryptoUtils
from pymc_core.protocol import CryptoUtils, PacketBuilder
from pymc_core.protocol.constants import PAYLOAD_TYPE_TXT_MSG

logger = logging.getLogger("RoomServer")
@@ -51,7 +51,7 @@ class GlobalRateLimiter:
self.min_gap = min_gap_seconds  # Minimum gap between consecutive messages
self.lock = asyncio.Lock()  # Only one transmission at a time
self.last_release_time = 0

async def acquire(self):
async with self.lock:
@@ -64,7 +64,7 @@ class GlobalRateLimiter:
await asyncio.sleep(wait_time)
# Lock is now held - caller can transmit
# Will be released when context exits

def release(self):
self.last_release_time = time.time()
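A minimal, self-contained sketch of the acquire/release pattern GlobalRateLimiter uses (the 1-second gap is illustrative, not the project's GLOBAL_MIN_GAP_BETWEEN_MESSAGES value):

import asyncio
import time

class MinGapLimiter:
    """Serialize transmissions and enforce a minimum gap between them."""

    def __init__(self, min_gap_seconds: float):
        self.min_gap = min_gap_seconds
        self.lock = asyncio.Lock()
        self.last_release_time = 0.0

    async def __aenter__(self):
        await self.lock.acquire()
        # Wait out the remainder of the gap since the previous release.
        wait = self.min_gap - (time.time() - self.last_release_time)
        if wait > 0:
            await asyncio.sleep(wait)
        return self

    async def __aexit__(self, *exc):
        self.last_release_time = time.time()
        self.lock.release()

async def demo():
    limiter = MinGapLimiter(min_gap_seconds=1.0)
    async with limiter:
        pass  # transmit here; the next entry waits at least 1s after this exits

asyncio.run(demo())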
@@ -82,43 +82,48 @@ class RoomServer:
max_posts: int = 32,
config_path: str = None,
config: dict = None,
config_manager = None,
send_advert_callback = None
config_manager=None,
send_advert_callback=None,
):

self.room_hash = room_hash
self.room_name = room_name
self.local_identity = local_identity
self.db = sqlite_handler
self.packet_injector = packet_injector
self.acl = acl

# Create send_advert callback for this room server
async def send_room_advert():
"""Send advertisement for this specific room server."""
if not packet_injector or not local_identity:
logger.error(f"Room '{room_name}': Cannot send advert - missing injector or identity")
logger.error(
f"Room '{room_name}': Cannot send advert - missing injector or identity"
)
return False

try:
from pymc_core.protocol import PacketBuilder
from pymc_core.protocol.constants import ADVERT_FLAG_HAS_NAME, ADVERT_FLAG_IS_ROOM_SERVER

from pymc_core.protocol.constants import (
ADVERT_FLAG_HAS_NAME,
ADVERT_FLAG_IS_ROOM_SERVER,
)

# Get room config
room_config = config.get('identities', {}).get('room_servers', [])
room_config = config.get("identities", {}).get("room_servers", [])
room_settings = {}
for rs in room_config:
if rs.get('name') == room_name:
room_settings = rs.get('settings', {})
if rs.get("name") == room_name:
room_settings = rs.get("settings", {})
break

# Use room-specific name and location
node_name = room_settings.get('room_name', room_name)
latitude = room_settings.get('latitude', 0.0)
longitude = room_settings.get('longitude', 0.0)

node_name = room_settings.get("room_name", room_name)
latitude = room_settings.get("latitude", 0.0)
longitude = room_settings.get("longitude", 0.0)

flags = ADVERT_FLAG_IS_ROOM_SERVER | ADVERT_FLAG_HAS_NAME

packet = PacketBuilder.create_advert(
local_identity=local_identity,
name=node_name,
@@ -129,21 +134,24 @@ class RoomServer:
flags=flags,
route_type="flood",
)

# Send via packet injector
await packet_injector(packet, wait_for_ack=False)
logger.info(f"Room '{room_name}': Sent flood advert '{node_name}' at ({latitude:.6f}, {longitude:.6f})")
logger.info(
f"Room '{room_name}': Sent flood advert '{node_name}' at ({latitude:.6f}, {longitude:.6f})"
)
return True

except Exception as e:
logger.error(f"Room '{room_name}': Failed to send advert: {e}", exc_info=True)
return False

# Initialize CLI handler for room server commands
self.cli = None
if config_path and config and config_manager:
from .mesh_cli import MeshCLI

self.cli = MeshCLI(
config_path,
config,
@@ -152,10 +160,10 @@ class RoomServer:
enable_regions=False,  # Room servers don't support region commands
send_advert_callback=send_room_advert,
identity=local_identity,
storage_handler=sqlite_handler
storage_handler=sqlite_handler,
)
logger.info(f"Room '{room_name}': Initialized CLI handler with identity and storage")

# Enforce hard limit (match C++ MAX_UNSYNCED_POSTS)
if max_posts > MAX_UNSYNCED_POSTS:
logger.warning(
@@ -164,45 +172,45 @@ class RoomServer:
)
max_posts = MAX_UNSYNCED_POSTS
self.max_posts = max_posts

# Round-robin state
self.next_client_idx = 0
self.next_push_time = 0

# Cleanup tracking
self.last_cleanup_time = time.time()
self.cleanup_interval = 600  # Cleanup every 10 minutes

# Safety and monitoring
self.client_post_times = {}  # Track last N post times per client for rate limiting
self.consecutive_sync_errors = 0  # Circuit breaker counter
self.last_eviction_check = time.time()
self.eviction_check_interval = 300  # Check every 5 minutes

# Initialize global rate limiter (singleton)
global _global_push_limiter
if _global_push_limiter is None:
_global_push_limiter = GlobalRateLimiter(GLOBAL_MIN_GAP_BETWEEN_MESSAGES)
self.global_limiter = _global_push_limiter

# Background task handle
self._sync_task = None
self._running = False

logger.info(
f"RoomServer initialized: name='{room_name}', "
f"hash=0x{room_hash:02X}, max_posts={max_posts}"
)

async def start(self):
if self._running:
logger.warning(f"Room '{self.room_name}' sync loop already running")
return

self._running = True
self._sync_task = asyncio.create_task(self._sync_loop())
logger.info(f"Room '{self.room_name}' sync loop started")

async def stop(self):
self._running = False
if self._sync_task:
@@ -212,14 +220,14 @@ class RoomServer:
except asyncio.CancelledError:
pass
logger.info(f"Room '{self.room_name}' sync loop stopped")

async def add_post(
self,
client_pubkey: bytes,
message_text: str,
sender_timestamp: int,
txt_type: int = TXT_TYPE_PLAIN,
allow_server_author: bool = False
allow_server_author: bool = False,
) -> bool:
try:
@@ -230,20 +238,19 @@ class RoomServer:
f"exceeds max length ({len(message_text)} > {MAX_MESSAGE_LENGTH}), truncating"
)
message_text = message_text[:MAX_MESSAGE_LENGTH]

# SAFETY: Rate limit per client
client_key = client_pubkey.hex()
now = time.time()

if client_key not in self.client_post_times:
self.client_post_times[client_key] = []

# Remove timestamps older than 1 minute
self.client_post_times[client_key] = [
t for t in self.client_post_times[client_key]
if now - t < 60
t for t in self.client_post_times[client_key] if now - t < 60
]

# Check rate limit
if len(self.client_post_times[client_key]) >= MAX_POSTS_PER_CLIENT_PER_MINUTE:
logger.warning(
@@ -251,13 +258,13 @@ class RoomServer:
f"exceeded rate limit ({MAX_POSTS_PER_CLIENT_PER_MINUTE} posts/min), dropping message"
)
return False

# Record this post time
self.client_post_times[client_key].append(now)
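Isolated from the surrounding method, the per-client limit is a sliding one-minute window over recent post times; a sketch with an assumed threshold (5 here is illustrative, not the project's MAX_POSTS_PER_CLIENT_PER_MINUTE):

import time

MAX_POSTS_PER_CLIENT_PER_MINUTE = 5  # illustrative threshold
post_times = {}  # client_key -> list of recent post timestamps

def allow_post(client_key, now=None):
    # Sliding one-minute window: keep only timestamps from the last 60s.
    now = time.time() if now is None else now
    window = [t for t in post_times.get(client_key, []) if now - t < 60]
    if len(window) >= MAX_POSTS_PER_CLIENT_PER_MINUTE:
        post_times[client_key] = window
        return False  # over the limit -> drop the post
    window.append(now)
    post_times[client_key] = window
    return True

assert all(allow_post("abcd", now=100.0 + i) for i in range(5))
assert not allow_post("abcd", now=105.0)  # sixth post inside the window
assert allow_post("abcd", now=200.0)      # window has slid past the burst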
# Use our RTC time for post_timestamp
post_timestamp = time.time()

# Store to database
msg_id = self.db.insert_room_message(
room_hash=f"0x{self.room_hash:02X}",
@@ -265,22 +272,22 @@ class RoomServer:
message_text=message_text,
post_timestamp=post_timestamp,
sender_timestamp=sender_timestamp,
txt_type=txt_type
txt_type=txt_type,
)

if msg_id:
logger.info(
f"Room '{self.room_name}': New post #{msg_id} from "
f"{client_pubkey[:4].hex()}: {message_text[:50]}"
)

# Log authenticated clients count for debugging distribution
all_clients = self.acl.get_all_clients()
logger.info(
f"Room '{self.room_name}': Message stored, will distribute to "
f"{len(all_clients)} authenticated client(s)"
)

# Update client's sync_since to this message's timestamp
# This prevents the author from receiving their own message back
# Also update activity timestamp (they're clearly active if posting)
@@ -292,43 +299,43 @@ class RoomServer:
room_hash=f"0x{self.room_hash:02X}",
client_pubkey=client_pubkey.hex(),
sync_since=post_timestamp,  # Don't send this message back to author
last_activity=time.time()
last_activity=time.time(),
)

# Trigger push notification
self.next_push_time = time.time() + (PUSH_NOTIFY_DELAY_MS / 1000.0)

return True
else:
logger.error(f"Failed to store message to database")
return False

except Exception as e:
logger.error(f"Error adding post: {e}", exc_info=True)
return False

async def push_post_to_client(self, client_info, post: Dict) -> bool:
try:
# SAFETY: Global transmission lock - only ONE message on radio at a time
# This is critical because LoRa is serial (0.5-9s airtime per message)
await self.global_limiter.acquire()

# SAFETY: Check client failure backoff
sync_state = self.db.get_client_sync(
room_hash=f"0x{self.room_hash:02X}",
client_pubkey=client_info.id.get_public_key().hex()
client_pubkey=client_info.id.get_public_key().hex(),
)

if sync_state:
failures = sync_state.get('push_failures', 0)
failures = sync_state.get("push_failures", 0)
if failures > 0:
# Apply exponential backoff
backoff_idx = min(failures, len(RETRY_BACKOFF_SCHEDULE) - 1)
backoff_delay = RETRY_BACKOFF_SCHEDULE[backoff_idx]
last_failure_time = sync_state.get('updated_at', 0)
last_failure_time = sync_state.get("updated_at", 0)
time_since_failure = time.time() - last_failure_time

if time_since_failure < backoff_delay:
wait_time = backoff_delay - time_since_failure
logger.debug(
@@ -336,33 +343,30 @@ class RoomServer:
f"in backoff (failure {failures}), waiting {wait_time:.0f}s"
)
return False  # Skip this client for now
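The backoff index is clamped to the last entry of RETRY_BACKOFF_SCHEDULE, so delays plateau after a few failures; a worked example with an assumed schedule (the real values live in module constants not shown in this hunk):

RETRY_BACKOFF_SCHEDULE = [0, 30, 120, 600]  # seconds; assumed values

def backoff_delay(failures: int) -> int:
    # Clamp so repeated failures reuse the final (largest) delay.
    idx = min(failures, len(RETRY_BACKOFF_SCHEDULE) - 1)
    return RETRY_BACKOFF_SCHEDULE[idx]

print([backoff_delay(f) for f in range(6)])  # [0, 30, 120, 600, 600, 600]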
# Build message payload
timestamp = int(time.time())
flags = (TXT_TYPE_SIGNED_PLAIN << 2)  # Include author prefix

flags = TXT_TYPE_SIGNED_PLAIN << 2  # Include author prefix

# Author prefix (first 4 bytes of pubkey)
author_pubkey = bytes.fromhex(post['author_pubkey'])
author_pubkey = bytes.fromhex(post["author_pubkey"])
author_prefix = author_pubkey[:4]

# Plaintext: timestamp(4) + flags(1) + author_prefix(4) + text
message_bytes = post['message_text'].encode('utf-8')
message_bytes = post["message_text"].encode("utf-8")
plaintext = (
timestamp.to_bytes(4, 'little') +
bytes([flags]) +
author_prefix +
message_bytes
timestamp.to_bytes(4, "little") + bytes([flags]) + author_prefix + message_bytes
)

# Calculate expected ACK (same algorithm as pymc_core)
attempt = 0
pack_data = PacketBuilder._pack_timestamp_data(timestamp, attempt, message_bytes)
ack_hash = CryptoUtils.sha256(pack_data + client_info.id.get_public_key())[:4]
expected_ack_crc = int.from_bytes(ack_hash, 'little')
expected_ack_crc = int.from_bytes(ack_hash, "little")
# Determine routing based on stored out_path
route_type = "flood" if client_info.out_path_len < 0 else "direct"

# Create datagram
packet = PacketBuilder.create_datagram(
ptype=PAYLOAD_TYPE_TXT_MSG,
@@ -370,41 +374,42 @@ class RoomServer:
local_identity=self.local_identity,
secret=client_info.shared_secret,
plaintext=plaintext,
route_type=route_type
route_type=route_type,
)

# Add stored path for direct routing
if route_type == "direct" and len(client_info.out_path) > 0:
packet.path = bytearray(client_info.out_path[:client_info.out_path_len])
packet.path = bytearray(client_info.out_path[: client_info.out_path_len])
packet.path_len = client_info.out_path_len

# Calculate ACK timeout
if route_type == "flood":
ack_timeout = PUSH_ACK_TIMEOUT_FLOOD_MS / 1000.0
else:
path_len = client_info.out_path_len if client_info.out_path_len >= 0 else 0
ack_timeout = (PUSH_TIMEOUT_BASE_MS + PUSH_ACK_TIMEOUT_FACTOR_MS * (path_len + 1)) / 1000.0
ack_timeout = (
PUSH_TIMEOUT_BASE_MS + PUSH_ACK_TIMEOUT_FACTOR_MS * (path_len + 1)
) / 1000.0
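With assumed millisecond constants (the actual values are module-level and not shown in this hunk), the direct-route timeout grows linearly with path length while flood uses a flat value:

# Assumed values for illustration only.
PUSH_ACK_TIMEOUT_FLOOD_MS = 12_000
PUSH_TIMEOUT_BASE_MS = 1_000
PUSH_ACK_TIMEOUT_FACTOR_MS = 500

def ack_timeout_secs(route_type: str, path_len: int) -> float:
    if route_type == "flood":
        return PUSH_ACK_TIMEOUT_FLOOD_MS / 1000.0
    path_len = max(path_len, 0)
    return (PUSH_TIMEOUT_BASE_MS + PUSH_ACK_TIMEOUT_FACTOR_MS * (path_len + 1)) / 1000.0

print(ack_timeout_secs("direct", 3))  # 3.0 with these assumed constants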
# Update client sync state with pending ACK
self.db.upsert_client_sync(
room_hash=f"0x{self.room_hash:02X}",
client_pubkey=client_info.id.get_public_key().hex(),
pending_ack_crc=expected_ack_crc,
push_post_timestamp=post['post_timestamp'],
ack_timeout_time=time.time() + ack_timeout
push_post_timestamp=post["post_timestamp"],
ack_timeout_time=time.time() + ack_timeout,
)
# Send packet (dispatcher will track ACK automatically)
# This blocks for the entire transmission duration (0.5-9 seconds)
success = await self.packet_injector(packet, wait_for_ack=True)

# SAFETY: Release transmission lock AFTER send completes
self.global_limiter.release()

if success:
# ACK received! Update sync state
await self._handle_ack_received(
client_info.id.get_public_key(),
post['post_timestamp']
client_info.id.get_public_key(), post["post_timestamp"]
)
logger.info(
f"Room '{self.room_name}': Pushed post to "
@@ -417,13 +422,13 @@ class RoomServer:
f"Room '{self.room_name}': Push to "
f"0x{client_info.id.get_public_key()[0]:02X} timed out"
)

return success

except Exception as e:
logger.error(f"Error pushing post to client: {e}", exc_info=True)
return False

async def _handle_ack_received(self, client_pubkey: bytes, post_timestamp: float):
try:
@@ -434,29 +439,28 @@ class RoomServer:
sync_since=post_timestamp,
pending_ack_crc=0,
push_failures=0,
last_activity=time.time()
last_activity=time.time(),
)
except Exception as e:
logger.error(f"Error handling ACK received: {e}")

async def _handle_ack_timeout(self, client_pubkey: bytes):
try:
# Get current sync state
sync_state = self.db.get_client_sync(
room_hash=f"0x{self.room_hash:02X}",
client_pubkey=client_pubkey.hex()
room_hash=f"0x{self.room_hash:02X}", client_pubkey=client_pubkey.hex()
)

if sync_state:
# Increment failure counter, clear pending_ack
failures = sync_state.get('push_failures', 0) + 1
failures = sync_state.get("push_failures", 0) + 1
self.db.upsert_client_sync(
room_hash=f"0x{self.room_hash:02X}",
client_pubkey=client_pubkey.hex(),
push_failures=failures,
pending_ack_crc=0
pending_ack_crc=0,
)

if failures >= 3:
logger.warning(
f"Room '{self.room_name}': Client 0x{client_pubkey[0]:02X} "
@@ -464,86 +468,86 @@ class RoomServer:
)
except Exception as e:
logger.error(f"Error handling ACK timeout: {e}")

def get_unsynced_count(self, client_pubkey: bytes) -> int:
try:
# Get client's sync state
sync_state = self.db.get_client_sync(
room_hash=f"0x{self.room_hash:02X}",
client_pubkey=client_pubkey.hex()
room_hash=f"0x{self.room_hash:02X}", client_pubkey=client_pubkey.hex()
)
sync_since = sync_state['sync_since'] if sync_state else 0

sync_since = sync_state["sync_since"] if sync_state else 0

return self.db.get_unsynced_count(
room_hash=f"0x{self.room_hash:02X}",
client_pubkey=client_pubkey.hex(),
sync_since=sync_since
sync_since=sync_since,
)
except Exception as e:
logger.error(f"Error getting unsynced count: {e}")
return 0

async def _evict_failed_clients(self):
try:
now = time.time()
all_sync_states = self.db.get_all_room_clients(f"0x{self.room_hash:02X}")

for sync_state in all_sync_states:
client_pubkey_hex = sync_state['client_pubkey']
push_failures = sync_state.get('push_failures', 0)
last_activity = sync_state.get('last_activity', 0)

client_pubkey_hex = sync_state["client_pubkey"]
push_failures = sync_state.get("push_failures", 0)
last_activity = sync_state.get("last_activity", 0)

# Skip already-evicted clients (marked with last_activity=0)
if last_activity == 0:
continue

evict = False
reason = ""

# Check max failures
if push_failures >= MAX_PUSH_FAILURES:
evict = True
reason = f"max failures ({push_failures})"

# Check inactivity timeout
elif now - last_activity > INACTIVE_CLIENT_TIMEOUT:
evict = True
reason = f"inactive for {(now - last_activity) / 60:.0f} minutes"

if evict:
# Remove from database
self.db.upsert_client_sync(
room_hash=f"0x{self.room_hash:02X}",
client_pubkey=client_pubkey_hex,
last_activity=0  # Mark as evicted
last_activity=0,  # Mark as evicted
)

# Remove from ACL
client_pubkey = bytes.fromhex(client_pubkey_hex)
self.acl.remove_client(client_pubkey)

logger.info(
f"Room '{self.room_name}': Evicted client "
f"0x{client_pubkey[0]:02X} ({reason})"
)

except Exception as e:
logger.error(f"Error evicting failed clients: {e}", exc_info=True)
async def _sync_loop(self):

# SAFETY: Stagger room startup to prevent thundering herd
import random

startup_delay = random.uniform(0, 5)  # 0-5 second random delay
await asyncio.sleep(startup_delay)

logger.info(f"Room '{self.room_name}' sync loop starting (delayed {startup_delay:.1f}s)")

while self._running:
try:
await asyncio.sleep(SYNC_PUSH_INTERVAL_MS / 1000.0)

# SAFETY: Circuit breaker - stop if too many consecutive errors
if self.consecutive_sync_errors >= MAX_CONSECUTIVE_SYNC_ERRORS:
logger.error(
@@ -553,21 +557,21 @@ class RoomServer:
await asyncio.sleep(DB_ERROR_RETRY_DELAY)
self.consecutive_sync_errors = 0  # Reset after pause
continue

# SAFETY: Periodic eviction check (every 5 minutes)
if time.time() - self.last_eviction_check > self.eviction_check_interval:
await self._evict_failed_clients()
self.last_eviction_check = time.time()

# Periodic cleanup check (every 10 minutes)
if time.time() - self.last_cleanup_time > self.cleanup_interval:
await self._cleanup_old_messages()
self.last_cleanup_time = time.time()

# Check if it's time to push
if time.time() < self.next_push_time:
continue

# Get all clients for this room
all_clients = self.acl.get_all_clients()
if not all_clients:
@@ -575,60 +579,66 @@ class RoomServer:
# to avoid log spam when room is idle
self.next_push_time = time.time() + 1.0  # Check again in 1 second
continue

# SAFETY: Limit number of clients
if len(all_clients) > MAX_CLIENTS_PER_ROOM:
logger.warning(
f"Room '{self.room_name}': Too many clients ({len(all_clients)} > {MAX_CLIENTS_PER_ROOM})"
)
all_clients = all_clients[:MAX_CLIENTS_PER_ROOM]

# Check for ACK timeouts first
await self._check_ack_timeouts()

# Track how many clients we've checked in this iteration
clients_checked = 0
max_checks = len(all_clients)

# Round-robin: find next active client
while clients_checked < max_checks:
# Get next client
if self.next_client_idx >= len(all_clients):
self.next_client_idx = 0

client = all_clients[self.next_client_idx]
self.next_client_idx = (self.next_client_idx + 1) % len(all_clients)
clients_checked += 1

# Get client sync state
sync_state = self.db.get_client_sync(
room_hash=f"0x{self.room_hash:02X}",
client_pubkey=client.id.get_public_key().hex()
client_pubkey=client.id.get_public_key().hex(),
)

# Skip if already waiting for ACK, evicted, or max failures
if sync_state:
pending_ack = sync_state.get('pending_ack_crc', 0)
last_activity = sync_state.get('last_activity', 0)
push_failures = sync_state.get('push_failures', 0)

pending_ack = sync_state.get("pending_ack_crc", 0)
last_activity = sync_state.get("last_activity", 0)
push_failures = sync_state.get("push_failures", 0)

if pending_ack != 0:
logger.debug(f"Skipping client 0x{client.id.get_public_key()[0]:02X} (waiting for ACK)")
logger.debug(
f"Skipping client 0x{client.id.get_public_key()[0]:02X} (waiting for ACK)"
)
continue

if last_activity == 0:
logger.debug(f"Skipping client 0x{client.id.get_public_key()[0]:02X} (evicted)")
logger.debug(
f"Skipping client 0x{client.id.get_public_key()[0]:02X} (evicted)"
)
continue

if push_failures >= 3:
logger.debug(f"Skipping client 0x{client.id.get_public_key()[0]:02X} (max failures)")
logger.debug(
f"Skipping client 0x{client.id.get_public_key()[0]:02X} (max failures)"
)
continue
sync_since = sync_state.get('sync_since', 0)

sync_since = sync_state.get("sync_since", 0)
else:
# Initialize sync state for new client
# Use sync_since from ACL client (sent during login) if available
sync_since = client.sync_since if hasattr(client, 'sync_since') else 0
sync_since = client.sync_since if hasattr(client, "sync_since") else 0
logger.info(
f"Room '{self.room_name}': Initializing client "
f"0x{client.id.get_public_key()[0]:02X} with sync_since={sync_since}"
@@ -637,17 +647,17 @@ class RoomServer:
room_hash=f"0x{self.room_hash:02X}",
client_pubkey=client.id.get_public_key().hex(),
sync_since=sync_since,
last_activity=time.time()
last_activity=time.time(),
)

# Find next unsynced message for this client
unsynced = self.db.get_unsynced_messages(
room_hash=f"0x{self.room_hash:02X}",
client_pubkey=client.id.get_public_key().hex(),
sync_since=sync_since,
limit=1
limit=1,
)

if unsynced:
post = unsynced[0]
logger.debug(
@@ -656,7 +666,7 @@ class RoomServer:
)
# Check if enough time has passed since post creation
now = time.time()
if now >= post['post_timestamp'] + POST_SYNC_DELAY_SECS:
if now >= post["post_timestamp"] + POST_SYNC_DELAY_SECS:
# Push this post
await self.push_post_to_client(client, post)
self.next_push_time = time.time() + (SYNC_PUSH_INTERVAL_MS / 1000.0)
@@ -668,15 +678,15 @@ class RoomServer:
else:
# No unsynced posts for this client, try next client
continue

# If we checked all clients and none were active/ready
if clients_checked >= max_checks:
# All clients skipped or no messages - wait longer before next check
self.next_push_time = time.time() + 5.0  # Wait 5 seconds

# SAFETY: Reset error counter on successful iteration
self.consecutive_sync_errors = 0

except asyncio.CancelledError:
break
except Exception as e:
@@ -684,35 +694,34 @@ class RoomServer:
self.consecutive_sync_errors += 1
logger.error(
f"Room '{self.room_name}': Sync loop error #{self.consecutive_sync_errors}: {e}",
exc_info=True
exc_info=True,
)

# SAFETY: Back off on errors
backoff = min(self.consecutive_sync_errors, 10)  # Cap at 10 seconds
await asyncio.sleep(backoff)

logger.info(f"Room '{self.room_name}' sync loop stopped")

async def _check_ack_timeouts(self):
try:
now = time.time()
all_sync_states = self.db.get_all_room_clients(f"0x{self.room_hash:02X}")

for sync_state in all_sync_states:
if sync_state['pending_ack_crc'] != 0:
timeout_time = sync_state.get('ack_timeout_time', 0)
if sync_state["pending_ack_crc"] != 0:
timeout_time = sync_state.get("ack_timeout_time", 0)
if now >= timeout_time:
# ACK timeout
client_pubkey = bytes.fromhex(sync_state['client_pubkey'])
client_pubkey = bytes.fromhex(sync_state["client_pubkey"])
await self._handle_ack_timeout(client_pubkey)
except Exception as e:
logger.error(f"Error checking ACK timeouts: {e}")

async def _cleanup_old_messages(self):
try:
deleted = self.db.cleanup_old_messages(
room_hash=f"0x{self.room_hash:02X}",
keep_count=self.max_posts
room_hash=f"0x{self.room_hash:02X}", keep_count=self.max_posts
)
if deleted > 0:
logger.info(f"Room '{self.room_name}': Cleaned up {deleted} old messages")
+179
-140
@@ -12,6 +12,7 @@ import struct
import time

from pymc_core.node.handlers.text import TextMessageHandler

from .mesh_cli import MeshCLI
from .room_server import RoomServer

@@ -24,9 +25,18 @@ TXT_TYPE_CLI_DATA = 0x01

class TextHelper:

def __init__(self, identity_manager, packet_injector=None, acl_dict=None, log_fn=None,
config_path: str = None, config: dict = None, config_manager=None,
sqlite_handler=None, send_advert_callback=None):
def __init__(
self,
identity_manager,
packet_injector=None,
acl_dict=None,
log_fn=None,
config_path: str = None,
config: dict = None,
config_manager=None,
sqlite_handler=None,
send_advert_callback=None,
):

self.identity_manager = identity_manager
self.packet_injector = packet_injector
@@ -34,47 +44,43 @@ class TextHelper:
self.acl_dict = acl_dict or {}  # Per-identity ACLs keyed by hash_byte
self.sqlite_handler = sqlite_handler  # For room server database operations
self.send_advert_callback = send_advert_callback  # Callback to send repeater advert

# Dictionary of handlers keyed by dest_hash
self.handlers = {}

# Dictionary of room servers keyed by dest_hash
self.room_servers = {}

# Track repeater identity for CLI commands
self.repeater_hash = None

# Store config for later use
self.config_path = config_path
self.config = config
self.config_manager = config_manager

# Store for later CLI initialization (needs identity and storage)
self.config_path = config_path
self.config = config

# Initialize CLI handler later when repeater identity is registered
self.cli = None

def register_identity(
self,
name: str,
identity,
identity_type: str = "room_server",
radio_config=None
self, name: str, identity, identity_type: str = "room_server", radio_config=None
):
hash_byte = identity.get_public_key()[0]

# Get ACL for this identity
identity_acl = self.acl_dict.get(hash_byte)
if not identity_acl:
logger.warning(f"Cannot register identity '{name}': no ACL for hash 0x{hash_byte:02X}")
return

# Create a contacts wrapper from this identity's ACL
acl_contacts = self._create_acl_contacts_wrapper(identity_acl)

# Create TextMessageHandler for this identity
handler = TextMessageHandler(
local_identity=identity,
@@ -83,7 +89,7 @@ class TextHelper:
send_packet_fn=self._send_packet,
radio_config=radio_config,
)

# Register by dest hash
hash_byte = identity.get_public_key()[0]
self.handlers[hash_byte] = {
@@ -92,12 +98,12 @@ class TextHelper:
"name": name,
"type": identity_type,
}

# Track repeater identity for CLI commands
if identity_type == "repeater":
self.repeater_hash = hash_byte
logger.info(f"Set repeater hash for CLI: 0x{hash_byte:02X}")

# Initialize CLI handler now that we have the repeater identity
if self.config_path and self.config and self.config_manager:
self.cli = MeshCLI(
@@ -108,18 +114,20 @@ class TextHelper:
enable_regions=True,
send_advert_callback=self.send_advert_callback,
identity=identity,
storage_handler=self.sqlite_handler
storage_handler=self.sqlite_handler,
)
logger.info("Initialized CLI handler for repeater commands with identity and storage")
logger.info(
"Initialized CLI handler for repeater commands with identity and storage"
)

# Create RoomServer instance for room_server identities
if identity_type == "room_server" and self.sqlite_handler:
try:
from .room_server import MAX_UNSYNCED_POSTS

room_config = radio_config or {}
max_posts = room_config.get('max_posts', MAX_UNSYNCED_POSTS)
max_posts = room_config.get("max_posts", MAX_UNSYNCED_POSTS)

# Enforce hard limit
if max_posts > MAX_UNSYNCED_POSTS:
logger.warning(
@@ -127,7 +135,7 @@ class TextHelper:
f"of {MAX_UNSYNCED_POSTS}, capping to {MAX_UNSYNCED_POSTS}"
)
max_posts = MAX_UNSYNCED_POSTS

room_server = RoomServer(
room_hash=hash_byte,
room_name=name,
@@ -138,31 +146,29 @@ class TextHelper:
max_posts=max_posts,
config_path=self.config_path,
config=self.config,
config_manager=self.config_manager
config_manager=self.config_manager,
)

self.room_servers[hash_byte] = room_server

# Start sync loop
asyncio.create_task(room_server.start())

logger.info(
f"Registered room server '{name}': hash=0x{hash_byte:02X}, "
f"max_posts={max_posts}"
)
except Exception as e:
logger.error(f"Failed to create room server '{name}': {e}", exc_info=True)

logger.info(
f"Registered {identity_type} '{name}' text handler: hash=0x{hash_byte:02X}"
)

logger.info(f"Registered {identity_type} '{name}' text handler: hash=0x{hash_byte:02X}")

def _create_acl_contacts_wrapper(self, acl):
class ACLContactsWrapper:
def __init__(self, identity_acl):
self._acl = identity_acl

@property
def contacts(self):
contact_list = []
@@ -172,10 +178,10 @@ class TextHelper:
def __init__(self, client):
self.public_key = client.id.get_public_key().hex()
self.name = f"client_{self.public_key[:8]}"

contact_list.append(ContactProxy(client_info))
return contact_list

return ACLContactsWrapper(acl)

async def process_text_packet(self, packet):
@@ -183,20 +189,20 @@ class TextHelper:
try:
if len(packet.payload) < 2:
return False

dest_hash = packet.payload[0]
src_hash = packet.payload[1]

handler_info = self.handlers.get(dest_hash)
if handler_info:
logger.debug(
f"Routing text message to '{handler_info['name']}': "
f"dest=0x{dest_hash:02X}, src=0x{src_hash:02X}"
)

# Let handler decrypt the message first
await handler_info["handler"](packet)

# Call placeholder for custom processing
await self._on_message_received(
identity_name=handler_info["name"],
@@ -205,16 +211,14 @@ class TextHelper:
dest_hash=dest_hash,
src_hash=src_hash,
)

# Mark packet as handled
packet.mark_do_not_retransmit()
return True
else:
logger.debug(
f"No text handler for hash 0x{dest_hash:02X}, allowing forward"
)
logger.debug(f"No text handler for hash 0x{dest_hash:02X}, allowing forward")
return False

except Exception as e:
logger.error(f"Error processing text packet: {e}")
return False
@@ -230,128 +234,137 @@ class TextHelper:

# Placeholder - can be overridden or callback can be added
logger.debug(
f"Message received for {identity_type} '{identity_name}' "
f"from 0x{src_hash:02X}"
f"Message received for {identity_type} '{identity_name}' " f"from 0x{src_hash:02X}"
)

# Extract decrypted message if available
if hasattr(packet, "decrypted") and packet.decrypted:
message_text = packet.decrypted.get("text", "<unknown>")

# Clean message text - remove null bytes and trailing whitespace
message_text = message_text.rstrip('\x00').rstrip()

logger.info(
f"[{identity_type}:{identity_name}] Message: {message_text}"
)

message_text = message_text.rstrip("\x00").rstrip()

logger.info(f"[{identity_type}:{identity_name}] Message: {message_text}")

# Handle room server messages
if identity_type == "room_server" and dest_hash in self.room_servers:
room_server = self.room_servers[dest_hash]

# Check if this is a CLI command FIRST (before storing as post)
if self._is_cli_command(message_text):
# Handle CLI command - do NOT store as post
if room_server and room_server.cli:
try:
# Check admin permission
is_admin = self._check_admin_permission_for_identity(src_hash, dest_hash)
is_admin = self._check_admin_permission_for_identity(
src_hash, dest_hash
)

if not is_admin:
logger.warning(f"Room '{identity_name}': CLI command denied from 0x{src_hash:02X} (not admin)")
logger.warning(
f"Room '{identity_name}': CLI command denied from 0x{src_hash:02X} (not admin)"
)
return

# Get sender's full pubkey
identity_acl = self.acl_dict.get(dest_hash)
sender_pubkey = bytes([src_hash]) + b'\x00' * 31  # Default
sender_pubkey = bytes([src_hash]) + b"\x00" * 31  # Default
if identity_acl:
for client_info in identity_acl.get_all_clients():
if client_info.id.get_public_key()[0] == src_hash:
sender_pubkey = client_info.id.get_public_key()
break

# Handle CLI command
reply = room_server.cli.handle_command(
sender_pubkey=sender_pubkey,
command=message_text,
is_admin=is_admin
sender_pubkey=sender_pubkey, command=message_text, is_admin=is_admin
)
logger.info(f"Room '{identity_name}': CLI command from 0x{src_hash:02X}: {message_text[:50]} -> {reply[:100]}")

logger.info(
f"Room '{identity_name}': CLI command from 0x{src_hash:02X}: {message_text[:50]} -> {reply[:100]}"
)

# Send reply back to sender
handler_info = self.handlers.get(dest_hash)
if handler_info:
await self._send_cli_reply(packet, reply, handler_info)

except Exception as e:
logger.error(f"Error processing room server CLI command: {e}", exc_info=True)
logger.error(
f"Error processing room server CLI command: {e}", exc_info=True
)

# CLI command handled, don't store as post
return

# NOT a CLI command - store as regular room post
try:
# Get sender's full pubkey
identity_acl = self.acl_dict.get(dest_hash)
sender_pubkey = bytes([src_hash]) + b'\x00' * 31  # Default
sender_pubkey = bytes([src_hash]) + b"\x00" * 31  # Default
if identity_acl:
for client_info in identity_acl.get_all_clients():
if client_info.id.get_public_key()[0] == src_hash:
sender_pubkey = client_info.id.get_public_key()
break

# Store message as post
sender_timestamp = int(time.time())
success = await room_server.add_post(
client_pubkey=sender_pubkey,
message_text=message_text,
sender_timestamp=sender_timestamp,
txt_type=TXT_TYPE_PLAIN
txt_type=TXT_TYPE_PLAIN,
)

if success:
logger.info(f"Room '{identity_name}': New post from {sender_pubkey[:4].hex()}: {message_text[:50]}")
logger.info(
f"Room '{identity_name}': New post from {sender_pubkey[:4].hex()}: {message_text[:50]}"
)

except Exception as e:
logger.error(f"Error storing room post: {e}", exc_info=True)

return

# Check if this is a CLI command to the repeater (AFTER decryption)
if dest_hash == self.repeater_hash and self.cli and self._is_cli_command(message_text):
try:
# Check admin permission
is_admin = self._check_admin_permission_for_identity(src_hash, self.repeater_hash)
is_admin = self._check_admin_permission_for_identity(
src_hash, self.repeater_hash
)

# If not admin, log and return without sending reply
if not is_admin:
logger.warning(f"CLI command denied from 0x{src_hash:02X} (not admin): {message_text[:50]}")
logger.warning(
f"CLI command denied from 0x{src_hash:02X} (not admin): {message_text[:50]}"
)
return

# Get client for full public key
repeater_acl = self.acl_dict.get(self.repeater_hash)
sender_pubkey = bytes([src_hash]) + b'\x00' * 31  # Default
sender_pubkey = bytes([src_hash]) + b"\x00" * 31  # Default
if repeater_acl:
for client_info in repeater_acl.get_all_clients():
if client_info.id.get_public_key()[0] == src_hash:
sender_pubkey = client_info.id.get_public_key()
break

# Handle CLI command
reply = self.cli.handle_command(
sender_pubkey=sender_pubkey,
command=message_text,
is_admin=is_admin
sender_pubkey=sender_pubkey, command=message_text, is_admin=is_admin
)
logger.info(f"CLI command from 0x{src_hash:02X}: {message_text[:50]} -> {reply[:100]}")

logger.info(
f"CLI command from 0x{src_hash:02X}: {message_text[:50]} -> {reply[:100]}"
)

# Send reply back to sender
handler_info = self.handlers.get(dest_hash)
if handler_info:
await self._send_cli_reply(packet, reply, handler_info)

except Exception as e:
logger.error(f"Error processing CLI command: {e}", exc_info=True)
@@ -381,7 +394,7 @@ class TextHelper:
}
for hash_byte, info in self.handlers.items()
]

async def cleanup(self):
"""Cleanup room servers and handlers."""
# Stop all room server sync loops
@@ -390,52 +403,68 @@ class TextHelper:
await room_server.stop()
except Exception as e:
logger.error(f"Error stopping room server: {e}")

logger.info("TextHelper cleanup complete")

def _is_cli_command(self, message: str) -> bool:
"""Check if message looks like a CLI command."""
# Strip optional sequence prefix (XX|)
if len(message) > 4 and message[2] == '|':
if len(message) > 4 and message[2] == "|":
message = message[3:].strip()

# Check for known command prefixes
command_prefixes = [
"get ", "set ", "reboot", "advert", "clock", "time ",
"password ", "clear ", "ver", "board", "neighbors", "neighbor.",
"tempradio ", "setperm ", "region", "sensor ", "gps", "log ",
"stats-", "start ota"
"get ",
"set ",
"reboot",
"advert",
"clock",
"time ",
"password ",
"clear ",
"ver",
"board",
"neighbors",
"neighbor.",
"tempradio ",
"setperm ",
"region",
"sensor ",
"gps",
"log ",
"stats-",
"start ota",
]

return any(message.startswith(prefix) for prefix in command_prefixes)
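For example, the optional sequence prefix `XX|` is stripped before the prefix match; a reduced, illustrative mirror of the check (only a subset of the prefix table is reproduced):

def looks_like_cli(message: str) -> bool:
    # Strip an "XX|" sequence prefix, then match known command prefixes.
    if len(message) > 4 and message[2] == "|":
        message = message[3:].strip()
    return any(message.startswith(p) for p in ("get ", "set ", "reboot", "ver"))

assert looks_like_cli("03|get freq")      # "03|" stripped -> matches "get "
assert not looks_like_cli("hello world")  # plain text -> stored as a room post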
def _check_admin_permission(self, src_hash: int) -> bool:
"""Check if sender has admin permissions for repeater (legacy method)."""
return self._check_admin_permission_for_identity(src_hash, self.repeater_hash)

def _check_admin_permission_for_identity(self, src_hash: int, identity_hash: int) -> bool:
"""Check if sender has admin permissions (bit 0x02) for a specific identity."""
# Get the identity's ACL
identity_acl = self.acl_dict.get(identity_hash)
if not identity_acl:
return False

# Get client by hash byte
clients = identity_acl.get_all_clients()
for client_info in clients:
pubkey = client_info.id.get_public_key()
if pubkey[0] == src_hash:
# Check admin bit (0x02 = PERM_ACL_ADMIN)
permissions = getattr(client_info, 'permissions', 0)
permissions = getattr(client_info, "permissions", 0)
PERM_ACL_ADMIN = 0x02
return (permissions & 0x02) == PERM_ACL_ADMIN

return False

async def _send_cli_reply(self, original_packet, reply_text: str, handler_info: dict):
"""
Send CLI reply back to sender using TXT_MSG datagram.

Follows the C++ pattern (lines 603-609 in MyMesh.cpp):
- Creates TXT_MSG datagram with TXT_TYPE_CLI_DATA flag
- Encrypts with shared secret from ACL client
@@ -443,77 +472,87 @@ class TextHelper:
* if out_path_len < 0: sendFlood()
* else: sendDirect() with stored out_path
"""
from pymc_core.protocol import PacketBuilder, Identity
from pymc_core.protocol.constants import PAYLOAD_TYPE_TXT_MSG
import time

from pymc_core.protocol import Identity, PacketBuilder
from pymc_core.protocol.constants import PAYLOAD_TYPE_TXT_MSG

try:
src_hash = original_packet.payload[1]
dest_hash = original_packet.payload[0]

incoming_route = original_packet.get_route_type()
logger.debug(f"CLI reply: original packet dest=0x{dest_hash:02X}, src=0x{src_hash:02X}, incoming_route={incoming_route}")

logger.debug(
f"CLI reply: original packet dest=0x{dest_hash:02X}, src=0x{src_hash:02X}, incoming_route={incoming_route}"
)

# Find the client in the DESTINATION identity's ACL (not always repeater!)
# dest_hash is the identity that received the command (repeater OR room server)
identity_acl = self.acl_dict.get(dest_hash)
if not identity_acl:
logger.error(f"No ACL found for identity 0x{dest_hash:02X} for CLI reply")
return

client = None
for client_info in identity_acl.get_all_clients():
pubkey = client_info.id.get_public_key()
if pubkey[0] == src_hash:
client = client_info
break

if not client:
logger.error(f"Client 0x{src_hash:02X} not found in identity 0x{dest_hash:02X} ACL for CLI reply")
logger.error(
f"Client 0x{src_hash:02X} not found in identity 0x{dest_hash:02X} ACL for CLI reply"
)
return

# Get shared secret from client
shared_secret = client.shared_secret
if not shared_secret or len(shared_secret) == 0:
logger.error(f"No shared secret for client 0x{src_hash:02X}")
return

# Build reply packet payload
# Format: timestamp(4) + flags(1) + reply_text
timestamp = int(time.time())
TXT_TYPE_CLI_DATA = 0x01
flags = (TXT_TYPE_CLI_DATA << 2)  # Upper 6 bits are txt_type
reply_bytes = reply_text.encode('utf-8')
plaintext = timestamp.to_bytes(4, 'little') + bytes([flags]) + reply_bytes

flags = TXT_TYPE_CLI_DATA << 2  # Upper 6 bits are txt_type

reply_bytes = reply_text.encode("utf-8")
plaintext = timestamp.to_bytes(4, "little") + bytes([flags]) + reply_bytes
# Decide routing based on client->out_path_len (C++ pattern)
# out_path is populated by PATH packets, NOT from incoming text message route
route_type = "flood" if client.out_path_len < 0 else "direct"
logger.debug(f"CLI reply: client.out_path_len={client.out_path_len}, using route_type={route_type}")

logger.debug(
f"CLI reply: client.out_path_len={client.out_path_len}, using route_type={route_type}"
)

reply_packet = PacketBuilder.create_datagram(
ptype=PAYLOAD_TYPE_TXT_MSG,
dest=client.id,
local_identity=handler_info["identity"],
secret=shared_secret,
plaintext=plaintext,
route_type=route_type
route_type=route_type,
)

# Add path for direct routing if available from PATH packets
if client.out_path_len >= 0 and len(client.out_path) > 0:
reply_packet.path = bytearray(client.out_path[:client.out_path_len])
reply_packet.path = bytearray(client.out_path[: client.out_path_len])
reply_packet.path_len = client.out_path_len
logger.debug(f"CLI reply: Added stored out_path - path_len={reply_packet.path_len}, path={[hex(b) for b in reply_packet.path]}")

logger.debug(
f"CLI reply: Added stored out_path - path_len={reply_packet.path_len}, path={[hex(b) for b in reply_packet.path]}"
)

# Send with delay (CLI_REPLY_DELAY_MILLIS = 600ms in C++)
CLI_REPLY_DELAY_MS = 600
await asyncio.sleep(CLI_REPLY_DELAY_MS / 1000.0)

await self._send_packet(reply_packet, wait_for_ack=False)
logger.info(f"CLI reply sent to 0x{src_hash:02X} via {route_type.upper()}: {reply_text[:50]}")

logger.info(
f"CLI reply sent to 0x{src_hash:02X} via {route_type.upper()}: {reply_text[:50]}"
)

except Exception as e:
logger.error(f"Error sending CLI reply: {e}", exc_info=True)

@@ -9,7 +9,7 @@ of packets through the mesh network.
import asyncio
import logging
import time
from typing import Dict, Any
from typing import Any, Dict

from pymc_core.hardware.signal_utils import snr_register_to_db
from pymc_core.node.handlers.trace import TraceHandler
@@ -34,10 +34,15 @@ class TraceHelper:
self.local_hash = local_hash
self.repeater_handler = repeater_handler
self.packet_injector = packet_injector  # Function to inject packets into router

# Ping callback system - track pending ping requests by tag
self.pending_pings = {}  # {tag: {'event': asyncio.Event(), 'result': dict, 'target': int, 'sent_at': float}}
self.pending_pings = (
{}
)  # {tag: {'event': asyncio.Event(), 'result': dict, 'target': int, 'sent_at': float}}

# Optional: when trace reaches final node, call this (packet, parsed_data) to push 0x89 to companions
self.on_trace_complete = None  # async (packet, parsed_data) -> None

# Create TraceHandler internally as a parsing utility
self.trace_handler = TraceHandler(log_fn=log_fn or logger.info)

@@ -60,9 +65,7 @@ class TraceHelper:
parsed_data = self.trace_handler._parse_trace_payload(packet.payload)

if not parsed_data.get("valid", False):
logger.warning(
f"Invalid trace packet: {parsed_data.get('error', 'Unknown error')}"
)
logger.warning(f"Invalid trace packet: {parsed_data.get('error', 'Unknown error')}")
return

trace_path = parsed_data["trace_path"]
@@ -73,14 +76,14 @@ class TraceHelper:
if trace_tag in self.pending_pings:
ping_info = self.pending_pings[trace_tag]
# Store response data
ping_info['result'] = {
'path': trace_path,
'snr': packet.get_snr(),
'rssi': getattr(packet, "rssi", 0),
'received_at': time.time()
ping_info["result"] = {
"path": trace_path,
"snr": packet.get_snr(),
"rssi": getattr(packet, "rssi", 0),
"received_at": time.time(),
}
# Signal the waiting coroutine
ping_info['event'].set()
ping_info["event"].set()
logger.info(f"Ping response received for tag {trace_tag}")
|
||||
|
||||
# Record the trace packet for dashboard/statistics
|
||||
@@ -107,6 +110,12 @@ class TraceHelper:
|
||||
else:
|
||||
# This is the final destination or can't forward - just log and record
|
||||
self._log_no_forward_reason(packet, trace_path, trace_path_len)
|
||||
# When trace completed (reached end of path), push PUSH_CODE_TRACE_DATA (0x89) to companions (firmware onTraceRecv)
|
||||
if packet.path_len >= trace_path_len and self.on_trace_complete:
|
||||
try:
|
||||
await self.on_trace_complete(packet, parsed_data)
|
||||
except Exception as e:
|
||||
logger.debug("on_trace_complete error: %s", e)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing trace packet: {e}")
|
||||
@@ -140,27 +149,37 @@ class TraceHelper:
|
||||
|
||||
# Add detailed SNR info if we have the corresponding hash
|
||||
if i < len(trace_path):
|
||||
path_snr_details.append({
|
||||
"hash": f"{trace_path[i]:02X}",
|
||||
"snr_raw": snr_val,
|
||||
"snr_db": snr_db
|
||||
})
|
||||
path_snr_details.append(
|
||||
{"hash": f"{trace_path[i]:02X}", "snr_raw": snr_val, "snr_db": snr_db}
|
||||
)
|
||||
|
||||
return {
|
||||
"timestamp": time.time(),
|
||||
"header": f"0x{packet.header:02X}" if hasattr(packet, "header") and packet.header is not None else None,
|
||||
"payload": packet.payload.hex() if hasattr(packet, "payload") and packet.payload else None,
|
||||
"payload_length": len(packet.payload) if hasattr(packet, "payload") and packet.payload else 0,
|
||||
"header": (
|
||||
f"0x{packet.header:02X}"
|
||||
if hasattr(packet, "header") and packet.header is not None
|
||||
else None
|
||||
),
|
||||
"payload": (
|
||||
packet.payload.hex() if hasattr(packet, "payload") and packet.payload else None
|
||||
),
|
||||
"payload_length": (
|
||||
len(packet.payload) if hasattr(packet, "payload") and packet.payload else 0
|
||||
),
|
||||
"type": packet.get_payload_type(), # 0x09 for trace
|
||||
"route": packet.get_route_type(), # Should be direct (1)
|
||||
"route": packet.get_route_type(), # Should be direct (1)
|
||||
"length": len(packet.payload or b""),
|
||||
"rssi": getattr(packet, "rssi", 0),
|
||||
"snr": getattr(packet, "snr", 0.0),
|
||||
"score": self.repeater_handler.calculate_packet_score(
|
||||
getattr(packet, "snr", 0.0),
|
||||
len(packet.payload or b""),
|
||||
self.repeater_handler.radio_config.get("spreading_factor", 8)
|
||||
) if self.repeater_handler else 0.0,
|
||||
"score": (
|
||||
self.repeater_handler.calculate_packet_score(
|
||||
getattr(packet, "snr", 0.0),
|
||||
len(packet.payload or b""),
|
||||
self.repeater_handler.radio_config.get("spreading_factor", 8),
|
||||
)
|
||||
if self.repeater_handler
|
||||
else 0.0
|
||||
),
|
||||
"tx_delay_ms": 0,
|
||||
"transmitted": False,
|
||||
"is_duplicate": False,
|
||||
@@ -217,21 +236,24 @@ class TraceHelper:
|
||||
True if the packet should be forwarded, False otherwise
|
||||
"""
|
||||
# Use the exact logic from the original working code
|
||||
return (packet.path_len < trace_path_len and
|
||||
len(trace_path) > packet.path_len and
|
||||
trace_path[packet.path_len] == self.local_hash and
|
||||
self.repeater_handler and not self.repeater_handler.is_duplicate(packet))
|
||||
return (
|
||||
packet.path_len < trace_path_len
|
||||
and len(trace_path) > packet.path_len
|
||||
and trace_path[packet.path_len] == self.local_hash
|
||||
and self.repeater_handler
|
||||
and not self.repeater_handler.is_duplicate(packet)
|
||||
)
|
||||
|
||||
async def _forward_trace_packet(self, packet, trace_path_len: int) -> None:
|
||||
"""
|
||||
Forward a trace packet by appending SNR and sending via injection.
|
||||
|
||||
|
||||
Args:
|
||||
packet: The trace packet to forward
|
||||
trace_path_len: The length of the trace path
|
||||
"""
|
||||
# Update the packet record to show it will be transmitted
|
||||
if self.repeater_handler and hasattr(self.repeater_handler, 'recent_packets'):
|
||||
if self.repeater_handler and hasattr(self.repeater_handler, "recent_packets"):
|
||||
packet_hash = packet.calculate_packet_hash().hex().upper()[:16]
|
||||
for record in reversed(self.repeater_handler.recent_packets):
|
||||
if record.get("packet_hash") == packet_hash:
|
||||
@@ -284,41 +306,44 @@ class TraceHelper:
|
||||
elif len(trace_path) <= packet.path_len:
|
||||
logger.info("Path index out of bounds")
|
||||
elif trace_path[packet.path_len] != self.local_hash:
|
||||
expected_hash = trace_path[packet.path_len] if packet.path_len < len(trace_path) else None
|
||||
expected_hash = (
|
||||
trace_path[packet.path_len] if packet.path_len < len(trace_path) else None
|
||||
)
|
||||
logger.info(f"Not our turn (next hop: 0x{expected_hash:02x})")
|
||||
elif self.repeater_handler and self.repeater_handler.is_duplicate(packet):
|
||||
logger.info("Duplicate packet, ignoring")
|
||||
|
||||
def register_ping(self, tag: int, target_hash: int) -> asyncio.Event:
|
||||
"""Register a ping request and return an event to wait on.
|
||||
|
||||
|
||||
Args:
|
||||
tag: The unique trace tag for this ping
|
||||
target_hash: The hash of the target node
|
||||
|
||||
|
||||
Returns:
|
||||
asyncio.Event that will be set when response is received
|
||||
"""
|
||||
event = asyncio.Event()
|
||||
self.pending_pings[tag] = {
|
||||
'event': event,
|
||||
'result': None,
|
||||
'target': target_hash,
|
||||
'sent_at': time.time()
|
||||
"event": event,
|
||||
"result": None,
|
||||
"target": target_hash,
|
||||
"sent_at": time.time(),
|
||||
}
|
||||
logger.debug(f"Registered ping with tag {tag} for target 0x{target_hash:02x}")
|
||||
return event
|
||||
|
||||
def cleanup_stale_pings(self, max_age_seconds: int = 30):
|
||||
"""Remove pending pings older than max_age_seconds.
|
||||
|
||||
|
||||
Args:
|
||||
max_age_seconds: Maximum age in seconds before a ping is considered stale
|
||||
"""
|
||||
current_time = time.time()
|
||||
stale_tags = [
|
||||
tag for tag, info in self.pending_pings.items()
|
||||
if current_time - info['sent_at'] > max_age_seconds
|
||||
tag
|
||||
for tag, info in self.pending_pings.items()
|
||||
if current_time - info["sent_at"] > max_age_seconds
|
||||
]
|
||||
for tag in stale_tags:
|
||||
self.pending_pings.pop(tag)
|
||||
|
||||
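register_ping and cleanup_stale_pings implement a tag-keyed rendezvous: the trace receive path fills pending_pings[tag]["result"] and sets the event. A caller-side sketch of waiting on it (the TX step is a hypothetical placeholder; only register_ping and the event/result contract come from the code above):

# Sketch: wait for a ping response with a timeout (send_trace is hypothetical).
import asyncio

async def ping(trace_helper, tag: int, target_hash: int, timeout: float = 10.0):
    event = trace_helper.register_ping(tag, target_hash)
    # await send_trace(tag, target_hash)   # hypothetical: transmit the trace packet
    try:
        await asyncio.wait_for(event.wait(), timeout)
        return trace_helper.pending_pings[tag]["result"]   # path/snr/rssi/received_at
    except asyncio.TimeoutError:
        return None
    finally:
        trace_helper.pending_pings.pop(tag, None)
        trace_helper.cleanup_stale_pings()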
@@ -1,20 +1,20 @@
import logging
from typing import Dict, Optional, Tuple, Any
from typing import Any, Dict, Optional, Tuple

logger = logging.getLogger("IdentityManager")

class IdentityManager:

def __init__(self, config: dict):
self.config = config
self.identities: Dict[int, Tuple[Any, dict, str]] = {}
self.named_identities: Dict[str, Tuple[Any, dict, str]] = {}
self.registered_hashes: Dict[int, str] = {}

def register_identity(self, name: str, identity, config: dict, identity_type: str):
hash_byte = identity.get_public_key()[0]

if hash_byte in self.identities:
existing_name = self.registered_hashes.get(hash_byte, "unknown")
logger.error(
@@ -22,40 +22,43 @@ class IdentityManager:
f"conflicts with existing identity '{existing_name}'"
)
return False

self.identities[hash_byte] = (identity, config, identity_type)
self.named_identities[name] = (identity, config, identity_type)
self.registered_hashes[hash_byte] = f"{identity_type}:{name}"

logger.info(
f"Identity registered: name={name}, hash=0x{hash_byte:02X}, type={identity_type}"
)
return True

def get_identity_by_hash(self, hash_byte: int) -> Optional[Tuple[Any, dict, str]]:
return self.identities.get(hash_byte)

def get_identity_by_name(self, name: str) -> Optional[Tuple[Any, dict, str]]:
return self.named_identities.get(name)

def has_identity(self, hash_byte: int) -> bool:
return hash_byte in self.identities

def list_identities(self) -> list:
identities = []
for hash_byte, (identity, config, id_type) in self.identities.items():
name = self.registered_hashes.get(hash_byte, "unknown")
identities.append({
"hash": f"0x{hash_byte:02X}",
"name": name,
"type": id_type,
"address": identity.get_address_bytes().hex() if identity else "N/A"
})
identities.append(
{
"hash": f"0x{hash_byte:02X}",
"name": name,
"type": id_type,
"address": identity.get_address_bytes().hex() if identity else "N/A",
"public_key": identity.get_public_key().hex() if identity else None,
}
)
return identities

def has_identity_type(self, identity_type: str) -> bool:
return any(id_type == identity_type for _, _, id_type in self.identities.values())

def get_identities_by_type(self, identity_type: str) -> list:
results = []
for name, (identity, config, id_type) in self.named_identities.items():
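IdentityManager keys every identity by the first byte of its public key, so at most 256 identities can coexist on one node and a first-byte collision is rejected by register_identity. A toy sketch of that constraint against the class above (FakeIdentity is a hypothetical stub):

# Sketch: hash-byte collisions are rejected (FakeIdentity is a hypothetical stub).
class FakeIdentity:
    def __init__(self, key: bytes):
        self._key = key
    def get_public_key(self) -> bytes:
        return self._key
    def get_address_bytes(self) -> bytes:
        return self._key[:4]

mgr = IdentityManager(config={})
a = FakeIdentity(bytes([0x42]) + bytes(31))
b = FakeIdentity(bytes([0x42]) + bytes(30) + b"\x01")   # same first byte as a
assert mgr.register_identity("repeater", a, {}, "repeater") is True
assert mgr.register_identity("room", b, {}, "room_server") is False   # hash 0x42 taken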
+572
-97
@@ -2,14 +2,24 @@ import asyncio
import logging
import os
import sys
import time

from repeater.config import get_radio_for_board, load_config
from repeater.companion.utils import validate_companion_node_name
from repeater.config import get_radio_for_board, load_config, save_config
from repeater.config_manager import ConfigManager
from repeater.engine import RepeaterHandler
from repeater.web.http_server import HTTPStatsServer, _log_buffer
from repeater.handler_helpers import TraceHelper, DiscoveryHelper, AdvertHelper, LoginHelper, TextHelper, PathHelper, ProtocolRequestHelper
from repeater.packet_router import PacketRouter
from repeater.handler_helpers import (
AdvertHelper,
DiscoveryHelper,
LoginHelper,
PathHelper,
ProtocolRequestHelper,
TextHelper,
TraceHelper,
)
from repeater.identity_manager import IdentityManager
from repeater.packet_router import PacketRouter
from repeater.web.http_server import HTTPStatsServer, _log_buffer

logger = logging.getLogger("RepeaterDaemon")

@@ -36,7 +46,8 @@ class RepeaterDaemon:
self.protocol_request_helper = None
self.acl = None
self.router = None

self.companion_bridges: dict[int, object] = {}
self.companion_frame_servers: list = []

log_level = config.get("logging", {}).get("level", "INFO")
logging.basicConfig(
@@ -57,36 +68,40 @@ class RepeaterDaemon:
logger.info(f"Initializing radio hardware... (radio_type={radio_type})")
try:
self.radio = get_radio_for_board(self.config)

if hasattr(self.radio, 'set_custom_cad_thresholds'):

# KISS modem: schedule RX callbacks on the event loop for thread safety
if hasattr(self.radio, "set_event_loop"):
self.radio.set_event_loop(asyncio.get_running_loop())

if hasattr(self.radio, "set_custom_cad_thresholds"):
# Load CAD settings from config, with defaults
cad_config = self.config.get("radio", {}).get("cad", {})
peak_threshold = cad_config.get("peak_threshold", 23)
min_threshold = cad_config.get("min_threshold", 11)

self.radio.set_custom_cad_thresholds(peak=peak_threshold, min_val=min_threshold)
logger.info(f"CAD thresholds set from config: peak={peak_threshold}, min={min_threshold}")
logger.info(
f"CAD thresholds set from config: peak={peak_threshold}, min={min_threshold}"
)
else:
logger.warning("Radio does not support CAD configuration")

if hasattr(self.radio, 'get_frequency'):
if hasattr(self.radio, "get_frequency"):
logger.info(f"Radio config - Freq: {self.radio.get_frequency():.1f}MHz")
if hasattr(self.radio, 'get_spreading_factor'):
if hasattr(self.radio, "get_spreading_factor"):
logger.info(f"Radio config - SF: {self.radio.get_spreading_factor()}")
if hasattr(self.radio, 'get_bandwidth'):
if hasattr(self.radio, "get_bandwidth"):
logger.info(f"Radio config - BW: {self.radio.get_bandwidth()}kHz")
if hasattr(self.radio, 'get_coding_rate'):
if hasattr(self.radio, "get_coding_rate"):
logger.info(f"Radio config - CR: {self.radio.get_coding_rate()}")
if hasattr(self.radio, 'get_tx_power'):
if hasattr(self.radio, "get_tx_power"):
logger.info(f"Radio config - TX Power: {self.radio.get_tx_power()}dBm")

logger.info("Radio hardware initialized")
except Exception as e:
logger.error(f"Failed to initialize radio hardware: {e}")
raise RuntimeError("Repeater requires real LoRa hardware") from e

try:
from pymc_core import LocalIdentity
from pymc_core.node.dispatcher import Dispatcher
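set_event_loop exists because a serial KISS reader delivers frames from its own thread, while the daemon's handlers are coroutines; the standard bridge is asyncio.run_coroutine_threadsafe against the loop captured at startup. A generic sketch of that pattern (class and method names here are illustrative, not the pymc_core API):

# Sketch: hand RX frames from a reader thread to the asyncio loop (illustrative only).
import asyncio
import threading

class SerialRx:
    def __init__(self, on_frame):
        self.loop = None
        self.on_frame = on_frame                  # async callback(frame: bytes)
    def set_event_loop(self, loop):
        self.loop = loop                          # captured once, on the loop thread
    def _reader_thread(self, frames):
        for frame in frames:                      # stands in for blocking serial reads
            asyncio.run_coroutine_threadsafe(self.on_frame(frame), self.loop)

async def main():
    async def on_frame(frame):
        print("rx", frame.hex())
    rx = SerialRx(on_frame)
    rx.set_event_loop(asyncio.get_running_loop())
    t = threading.Thread(target=rx._reader_thread, args=([b"\x01\x02"],))
    t.start(); t.join()
    await asyncio.sleep(0.1)                      # let the scheduled coroutine run

asyncio.run(main())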
@@ -110,7 +125,8 @@ class RepeaterDaemon:

pubkey = local_identity.get_public_key()
self.local_hash = pubkey[0]

self.local_hash_bytes = bytes(pubkey[:3])

logger.info(f"Local identity set: {local_identity.get_address_bytes().hex()}")
local_hash_hex = f"0x{self.local_hash:02x}"
logger.info(f"Local node hash (from identity): {local_hash_hex}")
@@ -121,13 +137,15 @@ class RepeaterDaemon:
self.dispatcher._is_own_packet = lambda pkt: False

self.repeater_handler = RepeaterHandler(
self.config, self.dispatcher, self.local_hash, send_advert_func=self.send_advert
self.config, self.dispatcher, self.local_hash,
local_hash_bytes=self.local_hash_bytes,
send_advert_func=self.send_advert,
)

# Create router
self.router = PacketRouter(self)
await self.router.start()

# Register router as entry point for ALL packets via fallback handler
# All received packets flow through router → helpers → repeater engine
self.dispatcher.register_fallback_handler(self._router_callback)
@@ -141,7 +159,7 @@ class RepeaterDaemon:
log_fn=logger.info,
)
logger.info("Trace processing helper initialized")

# Create advert helper for neighbor tracking
self.advert_helper = AdvertHelper(
local_identity=self.local_identity,
@@ -159,6 +177,7 @@ class RepeaterDaemon:
packet_injector=self.router.inject_packet,
node_type=2,
log_fn=logger.info,
debug_log_fn=logger.debug,
)
logger.info("Discovery processing helper initialized")
else:
@@ -170,73 +189,81 @@ class RepeaterDaemon:
packet_injector=self.router.inject_packet,
log_fn=logger.info,
)

# Register default repeater identity
self.login_helper.register_identity(
name="repeater",
identity=self.local_identity,
identity_type="repeater",
config=self.config  # Pass full config so repeater can access top-level security section
config=self.config,  # Pass full config so repeater can access top-level security section
)

# Register room server identities with their configs
for name, identity, config in self.identity_manager.get_identities_by_type("room_server"):
for name, identity, config in self.identity_manager.get_identities_by_type(
"room_server"
):
self.login_helper.register_identity(
name=name,
identity=identity,
name=name,
identity=identity,
identity_type="room_server",
config=config  # Pass room-specific config
config=config,  # Pass room-specific config
)

logger.info("Login processing helper initialized")

# Initialize ConfigManager for centralized config management
self.config_manager = ConfigManager(
config_path=getattr(self, 'config_path', '/etc/pymc_repeater/config.yaml'),
config_path=getattr(self, "config_path", "/etc/pymc_repeater/config.yaml"),
config=self.config,
daemon_instance=self
daemon_instance=self,
)
logger.info("Config manager initialized")

# Initialize text message helper with per-identity ACLs
self.text_helper = TextHelper(
identity_manager=self.identity_manager,
packet_injector=self.router.inject_packet,
acl_dict=self.login_helper.get_acl_dict(),  # Per-identity ACLs
log_fn=logger.info,
config_path=getattr(self, 'config_path', None),  # For CLI to save changes
config_path=getattr(self, "config_path", None),  # For CLI to save changes
config=self.config,  # For CLI to read/modify settings
config_manager=self.config_manager,  # New centralized config manager
sqlite_handler=self.repeater_handler.storage.sqlite_handler if self.repeater_handler and self.repeater_handler.storage else None,  # For room server database
sqlite_handler=(
self.repeater_handler.storage.sqlite_handler
if self.repeater_handler and self.repeater_handler.storage
else None
),  # For room server database
send_advert_callback=self.send_advert,  # For CLI advert command
)

# Register default repeater identity for text messages
self.text_helper.register_identity(
name="repeater",
identity=self.local_identity,
identity_type="repeater",
radio_config=self.config.get("radio", {})
radio_config=self.config.get("radio", {}),
)

# Register room server identities for text messages
for name, identity, config in self.identity_manager.get_identities_by_type("room_server"):
for name, identity, config in self.identity_manager.get_identities_by_type(
"room_server"
):
self.text_helper.register_identity(
name=name,
identity=identity,
identity_type="room_server",
radio_config=config  # Pass room-specific config (includes max_posts, etc.)
radio_config=config,  # Pass room-specific config (includes max_posts, etc.)
)

logger.info("Text message processing helper initialized")

# Initialize PATH packet helper for updating client out_path
self.path_helper = PathHelper(
acl_dict=self.login_helper.get_acl_dict(),  # Per-identity ACLs
log_fn=logger.info,
)
logger.info("PATH packet processing helper initialized")

# Initialize protocol request handler for status/telemetry requests
self.protocol_request_helper = ProtocolRequestHelper(
identity_manager=self.identity_manager,
@@ -248,34 +275,44 @@ class RepeaterDaemon:
)
# Register repeater identity for protocol requests
self.protocol_request_helper.register_identity(
name="repeater",
identity=self.local_identity,
identity_type="repeater"
name="repeater", identity=self.local_identity, identity_type="repeater"
)
logger.info("Protocol request handler initialized")

# Load companion identities (CompanionBridge + frame server per companion)
await self._load_companion_identities()

# Subscribe to raw RX in pyMC_core so we can push PUSH_CODE_LOG_RX_DATA to companion clients
self.dispatcher.add_raw_rx_subscriber(self._on_raw_rx_for_companions)
n = len(getattr(self, "companion_frame_servers", []))
logger.info(
"Raw RX subscriber registered (%s companion frame server(s)). Connect a client to see rx_log (0x88).",
n,
)

# When trace reaches final node, push PUSH_CODE_TRACE_DATA (0x89) to companion clients (firmware onTraceRecv)
self.trace_helper.on_trace_complete = self._on_trace_complete_for_companions

except Exception as e:
logger.error(f"Failed to initialize dispatcher: {e}")
raise

async def _load_additional_identities(self):
from pymc_core import LocalIdentity

identities_config = self.config.get("identities", {})

# Load room server identities
room_servers = identities_config.get("room_servers") or []
for room_config in room_servers:
try:
name = room_config.get("name")
identity_key = room_config.get("identity_key")

if not name or not identity_key:
logger.warning(
f"Skipping room server config: missing name or identity_key"
)
logger.warning(f"Skipping room server config: missing name or identity_key")
continue

# Convert identity_key to bytes if it's a hex string
if isinstance(identity_key, bytes):
identity_key_bytes = identity_key
@@ -283,46 +320,429 @@ class RepeaterDaemon:
try:
identity_key_bytes = bytes.fromhex(identity_key)
if len(identity_key_bytes) != 32:
logger.error(f"Identity key for '{name}' is invalid length: {len(identity_key_bytes)} bytes (expected 32)")
logger.error(
f"Identity key for '{name}' is invalid length: {len(identity_key_bytes)} bytes (expected 32)"
)
continue
except ValueError as e:
logger.error(f"Identity key for '{name}' is not valid hex: {e}")
continue
else:
logger.error(f"Identity key for '{name}' has unknown type: {type(identity_key)}")
logger.error(
f"Identity key for '{name}' has unknown type: {type(identity_key)}"
)
continue

# Create the identity
room_identity = LocalIdentity(seed=identity_key_bytes)

# Register with the manager and all helpers
success = self._register_identity_everywhere(
name=name,
identity=room_identity,
config=room_config,
identity_type="room_server"
identity_type="room_server",
)

if success:
room_hash = room_identity.get_public_key()[0]
logger.info(
f"Loaded room server '{name}': hash=0x{room_hash:02x}, "
f"address={room_identity.get_address_bytes().hex()}"
)

except Exception as e:
logger.error(f"Failed to load room server identity '{name}': {e}")

# Summary logging
total_identities = len(self.identity_manager.list_identities())
logger.info(f"Identity manager loaded {total_identities} total identities")
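Room server identity_key values are 32-byte seeds written as 64 hex characters. A quick sketch of producing one with the standard library and applying the same validation the loader performs above:

# Sketch: generate and validate a 32-byte identity_key (hex-encoded).
import secrets

identity_key = secrets.token_hex(32)          # 64 hex chars -> 32 bytes
key_bytes = bytes.fromhex(identity_key)       # raises ValueError on bad hex
assert len(key_bytes) == 32, "identity_key must decode to exactly 32 bytes"
print(identity_key)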
def _register_identity_everywhere(
async def _load_companion_identities(self) -> None:
"""Load companion identities from config and create CompanionBridge + frame server for each."""
from pymc_core import LocalIdentity
from pymc_core.companion.models import Channel, Contact

from repeater.companion import CompanionFrameServer, RepeaterCompanionBridge

companions_config = self.config.get("identities", {}).get("companions") or []
if not companions_config:
return

sqlite_handler = None
if self.repeater_handler and self.repeater_handler.storage:
sqlite_handler = self.repeater_handler.storage.sqlite_handler

radio_config = (
self.repeater_handler.radio_config
if self.repeater_handler
else self.config.get("radio", {})
)

for comp_config in companions_config:
try:
name = comp_config.get("name")
identity_key = comp_config.get("identity_key")
settings = comp_config.get("settings") or {}

if not name or not identity_key:
logger.warning("Skipping companion config: missing name or identity_key")
continue

if isinstance(identity_key, str):
try:
identity_key_bytes = bytes.fromhex(identity_key)
except ValueError as e:
logger.error(f"Companion '{name}' identity_key invalid hex: {e}")
continue
elif isinstance(identity_key, bytes):
identity_key_bytes = identity_key
else:
logger.error(f"Companion '{name}' identity_key has unknown type")
continue

if len(identity_key_bytes) not in (32, 64):
logger.error(
f"Companion '{name}' identity_key must be 32 bytes (hex) or 64 bytes (MeshCore firmware key)"
)
continue

identity = LocalIdentity(seed=identity_key_bytes)
pubkey = identity.get_public_key()
companion_hash = pubkey[0]
companion_hash_str = f"0x{companion_hash:02x}"

node_name = settings.get("node_name", name)
tcp_port = settings.get("tcp_port", 5000)
bind_address = settings.get("bind_address", "0.0.0.0")

def _make_sync_node_name_to_config(companion_name: str):
"""Return a callback that syncs node_name to config for this companion (binds name at creation)."""
def _sync(new_node_name: str) -> None:
try:
validated = validate_companion_node_name(new_node_name)
except ValueError:
return
companions = (self.config.get("identities") or {}).get("companions") or []
for entry in companions:
if entry.get("name") == companion_name:
if "settings" not in entry:
entry["settings"] = {}
entry["settings"]["node_name"] = validated
config_path = getattr(self, "config_path", None)
if config_path:
save_config(self.config, config_path)
break
return _sync

bridge = RepeaterCompanionBridge(
identity=identity,
packet_injector=self.router.inject_packet,
node_name=node_name,
radio_config=radio_config,
sqlite_handler=sqlite_handler,
companion_hash=companion_hash_str,
on_prefs_saved=_make_sync_node_name_to_config(name),
)

# Load contacts from SQLite
if sqlite_handler:
contact_rows = sqlite_handler.companion_load_contacts(companion_hash_str)
if contact_rows:
records = []
for row in contact_rows:
d = dict(row)
d["public_key"] = d.pop("pubkey", d.get("public_key", b""))
records.append(d)
bridge.contacts.load_from_dicts(records)

# Load channels from SQLite (normalize secret to 32 bytes to match
# CompanionBase.set_channel and GroupTextHandler/PacketBuilder)
channel_rows = sqlite_handler.companion_load_channels(companion_hash_str)
for row in channel_rows:
s = row.get("secret", b"")
if isinstance(s, bytes):
raw = s
elif isinstance(s, (bytearray, memoryview)):
raw = bytes(s)
elif s:
raw = bytes.fromhex(s if isinstance(s, str) else str(s))
else:
raw = b""
if len(raw) < 32:
raw = raw + b"\x00" * (32 - len(raw))
elif len(raw) > 32:
raw = raw[:32]
ch = Channel(name=row.get("name", ""), secret=raw)
bridge.channels.set(row.get("channel_idx", 0), ch)

# Preload queued messages from SQLite into bridge
for msg_dict in sqlite_handler.companion_load_messages(companion_hash_str):
from pymc_core.companion.models import QueuedMessage

sk = msg_dict.get("sender_key", b"")
if isinstance(sk, str):
sk = bytes.fromhex(sk)
bridge.message_queue.push(
QueuedMessage(
sender_key=sk,
txt_type=msg_dict.get("txt_type", 0),
timestamp=msg_dict.get("timestamp", 0),
text=msg_dict.get("text", ""),
is_channel=bool(msg_dict.get("is_channel", False)),
channel_idx=msg_dict.get("channel_idx", 0),
path_len=msg_dict.get("path_len", 0),
)
)

# Ensure public channel (0) exists with default key for new companions
from repeater.companion.constants import DEFAULT_PUBLIC_CHANNEL_SECRET

if bridge.get_channel(0) is None:
bridge.set_channel(0, "Public", DEFAULT_PUBLIC_CHANNEL_SECRET)

self.companion_bridges[companion_hash] = bridge

frame_server = CompanionFrameServer(
bridge=bridge,
companion_hash=companion_hash_str,
port=tcp_port,
bind_address=bind_address,
sqlite_handler=sqlite_handler,
local_hash=self.local_hash,
stats_getter=self._get_companion_stats,
control_handler=(
self.discovery_helper.control_handler if self.discovery_helper else None
),
)
await frame_server.start()
self.companion_frame_servers.append(frame_server)

self.identity_manager.register_identity(
name=name,
identity=identity,
config=comp_config,
identity_type="companion",
)

logger.info(
f"Loaded companion '{name}': hash=0x{companion_hash:02x}, "
f"port={tcp_port}, bind={bind_address}"
)

except Exception as e:
logger.error(f"Failed to load companion '{name}': {e}", exc_info=True)
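The secret-normalization loop above coerces whatever SQLite returns (bytes, bytearray/memoryview, or a hex string) into exactly 32 bytes, zero-padding short keys and truncating long ones. The same logic factored out as a sketch:

# Sketch: the 32-byte channel-secret normalization used when loading channels.
def normalize_channel_secret(s) -> bytes:
    if isinstance(s, bytes):
        raw = s
    elif isinstance(s, (bytearray, memoryview)):
        raw = bytes(s)
    elif s:
        raw = bytes.fromhex(s if isinstance(s, str) else str(s))
    else:
        raw = b""
    return raw.ljust(32, b"\x00")[:32]   # pad short, truncate long

assert normalize_channel_secret("ff00") == b"\xff\x00" + b"\x00" * 30
assert len(normalize_channel_secret(b"\x01" * 40)) == 32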
async def add_companion_from_config(self, comp_config: dict) -> None:
"""
Load a single companion from config and register it (hot-reload).
Creates RepeaterCompanionBridge, CompanionFrameServer, starts the server,
and registers with identity_manager. Raises on error.
"""
from pymc_core import LocalIdentity
from pymc_core.companion.models import Channel

from repeater.companion import CompanionFrameServer, RepeaterCompanionBridge
from repeater.companion.constants import DEFAULT_PUBLIC_CHANNEL_SECRET

name = comp_config.get("name")
identity_key = comp_config.get("identity_key")
settings = comp_config.get("settings") or {}

if not name or not identity_key:
raise ValueError("Companion config missing name or identity_key")

if isinstance(identity_key, str):
try:
identity_key_bytes = bytes.fromhex(identity_key)
except ValueError as e:
raise ValueError(f"Companion '{name}' identity_key invalid hex: {e}") from e
elif isinstance(identity_key, bytes):
identity_key_bytes = identity_key
else:
raise ValueError(f"Companion '{name}' identity_key has unknown type")

if len(identity_key_bytes) not in (32, 64):
raise ValueError(
f"Companion '{name}' identity_key must be 32 bytes (hex) or 64 bytes (MeshCore firmware key)"
)

# Already registered?
if name in self.identity_manager.named_identities:
raise ValueError(f"Companion '{name}' is already registered")

identity = LocalIdentity(seed=identity_key_bytes)
pubkey = identity.get_public_key()
companion_hash = pubkey[0]
companion_hash_str = f"0x{companion_hash:02x}"

if companion_hash in self.companion_bridges:
raise ValueError(f"Companion with hash 0x{companion_hash:02x} already loaded")

sqlite_handler = None
if self.repeater_handler and self.repeater_handler.storage:
sqlite_handler = self.repeater_handler.storage.sqlite_handler

radio_config = (
self.repeater_handler.radio_config
if self.repeater_handler
else self.config.get("radio", {})
)

node_name = settings.get("node_name", name)
tcp_port = settings.get("tcp_port", 5000)
bind_address = settings.get("bind_address", "0.0.0.0")

bridge = RepeaterCompanionBridge(
identity=identity,
packet_injector=self.router.inject_packet,
node_name=node_name,
radio_config=radio_config,
sqlite_handler=sqlite_handler,
companion_hash=companion_hash_str,
)

if sqlite_handler:
contact_rows = sqlite_handler.companion_load_contacts(companion_hash_str)
if contact_rows:
records = []
for row in contact_rows:
d = dict(row)
d["public_key"] = d.pop("pubkey", d.get("public_key", b""))
records.append(d)
bridge.contacts.load_from_dicts(records)

channel_rows = sqlite_handler.companion_load_channels(companion_hash_str)
for row in channel_rows:
s = row.get("secret", b"")
if isinstance(s, bytes):
raw = s
elif isinstance(s, (bytearray, memoryview)):
raw = bytes(s)
elif s:
raw = bytes.fromhex(s if isinstance(s, str) else str(s))
else:
raw = b""
if len(raw) < 32:
raw = raw + b"\x00" * (32 - len(raw))
elif len(raw) > 32:
raw = raw[:32]
ch = Channel(name=row.get("name", ""), secret=raw)
bridge.channels.set(row.get("channel_idx", 0), ch)

for msg_dict in sqlite_handler.companion_load_messages(companion_hash_str):
from pymc_core.companion.models import QueuedMessage

sk = msg_dict.get("sender_key", b"")
if isinstance(sk, str):
sk = bytes.fromhex(sk)
bridge.message_queue.push(
QueuedMessage(
sender_key=sk,
txt_type=msg_dict.get("txt_type", 0),
timestamp=msg_dict.get("timestamp", 0),
text=msg_dict.get("text", ""),
is_channel=bool(msg_dict.get("is_channel", False)),
channel_idx=msg_dict.get("channel_idx", 0),
path_len=msg_dict.get("path_len", 0),
)
)

if bridge.get_channel(0) is None:
bridge.set_channel(0, "Public", DEFAULT_PUBLIC_CHANNEL_SECRET)

self.companion_bridges[companion_hash] = bridge

frame_server = CompanionFrameServer(
bridge=bridge,
companion_hash=companion_hash_str,
port=tcp_port,
bind_address=bind_address,
sqlite_handler=sqlite_handler,
local_hash=self.local_hash,
stats_getter=self._get_companion_stats,
control_handler=(
self.discovery_helper.control_handler if self.discovery_helper else None
),
)
await frame_server.start()
self.companion_frame_servers.append(frame_server)

self.identity_manager.register_identity(
name=name,
identity=identity,
config=comp_config,
identity_type="companion",
)

logger.info(
f"Hot-reload: Loaded companion '{name}': hash=0x{companion_hash:02x}, "
f"port={tcp_port}, bind={bind_address}"
)
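add_companion_from_config mirrors the startup loader but raises instead of logging, so a config-API caller can surface the failure. A hedged usage sketch (the daemon argument is the RepeaterDaemon instance; the name, key, and port values are hypothetical examples):

# Sketch: hot-adding a companion at runtime (example values are hypothetical).
async def hot_add(daemon):
    comp_config = {
        "name": "FieldCompanion",
        "identity_key": "00" * 32,                 # 32-byte key as 64 hex chars
        "settings": {"node_name": "Field Companion", "tcp_port": 5001},
    }
    try:
        await daemon.add_companion_from_config(comp_config)
    except ValueError as e:
        print(f"Rejected: {e}")                    # duplicate name/hash or bad key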
async def _on_raw_rx_for_companions(self, data: bytes, rssi: int, snr: float) -> None:
"""Raw RX subscriber: push PUSH_CODE_LOG_RX_DATA (0x88) to connected companion clients."""
servers = getattr(self, "companion_frame_servers", [])
if not servers:
return
for fs in servers:
try:
fs.push_rx_raw(snr, rssi, data)
except Exception as e:
logger.debug("Push RX raw to companion: %s", e)

async def deliver_control_data(
self,
name: str,
identity,
config: dict,
identity_type: str
snr: float,
rssi: int,
path_len: int,
path_bytes: bytes,
payload_bytes: bytes,
) -> None:
"""Deliver CONTROL payload (e.g. discovery response) to companion clients (PUSH_CODE_CONTROL_DATA 0x8E)."""
# Only push discovery responses (0x90); client expects these, not the request (0x80)
if len(payload_bytes) < 6 or (payload_bytes[0] & 0xF0) != 0x90:
return
# Push every discovery response to the client, including our own (snr=0, rssi=0 = local node's response)
servers = getattr(self, "companion_frame_servers", [])
if not servers:
return
tag = int.from_bytes(payload_bytes[2:6], "little") if len(payload_bytes) >= 6 else 0
logger.debug(
"Delivering discovery response to %s companion(s): tag=0x%08X, len=%s",
len(servers),
tag,
len(payload_bytes),
)
for fs in servers:
try:
await fs.push_control_data(snr, rssi, path_len, path_bytes, payload_bytes)
except Exception as e:
logger.warning("Companion push_control_data error: %s", e)
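The filter above keys off the high nibble of the first CONTROL byte (0x9_ = discovery response, 0x8_ = request) and reads the 32-bit tag little-endian from bytes 2..5. A standalone sketch of that parse:

# Sketch: classify a CONTROL payload the way deliver_control_data does.
def parse_control(payload: bytes):
    if len(payload) < 6:
        return None
    if (payload[0] & 0xF0) != 0x90:              # not a discovery response
        return None
    tag = int.from_bytes(payload[2:6], "little")
    return {"tag": tag}

assert parse_control(bytes([0x90, 0x00, 0x78, 0x56, 0x34, 0x12])) == {"tag": 0x12345678}
assert parse_control(bytes([0x80, 0x00, 0x00, 0x00, 0x00, 0x00])) is None   # request, skipped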
async def _on_trace_complete_for_companions(self, packet, parsed_data) -> None:
"""Trace completed at this node: push PUSH_CODE_TRACE_DATA (0x89) to companion clients (firmware onTraceRecv)."""
path_len = len(parsed_data.get("trace_path", []))
if path_len == 0:
return
path_hashes = bytes(parsed_data["trace_path"])
flags = parsed_data.get("flags", 0)
tag = parsed_data.get("tag", 0)
auth_code = parsed_data.get("auth_code", 0)
# path_snrs: exactly path_len bytes = (path_len-1) from forwarding hops + 1 (our receive SNR)
snr_scaled = max(-128, min(127, int(round(packet.get_snr() * 4))))
snr_byte = snr_scaled if snr_scaled >= 0 else (256 + snr_scaled)
path_snrs = bytes(packet.path)[: path_len - 1] + bytes([snr_byte])
for fs in getattr(self, "companion_frame_servers", []):
try:
fs.push_trace_data(
path_len, flags, tag, auth_code, path_hashes, path_snrs, snr_byte
)
except Exception as e:
logger.debug("Push trace data to companion: %s", e)
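Trace SNR values travel as one signed byte in quarter-dB steps: multiply dB by 4, clamp to [-128, 127], and store as two's complement, exactly as computed above. A round-trip sketch:

# Sketch: the quarter-dB SNR byte encoding used for trace paths.
def snr_db_to_byte(snr_db: float) -> int:
    scaled = max(-128, min(127, int(round(snr_db * 4))))
    return scaled if scaled >= 0 else 256 + scaled   # two's complement byte

def snr_byte_to_db(b: int) -> float:
    return (b - 256 if b >= 128 else b) / 4.0

assert snr_db_to_byte(7.5) == 30
assert snr_byte_to_db(snr_db_to_byte(-3.25)) == -3.25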
def _register_identity_everywhere(
self, name: str, identity, config: dict, identity_type: str
) -> bool:
"""
Register an identity with the manager and all helpers in one place.
@@ -330,39 +750,31 @@ class RepeaterDaemon:
"""
# Register with identity manager
success = self.identity_manager.register_identity(
name=name,
identity=identity,
config=config,
identity_type=identity_type
name=name, identity=identity, config=config, identity_type=identity_type
)

if not success:
return False

# Register with all helpers
if self.login_helper:
self.login_helper.register_identity(
name=name,
identity=identity,
identity_type=identity_type,
config=config
name=name, identity=identity, identity_type=identity_type, config=config
)

if self.text_helper:
self.text_helper.register_identity(
name=name,
identity=identity,
identity_type=identity_type,
radio_config=self.config.get("radio", {})
radio_config=self.config.get("radio", {}),
)

if self.protocol_request_helper:
self.protocol_request_helper.register_identity(
name=name,
identity=identity,
identity_type=identity_type
name=name, identity=identity, identity_type=identity_type
)

return True

async def _router_callback(self, packet):
@@ -375,19 +787,15 @@ class RepeaterDaemon:
await self.router.enqueue(packet)
except Exception as e:
logger.error(f"Error enqueuing packet in router: {e}", exc_info=True)

def register_text_handler_for_identity(
self,
name: str,
identity,
identity_type: str = "room_server",
radio_config: dict = None
self, name: str, identity, identity_type: str = "room_server", radio_config: dict = None
):

if not self.text_helper:
logger.warning("Text helper not initialized, cannot register identity")
return False

try:
self.text_helper.register_identity(
name=name,
@@ -400,10 +808,10 @@ class RepeaterDaemon:
except Exception as e:
logger.error(f"Failed to register text handler for '{name}': {e}")
return False

def get_stats(self) -> dict:
stats = {}

if self.repeater_handler:
stats = self.repeater_handler.get_stats()
# Add public key if available
@@ -413,9 +821,62 @@ class RepeaterDaemon:
stats["public_key"] = pubkey.hex()
except Exception:
stats["public_key"] = None

return stats

async def _get_companion_stats(self, stats_type: int) -> dict:
"""Return stats dict for companion CMD_GET_STATS (format expected by frame_server + meshcore_py)."""
from repeater.companion.constants import (
STATS_TYPE_CORE,
STATS_TYPE_PACKETS,
STATS_TYPE_RADIO,
)

if not self.repeater_handler:
return {}
engine = self.repeater_handler
airtime = engine.airtime_mgr.get_stats()
uptime_secs = int(time.time() - engine.start_time)
queue_len = 0
for bridge in getattr(self, "companion_bridges", {}).values():
queue_len += getattr(getattr(bridge, "message_queue", None), "count", 0) or 0
if stats_type == STATS_TYPE_CORE:
return {
"battery_mv": 0,
"uptime_secs": uptime_secs,
"errors": 0,
"queue_len": min(255, queue_len),
}
if stats_type == STATS_TYPE_RADIO:
noise_floor = int(engine.get_noise_floor() or 0)
radio = getattr(self, "dispatcher", None) and getattr(self.dispatcher, "radio", None)
if radio:
_r = getattr(radio, "get_last_rssi", lambda: 0)
_s = getattr(radio, "get_last_snr", lambda: 0.0)
last_rssi = _r() if callable(_r) else _r
last_snr = _s() if callable(_s) else _s
else:
last_rssi, last_snr = 0, 0.0
tx_air_secs = int(airtime.get("total_airtime_ms", 0) / 1000)
return {
"noise_floor": noise_floor,
"last_rssi": int(last_rssi) if last_rssi is not None else 0,
"last_snr": float(last_snr) if last_snr is not None else 0.0,
"tx_air_secs": tx_air_secs,
"rx_air_secs": 0,
}
if stats_type == STATS_TYPE_PACKETS:
return {
"recv": getattr(engine, "rx_count", 0),
"sent": getattr(engine, "forwarded_count", 0),
"flood_tx": getattr(engine, "forwarded_count", 0),
"direct_tx": 0,
"flood_rx": getattr(engine, "rx_count", 0),
"direct_rx": 0,
"recv_errors": getattr(engine, "dropped_count", 0),
}
return {}
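_get_companion_stats leans on getattr with defaults so it degrades to zeros instead of raising when the radio or engine lacks a probe; the radio branch above also tolerates get_last_rssi being either a method or a plain attribute. A compact sketch of that tolerant read:

# Sketch: read a value that may be a method, an attribute, or missing entirely.
def tolerant_read(obj, attr: str, default):
    val = getattr(obj, attr, default)
    return val() if callable(val) else val

class RadioA:
    def get_last_rssi(self):
        return -92

class RadioB:
    get_last_rssi = -97                   # plain attribute, not a method

assert tolerant_read(RadioA(), "get_last_rssi", 0) == -92
assert tolerant_read(RadioB(), "get_last_rssi", 0) == -97
assert tolerant_read(object(), "get_last_rssi", 0) == 0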
async def send_advert(self) -> bool:

if not self.dispatcher or not self.local_identity:
@@ -545,7 +1006,7 @@ class RepeaterDaemon:
config=self.config,
event_loop=current_loop,
daemon_instance=self,
config_path=getattr(self, 'config_path', '/etc/pymc_repeater/config.yaml'),
config_path=getattr(self, "config_path", "/etc/pymc_repeater/config.yaml"),
)

try:
@@ -558,7 +1019,22 @@ class RepeaterDaemon:
await self.dispatcher.run_forever()
except KeyboardInterrupt:
logger.info("Shutting down...")

for frame_server in getattr(self, "companion_frame_servers", []):
try:
await frame_server.stop()
except Exception as e:
logger.debug(f"Companion frame server stop: {e}")
if hasattr(self, "companion_bridges"):
for bridge in self.companion_bridges.values():
if hasattr(bridge, "stop"):
try:
await bridge.stop()
except Exception as e:
logger.debug(f"Companion bridge stop: {e}")
if self.router:
await self.router.stop()
if self.http_server:
self.http_server.stop()
finally:
await self._shutdown()

@@ -582,14 +1058,13 @@ def main():

# Load configuration
config = load_config(args.config)
config_path = args.config if args.config else '/etc/pymc_repeater/config.yaml'
config_path = args.config if args.config else "/etc/pymc_repeater/config.yaml"

if args.log_level:
if "logging" not in config:
config["logging"] = {}
config["logging"]["level"] = args.log_level

# Don't initialize radio here - it will be done inside the async event loop
daemon = RepeaterDaemon(config, radio=None)
daemon.config_path = config_path

+214
-31
@@ -1,16 +1,34 @@
import asyncio
import logging
import time

from pymc_core.node.handlers.trace import TraceHandler
from pymc_core.node.handlers.control import ControlHandler
from pymc_core.node.handlers.ack import AckHandler
from pymc_core.node.handlers.advert import AdvertHandler
from pymc_core.node.handlers.control import ControlHandler
from pymc_core.node.handlers.group_text import GroupTextHandler
from pymc_core.node.handlers.login_response import LoginResponseHandler
from pymc_core.node.handlers.login_server import LoginServerHandler
from pymc_core.node.handlers.text import TextMessageHandler
from pymc_core.node.handlers.path import PathHandler
from pymc_core.node.handlers.protocol_request import ProtocolRequestHandler
from pymc_core.node.handlers.protocol_response import ProtocolResponseHandler
from pymc_core.node.handlers.text import TextMessageHandler
from pymc_core.node.handlers.trace import TraceHandler

logger = logging.getLogger("PacketRouter")

# Deliver PATH and protocol-response (PATH) to companion at most once per logical packet
# so the client is not spammed with duplicate telemetry when the mesh delivers multiple copies.
_COMPANION_DEDUPE_TTL_SEC = 60.0

def _companion_dedup_key(packet) -> str | None:
"""Return a stable key for companion delivery deduplication, or None if not available."""
try:
return packet.calculate_packet_hash().hex().upper()
except Exception:
return None

class PacketRouter:

def __init__(self, daemon_instance):
@@ -18,7 +36,11 @@ class PacketRouter:
self.queue = asyncio.Queue()
self.running = False
self.router_task = None

# Serialize injects so one local TX completes before the next is processed
self._inject_lock = asyncio.Lock()
# Hash -> expiry time; skip delivering same PATH/protocol-response to companions more than once
self._companion_delivered = {}

async def start(self):
self.running = True
self.router_task = asyncio.create_task(self._process_queue())
@@ -34,6 +56,19 @@ class PacketRouter:
pass
logger.info("Packet router stopped")

def _should_deliver_path_to_companions(self, packet) -> bool:
"""Return True if this PATH/protocol-response should be delivered to companions (first of duplicates)."""
key = _companion_dedup_key(packet)
if not key:
return True
now = time.time()
# Prune expired
self._companion_delivered = {k: v for k, v in self._companion_delivered.items() if v > now}
if key in self._companion_delivered:
return False
self._companion_delivered[key] = now + _COMPANION_DEDUPE_TTL_SEC
return True
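The dedup table is just {packet_hash: expiry}: the first copy within the 60 s TTL passes, later copies are dropped, and expired entries are pruned on every check. The same idea in isolation:

# Sketch: first-copy-wins dedup with a TTL, as in _should_deliver_path_to_companions.
import time

class TtlDedup:
    def __init__(self, ttl: float = 60.0):
        self.ttl = ttl
        self.seen: dict[str, float] = {}    # key -> expiry timestamp
    def first_copy(self, key: str) -> bool:
        now = time.time()
        self.seen = {k: v for k, v in self.seen.items() if v > now}   # prune expired
        if key in self.seen:
            return False
        self.seen[key] = now + self.ttl
        return True

d = TtlDedup(ttl=0.05)
assert d.first_copy("ABC") is True
assert d.first_copy("ABC") is False      # duplicate inside TTL
time.sleep(0.06)
assert d.first_copy("ABC") is True       # TTL expired, counts as new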
async def enqueue(self, packet):
|
||||
"""Add packet to router queue."""
|
||||
await self.queue.put(packet)
|
||||
@@ -42,17 +77,35 @@ class PacketRouter:
|
||||
try:
|
||||
metadata = {
|
||||
"rssi": getattr(packet, "rssi", 0),
|
||||
"snr": getattr(packet, "snr", 0.0),
|
||||
"snr": getattr(packet, "snr", 0.0),
|
||||
"timestamp": getattr(packet, "timestamp", 0),
|
||||
}
|
||||
|
||||
# Use local_transmission=True to bypass forwarding logic
|
||||
await self.daemon.repeater_handler(packet, metadata, local_transmission=True)
|
||||
|
||||
|
||||
# Serialize injects so one local TX completes before the next runs
|
||||
# (avoids duty-cycle or dispatcher races where a later packet goes out first)
|
||||
async with self._inject_lock:
|
||||
# Use local_transmission=True to bypass forwarding logic
|
||||
await self.daemon.repeater_handler(
|
||||
packet, metadata, local_transmission=True
|
||||
)
|
||||
|
||||
# Enqueue so router can deliver to companion(s): TXT_MSG -> dest bridge, ACK -> all bridges (sender sees ACK)
|
||||
await self.enqueue(packet)
|
||||
|
||||
packet_len = len(packet.payload) if packet.payload else 0
|
||||
logger.debug(f"Injected packet processed by engine as local transmission ({packet_len} bytes)")
|
||||
logger.debug(
|
||||
f"Injected packet processed by engine as local transmission ({packet_len} bytes)"
|
||||
)
|
||||
# Log protocol REQ (e.g. status/telemetry) so we can confirm target node
|
||||
ptype = getattr(packet, "get_payload_type", lambda: None)()
|
||||
if ptype == ProtocolRequestHandler.payload_type() and packet.payload and packet_len >= 1:
|
||||
logger.info(
|
||||
"Injected protocol REQ: dest=0x%02x, payload=%d bytes",
|
||||
packet.payload[0],
|
||||
packet_len,
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error injecting packet through engine: {e}")
|
||||
return False
|
||||
@@ -66,13 +119,12 @@ class PacketRouter:
|
||||
continue
|
||||
except Exception as e:
|
||||
logger.error(f"Router error: {e}", exc_info=True)
|
||||
|
||||
|
||||
|
||||
async def _route_packet(self, packet):
|
||||
|
||||
payload_type = packet.get_payload_type()
|
||||
processed_by_injection = False
|
||||
|
||||
|
||||
# Route to specific handlers for parsing only
|
||||
if payload_type == TraceHandler.payload_type():
|
||||
# Process trace packet
|
||||
@@ -86,43 +138,174 @@ class PacketRouter:
|
||||
if self.daemon.discovery_helper:
|
||||
await self.daemon.discovery_helper.control_handler(packet)
|
||||
packet.mark_do_not_retransmit()
|
||||
|
||||
# Deliver to companions via daemon (frame servers push PUSH_CODE_CONTROL_DATA 0x8E)
|
||||
deliver = getattr(self.daemon, "deliver_control_data", None)
|
||||
if deliver:
|
||||
snr = getattr(packet, "_snr", None) or getattr(packet, "snr", 0.0)
|
||||
rssi = getattr(packet, "_rssi", None) or getattr(packet, "rssi", 0)
|
||||
path_len = getattr(packet, "path_len", 0) or 0
|
||||
path_bytes = (
|
||||
bytes(getattr(packet, "path", []))
|
||||
if getattr(packet, "path", None) is not None
|
||||
else b""
|
||||
)[:path_len]
|
||||
payload_bytes = bytes(packet.payload) if packet.payload else b""
|
||||
await deliver(snr, rssi, path_len, path_bytes, payload_bytes)
|
||||
|
||||
elif payload_type == AdvertHandler.payload_type():
|
||||
# Process advertisement packet for neighbor tracking
|
||||
if self.daemon.advert_helper:
|
||||
rssi = getattr(packet, "rssi", 0)
|
||||
snr = getattr(packet, "snr", 0.0)
|
||||
await self.daemon.advert_helper.process_advert_packet(packet, rssi, snr)
|
||||
|
||||
# Also feed adverts to companion bridges (for contact/path updates)
|
||||
for bridge in getattr(self.daemon, "companion_bridges", {}).values():
|
||||
try:
|
||||
await bridge.process_received_packet(packet)
|
||||
except Exception as e:
|
||||
logger.debug(f"Companion bridge advert error: {e}")
|
||||
|
||||
elif payload_type == LoginServerHandler.payload_type():
|
||||
# Process ANON_REQ login packet for all identities
|
||||
if self.daemon.login_helper:
|
||||
# Route to companion if dest is a companion; else to login_helper (for logging into this repeater).
|
||||
# If dest is remote (no local handler), mark processed so we don't pass our own outbound login TX to the repeater as RX.
|
||||
dest_hash = packet.payload[0] if packet.payload else None
|
||||
companion_bridges = getattr(self.daemon, "companion_bridges", {})
|
||||
if dest_hash is not None and dest_hash in companion_bridges:
|
||||
await companion_bridges[dest_hash].process_received_packet(packet)
|
||||
processed_by_injection = True
|
||||
elif self.daemon.login_helper:
|
||||
handled = await self.daemon.login_helper.process_login_packet(packet)
|
||||
# Only skip forwarding if we actually handled it
|
||||
if handled:
|
||||
processed_by_injection = True
|
||||
|
||||
else:
|
||||
# Login request for remote repeater (we already TXed it via inject); don't treat as RX.
|
||||
processed_by_injection = True
|
||||
|
||||
elif payload_type == AckHandler.payload_type():
|
||||
# ACK has no dest in payload (4-byte CRC only); deliver to all bridges so sender sees send_confirmed
|
||||
companion_bridges = getattr(self.daemon, "companion_bridges", {})
|
||||
for bridge in companion_bridges.values():
|
||||
try:
|
||||
                        await bridge.process_received_packet(packet)
                    except Exception as e:
                        logger.debug(f"Companion bridge ACK error: {e}")
            processed_by_injection = True

        elif payload_type == TextMessageHandler.payload_type():
            # Process TXT_MSG packet for all identities
            if self.daemon.text_helper:
                dest_hash = packet.payload[0] if packet.payload else None
                companion_bridges = getattr(self.daemon, "companion_bridges", {})
                if dest_hash is not None and dest_hash in companion_bridges:
                    await companion_bridges[dest_hash].process_received_packet(packet)
                    processed_by_injection = True
                elif self.daemon.text_helper:
                    handled = await self.daemon.text_helper.process_text_packet(packet)
                    # Only skip forwarding if we actually handled it
                    if handled:
                        processed_by_injection = True

        elif payload_type == PathHandler.payload_type():
            # Process PATH packet to update client out_path for direct routing
            if self.daemon.path_helper:
                dest_hash = packet.payload[0] if packet.payload else None
                companion_bridges = getattr(self.daemon, "companion_bridges", {})
                if dest_hash is not None and dest_hash in companion_bridges:
                    if self._should_deliver_path_to_companions(packet):
                        await companion_bridges[dest_hash].process_received_packet(packet)
                    processed_by_injection = True
                elif companion_bridges and self._should_deliver_path_to_companions(packet):
                    # Dest not in bridges: path-return with ephemeral dest (e.g. multi-hop login).
                    # Deliver to all bridges; each will try to decrypt and ignore if not relevant.
                    for bridge in companion_bridges.values():
                        try:
                            await bridge.process_received_packet(packet)
                        except Exception as e:
                            logger.debug(f"Companion bridge PATH error: {e}")
                    logger.debug(
                        "PATH dest=0x%02x (anon) delivered to %d bridge(s) for matching",
                        dest_hash or 0,
                        len(companion_bridges),
                    )
                    processed_by_injection = True
                elif self.daemon.path_helper:
                    await self.daemon.path_helper.process_path_packet(packet)
                    # Note: process_path_packet returns False to allow forwarding

        elif payload_type == LoginResponseHandler.payload_type():
            # PAYLOAD_TYPE_RESPONSE (0x01): payload is dest_hash(1)+src_hash(1)+encrypted.
            # Deliver to the bridge that is the destination, or to all bridges when the
            # response is addressed to this repeater (path-based reply: firmware sends
            # to first hop instead of original requester).
            dest_hash = packet.payload[0] if packet.payload and len(packet.payload) >= 1 else None
            companion_bridges = getattr(self.daemon, "companion_bridges", {})
            local_hash = getattr(self.daemon, "local_hash", None)
            if dest_hash is not None and dest_hash in companion_bridges:
                try:
                    await companion_bridges[dest_hash].process_received_packet(packet)
                    logger.info(
                        "RESPONSE dest=0x%02x delivered to companion bridge",
                        dest_hash,
                    )
                except Exception as e:
                    logger.debug(f"Companion bridge RESPONSE error: {e}")
                processed_by_injection = True
            elif dest_hash == local_hash and companion_bridges:
                # Response addressed to this repeater (e.g. path-based reply to first hop)
                for bridge in companion_bridges.values():
                    try:
                        await bridge.process_received_packet(packet)
                    except Exception as e:
                        logger.debug(f"Companion bridge RESPONSE error: {e}")
                logger.info(
                    "RESPONSE dest=0x%02x (local) delivered to %d companion bridge(s)",
                    dest_hash,
                    len(companion_bridges),
                )
                processed_by_injection = True
            elif companion_bridges:
                # Dest not in bridges and not local: likely ANON_REQ response (dest = ephemeral
                # sender hash). Deliver to all bridges; each will try to decrypt and ignore if
                # not relevant (firmware-like behavior, works with multiple companion bridges).
                for bridge in companion_bridges.values():
                    try:
                        await bridge.process_received_packet(packet)
                    except Exception as e:
                        logger.debug(f"Companion bridge RESPONSE error: {e}")
                logger.debug(
                    "RESPONSE dest=0x%02x (anon) delivered to %d bridge(s) for matching",
                    dest_hash or 0,
                    len(companion_bridges),
                )
                processed_by_injection = True

        elif payload_type == ProtocolResponseHandler.payload_type():
            # PAYLOAD_TYPE_PATH (0x08): protocol responses (telemetry, binary, etc.).
            # Deliver at most once per logical packet so the client is not spammed with duplicates.
            companion_bridges = getattr(self.daemon, "companion_bridges", {})
            if companion_bridges and self._should_deliver_path_to_companions(packet):
                for bridge in companion_bridges.values():
                    try:
                        await bridge.process_received_packet(packet)
                    except Exception as e:
                        logger.debug(f"Companion bridge RESPONSE error: {e}")
            if companion_bridges:
                processed_by_injection = True

        elif payload_type == ProtocolRequestHandler.payload_type():
            # Process protocol request packet (status, telemetry, neighbors, etc.)
            if self.daemon.protocol_request_helper:
                dest_hash = packet.payload[0] if packet.payload else None
                companion_bridges = getattr(self.daemon, "companion_bridges", {})
                if dest_hash is not None and dest_hash in companion_bridges:
                    await companion_bridges[dest_hash].process_received_packet(packet)
                    processed_by_injection = True
                elif self.daemon.protocol_request_helper:
                    handled = await self.daemon.protocol_request_helper.process_request_packet(packet)
                    if handled:
                        processed_by_injection = True

        elif payload_type == GroupTextHandler.payload_type():
            # GRP_TXT: pass to all companions (they filter by channel); still forward
            companion_bridges = getattr(self.daemon, "companion_bridges", {})
            for bridge in companion_bridges.values():
                try:
                    await bridge.process_received_packet(packet)
                except Exception as e:
                    logger.debug(f"Companion bridge GRP_TXT error: {e}")

        # Only pass to repeater engine if not already processed by injection
        if self.daemon.repeater_handler and not processed_by_injection:
            metadata = {
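Every branch above keys on the first payload byte, which elsewhere in this commit is derived as the first byte of a node's public key (`identity.get_public_key()[0]`). A minimal standalone sketch of the lookup-or-broadcast pattern, with illustrative names that are not from the codebase:

    # Hypothetical helper showing the dispatch pattern used above.
    def pick_targets(payload: bytes, bridges: dict) -> list:
        dest_hash = payload[0] if payload else None
        if dest_hash is not None and dest_hash in bridges:
            return [bridges[dest_hash]]    # exact match: deliver to one bridge
        return list(bridges.values())      # unknown dest: let every bridge try to decrypt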
@@ -2,6 +2,7 @@
Service management utilities for pyMC Repeater.
Provides functions for service control operations like restart.
"""

import logging
import subprocess
from typing import Tuple

@@ -22,12 +23,9 @@ def restart_service() -> Tuple[bool, str]:
    # Try polkit-based restart first (works on bare metal / VMs with polkit running)
    try:
        result = subprocess.run(
            ['systemctl', 'restart', 'pymc-repeater'],
            capture_output=True,
            text=True,
            timeout=5
            ["systemctl", "restart", "pymc-repeater"], capture_output=True, text=True, timeout=5
        )

        if result.returncode == 0:
            logger.info("Service restart via polkit succeeded")
            return True, "Service restart initiated"
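From a caller's side the function is just the declared (ok, message) tuple; a sketch, assuming a configured logger as in the module above:

    ok, msg = restart_service()
    if not ok:
        logger.error(f"Service restart failed: {msg}")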
@@ -1,12 +1,12 @@
from .http_server import HTTPStatsServer, StatsApp, LogBuffer, _log_buffer
from .api_endpoints import APIEndpoints
from .cad_calibration_engine import CADCalibrationEngine
from .http_server import HTTPStatsServer, LogBuffer, StatsApp, _log_buffer

__all__ = [
    'HTTPStatsServer',
    'StatsApp',
    'LogBuffer',
    'APIEndpoints',
    'CADCalibrationEngine',
    '_log_buffer'
]
    "HTTPStatsServer",
    "StatsApp",
    "LogBuffer",
    "APIEndpoints",
    "CADCalibrationEngine",
    "_log_buffer",
]
+1478
-1101
File diff suppressed because it is too large
@@ -1,9 +1,5 @@
from .jwt_handler import JWTHandler
from .api_tokens import APITokenManager
from .jwt_handler import JWTHandler
from .middleware import require_auth

__all__ = [
    'JWTHandler',
    'APITokenManager',
    'require_auth'
]
__all__ = ["JWTHandler", "APITokenManager", "require_auth"]
@@ -1,8 +1,8 @@
import secrets
import hmac
import hashlib
from typing import Optional, List, Dict
import hmac
import logging
import secrets
from typing import Dict, List, Optional

logger = logging.getLogger(__name__)

@@ -11,18 +11,14 @@ class APITokenManager:
    def __init__(self, sqlite_handler, secret_key: str):

        self.db = sqlite_handler
        self.secret_key = secret_key.encode('utf-8')
        self.secret_key = secret_key.encode("utf-8")

    def generate_api_token(self) -> str:
        return secrets.token_hex(32)

    def hash_token(self, token: str) -> str:
        return hmac.new(
            self.secret_key,
            token.encode('utf-8'),
            hashlib.sha256
        ).hexdigest()

        return hmac.new(self.secret_key, token.encode("utf-8"), hashlib.sha256).hexdigest()

    def create_token(self, name: str) -> tuple[int, str]:
        plaintext_token = self.generate_api_token()
        token_hash = self.hash_token(plaintext_token)
@@ -43,7 +39,6 @@ class APITokenManager:
        logger.info(f"Revoked API token ID {token_id}")

        return deleted

    def list_tokens(self) -> List[Dict]:
        return self.db.list_api_tokens()
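Because tokens are stored only as HMAC-SHA256 digests, verification is hash-and-compare; a minimal sketch of the lookup side (retrieval of the stored hash is assumed, it is not shown in this diff):

    import hmac

    def token_matches(manager: APITokenManager, presented: str, stored_hash: str) -> bool:
        # compare_digest avoids timing side channels when comparing the hex digests
        return hmac.compare_digest(manager.hash_token(presented), stored_hash)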
@@ -1,4 +1,5 @@
import logging

import cherrypy

logger = logging.getLogger("HTTPServer")
@@ -40,10 +41,10 @@ def check_auth():
        cherrypy.request.user = {
            "username": payload.get("sub"),
            "client_id": payload.get("client_id"),
            "auth_type": "jwt"
            "auth_type": "jwt",
        }
        return

    # Check for JWT token in query parameter (for EventSource/SSE)
    # EventSource doesn't support custom headers, so we use query param
    query_token = cherrypy.request.params.get("token")
@@ -54,7 +55,7 @@ def check_auth():
        cherrypy.request.user = {
            "username": payload.get("sub"),
            "client_id": payload.get("client_id"),
            "auth_type": "jwt_query"
            "auth_type": "jwt_query",
        }
        # Remove token from params to avoid exposing it in logs
        del cherrypy.request.params["token"]
@@ -69,15 +70,15 @@ def check_auth():
        cherrypy.request.user = {
            "token_id": token_info["id"],
            "token_name": token_info["name"],
            "auth_type": "api_token"
            "auth_type": "api_token",
        }
        return

    # No valid authentication found
    logger.warning(f"Unauthorized access attempt to {cherrypy.request.path_info}")
    raise cherrypy.HTTPError(401, "Unauthorized - Valid JWT or API token required")


# Register the tool
cherrypy.tools.require_auth = cherrypy.Tool('before_handler', check_auth)
cherrypy.tools.require_auth = cherrypy.Tool("before_handler", check_auth)
logger.info("CherryPy require_auth tool registered")
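Both accepted credential paths can be exercised from a plain HTTP client; a sketch, where the base URL and token values are placeholders, not project defaults:

    import requests

    BASE = "http://repeater.local:8080"  # assumption: your repeater's web UI address
    jwt = "<jwt from login>"

    # Regular API call: the JWT travels in the Authorization header
    requests.get(f"{BASE}/api/companion/contacts", headers={"Authorization": f"Bearer {jwt}"})

    # SSE: EventSource cannot set headers, so the token rides in the query string,
    # which check_auth() above accepts and then deletes from the request params
    requests.get(f"{BASE}/api/companion/events", params={"token": jwt}, stream=True)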
@@ -1,10 +1,12 @@
import jwt
import logging
import time
from typing import Dict, Optional
import logging

import jwt

logger = logging.getLogger(__name__)


class JWTHandler:
    def __init__(self, secret: str, expiry_minutes: int = 15):
        self.secret = secret
@@ -14,21 +16,16 @@ class JWTHandler:

        now = int(time.time())
        expiry = now + (self.expiry_minutes * 60)

        payload = {
            'sub': username,
            'exp': expiry,
            'iat': now,
            'client_id': client_id
        }

        token = jwt.encode(payload, self.secret, algorithm='HS256')

        payload = {"sub": username, "exp": expiry, "iat": now, "client_id": client_id}

        token = jwt.encode(payload, self.secret, algorithm="HS256")
        logger.info(f"Created JWT for user '{username}' with client_id '{client_id[:8]}...'")
        return token

    def verify_jwt(self, token: str) -> Optional[Dict]:
        try:
            payload = jwt.decode(token, self.secret, algorithms=['HS256'])
            payload = jwt.decode(token, self.secret, algorithms=["HS256"])
            return payload
        except jwt.ExpiredSignatureError:
            logger.warning("JWT token expired")
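The encode/decode pair above is stock PyJWT; the same round trip in isolation, with a placeholder secret:

    import time

    import jwt  # PyJWT

    secret = "change-me"
    now = int(time.time())
    token = jwt.encode(
        {"sub": "admin", "iat": now, "exp": now + 3600, "client_id": "abcd1234"},
        secret,
        algorithm="HS256",
    )
    # Raises jwt.ExpiredSignatureError once past exp, which verify_jwt() logs above
    payload = jwt.decode(token, secret, algorithms=["HS256"])
    assert payload["sub"] == "admin"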
@@ -1,6 +1,7 @@
import cherrypy
from functools import wraps
import logging
from functools import wraps

import cherrypy

logger = logging.getLogger(__name__)

@@ -10,56 +11,56 @@ def require_auth(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        # Skip authentication for OPTIONS requests (CORS preflight)
        if cherrypy.request.method == 'OPTIONS':
        if cherrypy.request.method == "OPTIONS":
            return func(*args, **kwargs)

        # Get auth handlers from global cherrypy config (not app config)
        jwt_handler = cherrypy.config.get('jwt_handler')
        token_manager = cherrypy.config.get('token_manager')
        jwt_handler = cherrypy.config.get("jwt_handler")
        token_manager = cherrypy.config.get("token_manager")

        if not jwt_handler or not token_manager:
            logger.error("Auth handlers not configured")
            raise cherrypy.HTTPError(500, "Authentication not configured")

        # Try JWT authentication first
        auth_header = cherrypy.request.headers.get('Authorization', '')
        if auth_header.startswith('Bearer '):
        auth_header = cherrypy.request.headers.get("Authorization", "")
        if auth_header.startswith("Bearer "):
            token = auth_header[7:]  # Remove 'Bearer ' prefix
            payload = jwt_handler.verify_jwt(token)

            if payload:
                # JWT is valid
                cherrypy.request.user = {
                    'username': payload['sub'],
                    'client_id': payload['client_id'],
                    'auth_type': 'jwt'
                    "username": payload["sub"],
                    "client_id": payload["client_id"],
                    "auth_type": "jwt",
                }
                return func(*args, **kwargs)
            else:
                logger.warning("Invalid or expired JWT token")

        # Try API token authentication
        api_key = cherrypy.request.headers.get('X-API-Key', '')
        api_key = cherrypy.request.headers.get("X-API-Key", "")
        if api_key:
            token_info = token_manager.verify_token(api_key)

            if token_info:
                # API token is valid
                cherrypy.request.user = {
                    'username': 'api_token',
                    'token_name': token_info['name'],
                    'token_id': token_info['id'],
                    'auth_type': 'api_token'
                    "username": "api_token",
                    "token_name": token_info["name"],
                    "token_id": token_info["id"],
                    "auth_type": "api_token",
                }
                return func(*args, **kwargs)
            else:
                logger.warning("Invalid API token")

        # No valid authentication found
        logger.warning(f"Unauthorized access attempt to {cherrypy.request.path_info}")

        cherrypy.response.status = 401
        cherrypy.response.headers['Content-Type'] = 'application/json'
        return {'success': False, 'error': 'Unauthorized - Valid JWT or API token required'}

    return wrapper
        cherrypy.response.headers["Content-Type"] = "application/json"
        return {"success": False, "error": "Unauthorized - Valid JWT or API token required"}

    return wrapper
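The decorator is applied per-endpoint; the Companion endpoints later in this commit use exactly this stack, so a minimal guarded endpoint looks like this (class and method names here are illustrative):

    import cherrypy

    from .auth.middleware import require_auth

    class ExampleEndpoints:
        @cherrypy.expose
        @cherrypy.tools.json_out()
        @require_auth
        def whoami(self, **kwargs):
            # cherrypy.request.user was attached by the wrapper above
            return {"success": True, "user": cherrypy.request.user}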
@@ -3,13 +3,13 @@ import logging
import random
import threading
import time
from typing import Dict, Any, Optional
from typing import Any, Dict, Optional

logger = logging.getLogger("HTTPServer")


class CADCalibrationEngine:

    def __init__(self, daemon_instance=None, event_loop=None):
        self.daemon_instance = daemon_instance
        self.event_loop = event_loop
@@ -19,26 +19,28 @@ class CADCalibrationEngine:
        self.progress = {"current": 0, "total": 0}
        self.clients = set()  # SSE clients
        self.calibration_thread = None

    def get_test_ranges(self, spreading_factor: int):
        """Get CAD test ranges"""
        # Higher values = less sensitive, lower values = more sensitive
        # Test from LESS sensitive to MORE sensitive to find the sweet spot
        sf_ranges = {
            7: (range(22, 30, 1), range(12, 20, 1)),
            8: (range(22, 30, 1), range(12, 20, 1)),
            9: (range(24, 32, 1), range(14, 22, 1)),
            10: (range(26, 34, 1), range(16, 24, 1)),
            11: (range(28, 36, 1), range(18, 26, 1)),
            12: (range(30, 38, 1), range(20, 28, 1)),
        }
        return sf_ranges.get(spreading_factor, sf_ranges[8])

    async def test_cad_config(self, radio, det_peak: int, det_min: int, samples: int = 20) -> Dict[str, Any]:

    async def test_cad_config(
        self, radio, det_peak: int, det_min: int, samples: int = 20
    ) -> Dict[str, Any]:

        detections = 0
        baseline_detections = 0

        # First, get baseline with very insensitive settings (should detect nothing)
        baseline_samples = 5
        for _ in range(baseline_samples):
@@ -50,10 +52,10 @@ class CADCalibrationEngine:
            except Exception:
                pass
            await asyncio.sleep(0.1)  # 100ms between baseline samples

        # Wait before actual test
        await asyncio.sleep(0.5)

        # Now test the actual configuration
        for i in range(samples):
            try:
@@ -62,226 +64,247 @@ class CADCalibrationEngine:
                detections += 1
            except Exception:
                pass

            # Variable delay to avoid sampling artifacts
            delay = 0.05 + (i % 3) * 0.05  # 50ms, 100ms, 150ms rotation
            await asyncio.sleep(delay)

        # Calculate adjusted detection rate
        baseline_rate = (baseline_detections / baseline_samples) * 100
        detection_rate = (detections / samples) * 100

        # Subtract baseline noise
        adjusted_rate = max(0, detection_rate - baseline_rate)

        return {
            'det_peak': det_peak,
            'det_min': det_min,
            'samples': samples,
            'detections': detections,
            'detection_rate': detection_rate,
            'baseline_rate': baseline_rate,
            'adjusted_rate': adjusted_rate,  # This is the useful metric
            'sensitivity_score': self._calculate_sensitivity_score(det_peak, det_min, adjusted_rate)
            "det_peak": det_peak,
            "det_min": det_min,
            "samples": samples,
            "detections": detections,
            "detection_rate": detection_rate,
            "baseline_rate": baseline_rate,
            "adjusted_rate": adjusted_rate,  # This is the useful metric
            "sensitivity_score": self._calculate_sensitivity_score(
                det_peak, det_min, adjusted_rate
            ),
        }

    def _calculate_sensitivity_score(self, det_peak: int, det_min: int, adjusted_rate: float) -> float:

    def _calculate_sensitivity_score(
        self, det_peak: int, det_min: int, adjusted_rate: float
    ) -> float:

        # Ideal detection rate is around 10-30% for good sensitivity without false positives
        ideal_rate = 20.0
        rate_penalty = abs(adjusted_rate - ideal_rate) / ideal_rate

        # Prefer moderate sensitivity settings (not too extreme)
        sensitivity_penalty = (abs(det_peak - 25) + abs(det_min - 15)) / 20.0

        # Lower penalty = higher score
        score = max(0, 100 - (rate_penalty * 50) - (sensitivity_penalty * 20))
        return score

    def broadcast_to_clients(self, data):

        # Store the message for clients to pick up
        self.last_message = data
        # Also store in a queue for clients to consume
        if not hasattr(self, 'message_queue'):
        if not hasattr(self, "message_queue"):
            self.message_queue = []
        self.message_queue.append(data)

    def calibration_worker(self, samples: int, delay_ms: int):

        try:
            # Get radio from daemon instance
            if not self.daemon_instance:
                self.broadcast_to_clients({"type": "error", "message": "No daemon instance available"})
                self.broadcast_to_clients(
                    {"type": "error", "message": "No daemon instance available"}
                )
                return

            radio = getattr(self.daemon_instance, 'radio', None)
            radio = getattr(self.daemon_instance, "radio", None)
            if not radio:
                self.broadcast_to_clients({"type": "error", "message": "Radio instance not available"})
                self.broadcast_to_clients(
                    {"type": "error", "message": "Radio instance not available"}
                )
                return
            if not hasattr(radio, 'perform_cad'):
                self.broadcast_to_clients({"type": "error", "message": "Radio does not support CAD"})
            if not hasattr(radio, "perform_cad"):
                self.broadcast_to_clients(
                    {"type": "error", "message": "Radio does not support CAD"}
                )
                return

            # Get spreading factor from daemon instance
            config = getattr(self.daemon_instance, 'config', {})
            config = getattr(self.daemon_instance, "config", {})
            radio_config = config.get("radio", {})
            sf = radio_config.get("spreading_factor", 8)

            # Get test ranges
            peak_range, min_range = self.get_test_ranges(sf)

            total_tests = len(peak_range) * len(min_range)
            self.progress = {"current": 0, "total": total_tests}

            self.broadcast_to_clients({
                "type": "status",
                "message": f"Starting calibration: SF{sf}, {total_tests} tests",
                "test_ranges": {
                    "peak_min": min(peak_range),
                    "peak_max": max(peak_range),
                    "min_min": min(min_range),
                    "min_max": max(min_range),
                    "spreading_factor": sf,
                    "total_tests": total_tests

            self.broadcast_to_clients(
                {
                    "type": "status",
                    "message": f"Starting calibration: SF{sf}, {total_tests} tests",
                    "test_ranges": {
                        "peak_min": min(peak_range),
                        "peak_max": max(peak_range),
                        "min_min": min(min_range),
                        "min_max": max(min_range),
                        "spreading_factor": sf,
                        "total_tests": total_tests,
                    },
                }
            })

            )

            current = 0

            peak_list = list(peak_range)
            min_list = list(min_range)

            # Create all test combinations
            test_combinations = []
            for det_peak in peak_list:
                for det_min in min_list:
                    test_combinations.append((det_peak, det_min))

            # Sort by distance from center for center-out pattern
            peak_center = (max(peak_list) + min(peak_list)) / 2
            min_center = (max(min_list) + min(min_list)) / 2

            def distance_from_center(combo):
                peak, min_val = combo
                return ((peak - peak_center) ** 2 + (min_val - min_center) ** 2) ** 0.5

            # Sort by distance from center
            test_combinations.sort(key=distance_from_center)

            # Randomize within bands for better coverage
            band_size = max(1, len(test_combinations) // 8)  # Create 8 bands
            randomized_combinations = []

            for i in range(0, len(test_combinations), band_size):
                band = test_combinations[i:i + band_size]
                band = test_combinations[i : i + band_size]
                random.shuffle(band)  # Randomize within each band
                randomized_combinations.extend(band)

            # Run calibration in event loop with center-out randomized pattern
            if self.event_loop:
                for det_peak, det_min in randomized_combinations:
                    if not self.running:
                        break

                    current += 1
                    self.progress["current"] = current

                    # Update progress
                    self.broadcast_to_clients({
                        "type": "progress",
                        "current": current,
                        "total": total_tests,
                        "peak": det_peak,
                        "min": det_min
                    })

                    self.broadcast_to_clients(
                        {
                            "type": "progress",
                            "current": current,
                            "total": total_tests,
                            "peak": det_peak,
                            "min": det_min,
                        }
                    )

                    # Run the test
                    future = asyncio.run_coroutine_threadsafe(
                        self.test_cad_config(radio, det_peak, det_min, samples),
                        self.event_loop
                        self.test_cad_config(radio, det_peak, det_min, samples), self.event_loop
                    )

                    try:
                        result = future.result(timeout=30)  # 30 second timeout per test

                        # Store result
                        key = f"{det_peak}-{det_min}"
                        self.results[key] = result

                        # Send result to clients
                        self.broadcast_to_clients({
                            "type": "result",
                            **result
                        })
                        self.broadcast_to_clients({"type": "result", **result})
                    except Exception as e:
                        logger.error(f"CAD test failed for peak={det_peak}, min={det_min}: {e}")

                    # Delay between tests
                    if self.running and delay_ms > 0:
                        time.sleep(delay_ms / 1000.0)

            if self.running:
                # Find best result based on sensitivity score (not just detection rate)
                best_result = None
                recommended_result = None
                if self.results:
                    # Find result with highest sensitivity score (best balance)
                    best_result = max(self.results.values(), key=lambda x: x.get('sensitivity_score', 0))

                    best_result = max(
                        self.results.values(), key=lambda x: x.get("sensitivity_score", 0)
                    )

                    # Also find result with ideal adjusted detection rate (10-30%)
                    ideal_results = [r for r in self.results.values() if 10 <= r.get('adjusted_rate', 0) <= 30]
                    ideal_results = [
                        r for r in self.results.values() if 10 <= r.get("adjusted_rate", 0) <= 30
                    ]
                    if ideal_results:
                        # Among ideal results, pick the one with best sensitivity score
                        recommended_result = max(ideal_results, key=lambda x: x.get('sensitivity_score', 0))
                        recommended_result = max(
                            ideal_results, key=lambda x: x.get("sensitivity_score", 0)
                        )
                    else:
                        recommended_result = best_result

                self.broadcast_to_clients({
                    "type": "completed",
                    "message": "Calibration completed",
                    "results": {
                        "best": best_result,
                        "recommended": recommended_result,
                        "total_tests": len(self.results)
                    } if best_result else None
                })

                self.broadcast_to_clients(
                    {
                        "type": "completed",
                        "message": "Calibration completed",
                        "results": (
                            {
                                "best": best_result,
                                "recommended": recommended_result,
                                "total_tests": len(self.results),
                            }
                            if best_result
                            else None
                        ),
                    }
                )
            else:
                self.broadcast_to_clients({"type": "status", "message": "Calibration stopped"})

        except Exception as e:
            logger.error(f"Calibration worker error: {e}")
            self.broadcast_to_clients({"type": "error", "message": str(e)})
        finally:
            self.running = False

    def start_calibration(self, samples: int = 8, delay_ms: int = 100):

        if self.running:
            return False

        self.running = True
        self.results.clear()
        self.progress = {"current": 0, "total": 0}
        self.clear_message_queue()  # Clear any old messages

        # Start calibration in separate thread
        self.calibration_thread = threading.Thread(
            target=self.calibration_worker,
            args=(samples, delay_ms)
            target=self.calibration_worker, args=(samples, delay_ms)
        )
        self.calibration_thread.daemon = True
        self.calibration_thread.start()

        return True

    def stop_calibration(self):

        self.running = False
        if self.calibration_thread:
            self.calibration_thread.join(timeout=2)

    def clear_message_queue(self):

        if hasattr(self, 'message_queue'):
            self.message_queue.clear()
        if hasattr(self, "message_queue"):
            self.message_queue.clear()
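The scoring above peaks when the adjusted rate hits the 20% ideal and the thresholds sit at the moderate (25, 15) anchor; a self-contained re-derivation of _calculate_sensitivity_score for two sample points:

    def score(det_peak, det_min, adjusted_rate, ideal=20.0):
        rate_penalty = abs(adjusted_rate - ideal) / ideal
        sensitivity_penalty = (abs(det_peak - 25) + abs(det_min - 15)) / 20.0
        return max(0, 100 - rate_penalty * 50 - sensitivity_penalty * 20)

    assert score(25, 15, 20.0) == 100.0  # ideal rate, moderate thresholds
    assert score(25, 15, 30.0) == 75.0   # 10 points off the ideal rate costs 25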
@@ -0,0 +1,634 @@
"""
Companion Bridge REST API and SSE event stream endpoints.

Mounted as a nested CherryPy object at /api/companion/ via APIEndpoints.
Provides browser-accessible REST endpoints that proxy into the CompanionBridge
async methods, plus a Server-Sent Events stream for real-time push callbacks.
"""

import asyncio
import json
import logging
import queue
import threading
import time
from typing import Optional

import cherrypy

from repeater.companion.utils import validate_companion_node_name

from .auth.middleware import require_auth

logger = logging.getLogger("CompanionAPI")


class CompanionAPIEndpoints:
    """REST + SSE endpoints for a companion bridge.

    CherryPy auto-mounts this at ``/api/companion/`` when assigned as
    ``APIEndpoints.companion``. All async bridge calls are dispatched
    to the daemon's event loop via ``asyncio.run_coroutine_threadsafe``.
    """

    def __init__(self, daemon_instance=None, event_loop=None, config=None, config_manager=None):
        self.daemon_instance = daemon_instance
        self.event_loop = event_loop
        self.config = config or {}
        self.config_manager = config_manager

        # SSE clients: each gets a thread-safe queue
        self._sse_clients: list[queue.Queue] = []
        self._sse_lock = threading.Lock()

        # Flag: have we registered push callbacks yet?
        self._callbacks_registered = False

    # ------------------------------------------------------------------
    # Helpers
    # ------------------------------------------------------------------

    def _get_bridge(self, name: Optional[str] = None, companion_hash: Optional[int] = None):
        """Return the companion bridge, or raise 503/404 if unavailable.

        Resolution order (mirrors room-server pattern):
        1. *name* — look up via identity_manager by registered name.
        2. *companion_hash* — direct lookup in ``companion_bridges`` dict.
        3. Neither — return the first (and typically only) bridge.
        """
        if not self.daemon_instance:
            raise cherrypy.HTTPError(503, "Daemon not initialized")
        bridges = getattr(self.daemon_instance, "companion_bridges", {})
        if not bridges:
            raise cherrypy.HTTPError(503, "No companion bridges configured")

        # --- resolve by name via identity_manager (same pattern as room servers) ---
        if name is not None:
            identity_manager = getattr(self.daemon_instance, "identity_manager", None)
            if identity_manager:
                for reg_name, identity, _cfg in identity_manager.get_identities_by_type(
                    "companion"
                ):
                    if reg_name == name:
                        hash_byte = identity.get_public_key()[0]
                        bridge = bridges.get(hash_byte)
                        if bridge:
                            return bridge
            raise cherrypy.HTTPError(404, f"Companion '{name}' not found")

        # --- resolve by hash (fallback) ---
        if companion_hash is not None:
            bridge = bridges.get(companion_hash)
            if not bridge:
                msg = f"Companion 0x{companion_hash:02X} not found"  # noqa: E231
                raise cherrypy.HTTPError(404, msg)
            return bridge

        # --- default: first bridge ---
        return next(iter(bridges.values()))

    def _resolve_bridge_params(self, params) -> dict:
        """Extract optional companion name/hash from request params.

        Returns kwargs suitable for ``_get_bridge(**result)``.
        Follows the room-server convention: ``companion_name`` is the
        primary selector, ``companion_hash`` is the fallback.
        """
        name = params.get("companion_name")
        raw_hash = params.get("companion_hash")
        result: dict = {}
        if name is not None:
            result["name"] = str(name)
        elif raw_hash is not None:
            try:
                result["companion_hash"] = int(str(raw_hash), 0)
            except (ValueError, TypeError):
                raise cherrypy.HTTPError(400, "Invalid companion_hash")
        return result

    def _run_async(self, coro, timeout: float = 30.0):
        """Run an async coroutine on the daemon event loop and return result."""
        if self.event_loop is None:
            raise cherrypy.HTTPError(503, "Event loop not available")
        future = asyncio.run_coroutine_threadsafe(coro, self.event_loop)
        return future.result(timeout=timeout)
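    # run_coroutine_threadsafe is the standard bridge from a CherryPy worker thread
    # into the daemon's asyncio loop: the returned concurrent.futures.Future blocks
    # only the calling thread, and future.result() re-raises any exception the
    # coroutine raised. Illustrative use (names are placeholders):
    #
    #     future = asyncio.run_coroutine_threadsafe(bridge.send_login(key, pw), loop)
    #     result = future.result(timeout=15.0)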
    @staticmethod
    def _success(data, **kwargs):
        result = {"success": True, "data": data}
        result.update(kwargs)
        return result

    @staticmethod
    def _error(msg):
        return {"success": False, "error": str(msg)}

    def _require_post(self):
        if cherrypy.request.method != "POST":
            cherrypy.response.headers["Allow"] = "POST"
            raise cherrypy.HTTPError(405, "Method not allowed. Use POST.")

    def _get_json_body(self) -> dict:
        """Read and parse the JSON request body."""
        try:
            raw = cherrypy.request.body.read()
            return json.loads(raw) if raw else {}
        except (json.JSONDecodeError, ValueError) as exc:
            raise cherrypy.HTTPError(400, f"Invalid JSON body: {exc}")

    def _pub_key_from_hex(self, hex_str: str) -> bytes:
        """Decode a hex public key, raising 400 on error."""
        try:
            key = bytes.fromhex(hex_str)
            if len(key) != 32:
                raise ValueError("Expected 32-byte key")
            return key
        except (ValueError, TypeError) as exc:
            raise cherrypy.HTTPError(400, f"Invalid public key: {exc}")

    # ------------------------------------------------------------------
    # SSE push-event plumbing
    # ------------------------------------------------------------------

    def _ensure_callbacks(self):
        """Register push callbacks on the bridge (once)."""
        if self._callbacks_registered:
            return
        try:
            bridge = self._get_bridge()
        except cherrypy.HTTPError:
            return  # bridge not yet available

        def _make_cb(event_name):
            """Create a callback that serialises event data for SSE clients."""

            def _cb(*args, **kwargs):
                payload = self._serialise_event(event_name, args, kwargs)
                self._broadcast_sse(payload)

            return _cb

        callback_names = [
            "message_received",
            "channel_message_received",
            "advert_received",
            "contact_path_updated",
            "send_confirmed",
            "login_result",
        ]
        for name in callback_names:
            register_fn = getattr(bridge, f"on_{name}", None)
            if register_fn:
                register_fn(_make_cb(name))

        self._callbacks_registered = True

    @staticmethod
    def _serialise_event(event_name: str, args: tuple, kwargs: dict) -> dict:
        """Convert callback arguments to a JSON-safe dict."""
        data: dict = {"event": event_name, "timestamp": int(time.time())}
        for i, arg in enumerate(args):
            data[f"arg{i}"] = _to_json_safe(arg)
        for k, v in kwargs.items():
            data[k] = _to_json_safe(v)
        return data

    def _broadcast_sse(self, payload: dict):
        """Put *payload* into every active SSE client queue."""
        with self._sse_lock:
            dead = []
            for q in self._sse_clients:
                try:
                    q.put_nowait(payload)
                except queue.Full:
                    dead.append(q)
            for q in dead:
                self._sse_clients.remove(q)

    # ==================================================================
    # REST Endpoints
    # ==================================================================

    # ----- Index / listing -----

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @require_auth
    def index(self, **kwargs):
        """GET /api/companion/ — list configured companions."""
        bridges = getattr(self.daemon_instance, "companion_bridges", {})
        identity_manager = getattr(self.daemon_instance, "identity_manager", None)

        # Build name lookup from identity_manager (same pattern as room servers)
        name_by_hash: dict[int, str] = {}
        if identity_manager:
            for reg_name, identity, _cfg in identity_manager.get_identities_by_type("companion"):
                name_by_hash[identity.get_public_key()[0]] = reg_name

        items = []
        for h, b in bridges.items():
            items.append(
                {
                    "companion_name": name_by_hash.get(h, ""),
                    "companion_hash": f"0x{h:02X}",  # noqa: E231
                    "node_name": b.prefs.node_name,
                    "public_key": b.get_public_key().hex(),
                    "is_running": b.is_running,
                    "contacts_count": b.contacts.get_count(),
                    "channels_count": b.channels.get_count(),
                }
            )
        return self._success(items)

    # ----- Identity -----

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @require_auth
    def self_info(self, **kwargs):
        """GET /api/companion/self_info — node identity and preferences."""
        bridge = self._get_bridge(**self._resolve_bridge_params(kwargs))
        prefs = bridge.get_self_info()
        return self._success(
            {
                "public_key": bridge.get_public_key().hex(),
                "node_name": prefs.node_name,
                "adv_type": prefs.adv_type,
                "tx_power_dbm": prefs.tx_power_dbm,
                "frequency_hz": prefs.frequency_hz,
                "bandwidth_hz": prefs.bandwidth_hz,
                "spreading_factor": prefs.spreading_factor,
                "coding_rate": prefs.coding_rate,
                "latitude": prefs.latitude,
                "longitude": prefs.longitude,
            }
        )

    # ----- Contacts -----

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @require_auth
    def contacts(self, **kwargs):
        """GET /api/companion/contacts — list all contacts."""
        bridge = self._get_bridge(**self._resolve_bridge_params(kwargs))
        since = int(kwargs.get("since", 0))
        contacts = bridge.get_contacts(since=since)
        items = []
        for c in contacts:
            items.append(
                {
                    "public_key": (
                        c.public_key.hex() if isinstance(c.public_key, bytes) else c.public_key
                    ),
                    "name": c.name,
                    "adv_type": c.adv_type,
                    "flags": c.flags,
                    "out_path_len": c.out_path_len,
                    "last_advert_timestamp": c.last_advert_timestamp,
                    "lastmod": c.lastmod,
                    "gps_lat": c.gps_lat,
                    "gps_lon": c.gps_lon,
                }
            )
        return self._success(items)

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @require_auth
    def contact(self, **kwargs):
        """GET /api/companion/contact?pub_key=<hex> — get single contact."""
        bridge = self._get_bridge(**self._resolve_bridge_params(kwargs))
        pk_hex = kwargs.get("pub_key")
        if not pk_hex:
            raise cherrypy.HTTPError(400, "pub_key required")
        pub_key = self._pub_key_from_hex(pk_hex)
        c = bridge.get_contact_by_key(pub_key)
        if not c:
            raise cherrypy.HTTPError(404, "Contact not found")
        return self._success(
            {
                "public_key": (
                    c.public_key.hex() if isinstance(c.public_key, bytes) else c.public_key
                ),
                "name": c.name,
                "adv_type": c.adv_type,
                "flags": c.flags,
                "out_path_len": c.out_path_len,
                "out_path": c.out_path.hex() if isinstance(c.out_path, bytes) else "",
                "last_advert_timestamp": c.last_advert_timestamp,
                "lastmod": c.lastmod,
                "gps_lat": c.gps_lat,
                "gps_lon": c.gps_lon,
            }
        )

    # ----- Channels -----

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @require_auth
    def channels(self, **kwargs):
        """GET /api/companion/channels — list configured channels."""
        try:
            bridge = self._get_bridge(**self._resolve_bridge_params(kwargs))
            items = []
            for idx in range(bridge.channels.max_channels):
                ch = bridge.channels.get(idx)
                if ch:
                    items.append(
                        {
                            "index": idx,
                            "name": ch.name,
                            # Don't expose the PSK secret over REST
                        }
                    )
            return self._success(items)
        except cherrypy.HTTPError:
            raise
        except Exception as exc:
            logger.error(f"channels endpoint error: {exc}", exc_info=True)
            return self._error(str(exc))

    # ----- Statistics -----

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @require_auth
    def stats(self, **kwargs):
        """GET /api/companion/stats?type=packets — local companion stats."""
        bridge = self._get_bridge(**self._resolve_bridge_params(kwargs))
        stats_type_map = {"core": 0, "radio": 1, "packets": 2}
        stype = stats_type_map.get(kwargs.get("type", "packets"), 2)
        return self._success(bridge.get_stats(stype))

    # ----- Messaging -----

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @require_auth
    def send_text(self, **kwargs):
        """POST /api/companion/send_text {pub_key, text, txt_type?, companion_name?}"""
        self._require_post()
        body = self._get_json_body()
        bridge = self._get_bridge(**self._resolve_bridge_params(body))
        pub_key = self._pub_key_from_hex(body.get("pub_key", ""))
        text = body.get("text", "")
        if not text:
            raise cherrypy.HTTPError(400, "text required")
        txt_type = int(body.get("txt_type", 0))
        result = self._run_async(bridge.send_text_message(pub_key, text, txt_type=txt_type))
        return self._success(
            {
                "sent": result.success,
                "is_flood": result.is_flood,
                "expected_ack": result.expected_ack,
            }
        )

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @require_auth
    def send_channel_message(self, **kwargs):
        """POST /api/companion/send_channel_message {channel_idx, text, companion_name?}"""
        self._require_post()
        body = self._get_json_body()
        bridge = self._get_bridge(**self._resolve_bridge_params(body))
        channel_idx = int(body.get("channel_idx", 0))
        text = body.get("text", "")
        if not text:
            raise cherrypy.HTTPError(400, "text required")
        success = self._run_async(bridge.send_channel_message(channel_idx, text))
        return self._success({"sent": success})

    # ----- Login -----

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @require_auth
    def login(self, **kwargs):
        """POST /api/companion/login {pub_key, password?, companion_name?}"""
        self._require_post()
        body = self._get_json_body()
        bridge = self._get_bridge(**self._resolve_bridge_params(body))
        pub_key = self._pub_key_from_hex(body.get("pub_key", ""))
        password = body.get("password", "")
        result = self._run_async(bridge.send_login(pub_key, password), timeout=15.0)
        return self._success(_to_json_safe(result))

    # ----- Status / Telemetry Requests -----

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @require_auth
    def request_status(self, **kwargs):
        """POST /api/companion/request_status {pub_key, timeout?, companion_name?}"""
        self._require_post()
        body = self._get_json_body()
        bridge = self._get_bridge(**self._resolve_bridge_params(body))
        pub_key = self._pub_key_from_hex(body.get("pub_key", ""))
        timeout = float(body.get("timeout", 15.0))
        result = self._run_async(
            bridge.send_status_request(pub_key, timeout=timeout),
            timeout=timeout + 5.0,
        )
        return self._success(_to_json_safe(result))

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @require_auth
    def request_telemetry(self, **kwargs):
        """POST /api/companion/request_telemetry.

        Body: pub_key, want_base?, want_location?, want_environment?,
        timeout?, companion_name?

        On success, telemetry_data includes raw_bytes (LPP hex), sensors (parsed),
        and frame_bytes (hex): companion-style frame 0x8B + 0 + 6B pubkey prefix + LPP.
        """
        self._require_post()
        try:
            body = self._get_json_body()
            bridge = self._get_bridge(**self._resolve_bridge_params(body))
            pub_key = self._pub_key_from_hex(body.get("pub_key", ""))
            timeout = float(body.get("timeout", 20.0))
            result = self._run_async(
                bridge.send_telemetry_request(
                    pub_key,
                    want_base=bool(body.get("want_base", True)),
                    want_location=bool(body.get("want_location", True)),
                    want_environment=bool(body.get("want_environment", True)),
                    timeout=timeout,
                ),
                timeout=timeout + 5.0,
            )
            # Ensure all values are JSON-serialisable (telemetry may contain bytes)
            return self._success(_to_json_safe(result))
        except cherrypy.HTTPError:
            raise
        except Exception as exc:
            logger.error(f"request_telemetry endpoint error: {exc}", exc_info=True)
            return self._error(str(exc))

    # ----- Repeater Commands -----

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @require_auth
    def send_command(self, **kwargs):
        """POST /api/companion/send_command {pub_key, command, parameters?, companion_name?}"""
        self._require_post()
        body = self._get_json_body()
        bridge = self._get_bridge(**self._resolve_bridge_params(body))
        pub_key = self._pub_key_from_hex(body.get("pub_key", ""))
        command = body.get("command", "")
        if not command:
            raise cherrypy.HTTPError(400, "command required")
        parameters = body.get("parameters")
        result = self._run_async(
            bridge.send_repeater_command(pub_key, command, parameters),
            timeout=20.0,
        )
        return self._success(_to_json_safe(result))

    # ----- Path / Routing -----

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @require_auth
    def reset_path(self, **kwargs):
        """POST /api/companion/reset_path {pub_key, companion_name?}"""
        self._require_post()
        body = self._get_json_body()
        bridge = self._get_bridge(**self._resolve_bridge_params(body))
        pub_key = self._pub_key_from_hex(body.get("pub_key", ""))
        ok = bridge.reset_path(pub_key)
        return self._success({"reset": ok})

    # ----- Device Configuration -----

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @require_auth
    def set_advert_name(self, **kwargs):
        """POST /api/companion/set_advert_name {advert_name, companion_name?}"""
        self._require_post()
        body = self._get_json_body()
        bridge = self._get_bridge(**self._resolve_bridge_params(body))
        name = body.get("advert_name", body.get("name", ""))
        if not name:
            raise cherrypy.HTTPError(400, "name required")
        try:
            validated_name = validate_companion_node_name(name)
        except ValueError as e:
            raise cherrypy.HTTPError(400, str(e)) from e
        bridge.set_advert_name(validated_name)
        # Optionally sync node_name to config.yaml so it survives restart
        companion_name = body.get("companion_name")
        if companion_name is None and getattr(self.daemon_instance, "identity_manager", None):
            pubkey = bridge.get_public_key()
            for reg_name, identity, _ in self.daemon_instance.identity_manager.get_identities_by_type(
                "companion"
            ):
                if identity.get_public_key() == pubkey:
                    companion_name = reg_name
                    break
        if companion_name and self.config_manager:
            companions = (self.config.get("identities") or {}).get("companions") or []
            for entry in companions:
                if entry.get("name") == companion_name:
                    if "settings" not in entry:
                        entry["settings"] = {}
                    entry["settings"]["node_name"] = validated_name
                    try:
                        if not self.config_manager.save_to_file():
                            logger.warning("Failed to save config after set_advert_name")
                    except Exception as e:
                        logger.warning("Error saving config after set_advert_name: %s", e)
                    break
        return self._success({"name": bridge.prefs.node_name})

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @require_auth
    def set_advert_location(self, **kwargs):
        """POST /api/companion/set_advert_location {latitude, longitude, companion_name?}"""
        self._require_post()
        body = self._get_json_body()
        bridge = self._get_bridge(**self._resolve_bridge_params(body))
        lat = float(body.get("latitude", 0.0))
        lon = float(body.get("longitude", 0.0))
        bridge.set_advert_latlon(lat, lon)
        return self._success({"latitude": lat, "longitude": lon})

    # ==================================================================
    # SSE Event Stream
    # ==================================================================

    @cherrypy.expose
    def events(self, **kwargs):
        """GET /api/companion/events — Server-Sent Events stream for push callbacks.

        Connect with ``EventSource('/api/companion/events?token=JWT')``.
        Auth is handled by the CherryPy tool-level require_auth (supports
        query-param JWT tokens needed by the browser EventSource API).
        """
        self._ensure_callbacks()

        cherrypy.response.headers["Content-Type"] = "text/event-stream"
        cherrypy.response.headers["Cache-Control"] = "no-cache"
        cherrypy.response.headers["Connection"] = "keep-alive"
        cherrypy.response.headers["X-Accel-Buffering"] = "no"

        client_queue: queue.Queue = queue.Queue(maxsize=256)
        with self._sse_lock:
            self._sse_clients.append(client_queue)

        def generate():
            try:
                payload = {"event": "connected", "timestamp": int(time.time())}
                yield f"data: {json.dumps(payload)}\n\n"

                while True:
                    try:
                        item = client_queue.get(timeout=15.0)
                        yield f"data: {json.dumps(item)}\n\n"
                    except queue.Empty:
                        # Keep-alive comment
                        payload = {"event": "keepalive", "timestamp": int(time.time())}
                        yield f"data: {json.dumps(payload)}\n\n"
            except GeneratorExit:
                pass
            except Exception as exc:
                logger.debug(f"SSE stream ended: {exc}")
            finally:
                with self._sse_lock:
                    if client_queue in self._sse_clients:
                        self._sse_clients.remove(client_queue)

        return generate()

    events._cp_config = {"response.stream": True}


# ======================================================================
# Utility: make arbitrary objects JSON-serialisable for SSE events
# ======================================================================


def _to_json_safe(obj):
    """Convert common companion objects to JSON-safe dicts/values."""
    if obj is None or isinstance(obj, (bool, int, float, str)):
        return obj
    if isinstance(obj, bytes):
        return obj.hex()
    if isinstance(obj, bytearray):
        return bytes(obj).hex()
    if isinstance(obj, dict):
        return {k: _to_json_safe(v) for k, v in obj.items()}
    if isinstance(obj, (list, tuple)):
        return [_to_json_safe(v) for v in obj]
    # Dataclass / namedtuple with __dict__
    if hasattr(obj, "__dict__"):
        return {k: _to_json_safe(v) for k, v in obj.__dict__.items() if not k.startswith("_")}
    return str(obj)
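End to end, the new module is reachable as soon as it is mounted at /api/companion/; a quick smoke test against the send_text endpoint, where the host, JWT, and public key are placeholders:

    import requests

    BASE = "http://repeater.local:8080"  # assumption: your repeater's web UI address
    headers = {"Authorization": "Bearer <jwt>"}
    body = {"pub_key": "ab" * 32, "text": "hello from the repeater", "txt_type": 0}
    resp = requests.post(f"{BASE}/api/companion/send_text", json=body, headers=headers)
    print(resp.json())  # {"success": true, "data": {"sent": ..., "is_flood": ..., "expected_ack": ...}}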
+1
-1
File diff suppressed because one or more lines are too long
+1
-1
File diff suppressed because one or more lines are too long
+1
-1
@@ -1 +1 @@
import{a as p,b as n,g as m,e as t,s as g,t as s,j as d,p as l}from"./index-BfUIlcDy.js";const f={class:"flex items-center justify-between mb-4"},w={class:"text-xl font-semibold text-content-primary dark:text-content-primary"},v={class:"mb-6"},h={key:0,class:"w-6 h-6",fill:"none",stroke:"currentColor",viewBox:"0 0 24 24"},y={key:1,class:"w-6 h-6",fill:"none",stroke:"currentColor",viewBox:"0 0 24 24"},C={key:2,class:"w-6 h-6",fill:"none",stroke:"currentColor",viewBox:"0 0 24 24"},B={class:"text-content-secondary dark:text-content-primary/80 text-base leading-relaxed"},j={class:"flex gap-3"},_=p({__name:"ConfirmDialog",props:{show:{type:Boolean},title:{default:"Confirm Action"},message:{},confirmText:{default:"Confirm"},cancelText:{default:"Cancel"},variant:{default:"warning"}},emits:["close","confirm"],setup(c,{emit:b}){const o=c,r=b,u=i=>{i.target===i.currentTarget&&r("close")},k={danger:"bg-red-100 dark:bg-red-500/20 border-red-500/30 text-red-600 dark:text-red-400",warning:"bg-yellow-100 dark:bg-yellow-500/20 border-yellow-500/30 text-yellow-600 dark:text-yellow-400",info:"bg-blue-500/20 border-blue-500/30 text-blue-600 dark:text-blue-400"},x={danger:"bg-red-500 hover:bg-red-600",warning:"bg-yellow-500 hover:bg-yellow-600",info:"bg-blue-500 hover:bg-blue-600"};return(i,e)=>o.show?(l(),n("div",{key:0,onClick:u,class:"fixed inset-0 bg-black/40 backdrop-blur-lg z-[99999] flex items-center justify-center p-4",style:{"backdrop-filter":"blur(8px) saturate(180%)",position:"fixed",top:"0",left:"0",right:"0",bottom:"0"}},[t("div",{class:"bg-white dark:bg-surface-elevated backdrop-blur-xl rounded-[20px] p-6 w-full max-w-md border border-stroke-subtle dark:border-white/10",onClick:e[3]||(e[3]=g(()=>{},["stop"]))},[t("div",f,[t("h3",w,s(o.title),1),t("button",{onClick:e[0]||(e[0]=a=>r("close")),class:"text-content-secondary dark:text-content-muted hover:text-content-primary dark:hover:text-content-primary transition-colors"},e[4]||(e[4]=[t("svg",{class:"w-6 h-6",fill:"none",stroke:"currentColor",viewBox:"0 0 24 24"},[t("path",{"stroke-linecap":"round","stroke-linejoin":"round","stroke-width":"2",d:"M6 18L18 6M6 6l12 12"})],-1)]))]),t("div",v,[t("div",{class:d(["inline-flex p-3 rounded-xl mb-4",k[o.variant]])},[o.variant==="danger"?(l(),n("svg",h,e[5]||(e[5]=[t("path",{"stroke-linecap":"round","stroke-linejoin":"round","stroke-width":"2",d:"M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z"},null,-1)]))):o.variant==="warning"?(l(),n("svg",y,e[6]||(e[6]=[t("path",{"stroke-linecap":"round","stroke-linejoin":"round","stroke-width":"2",d:"M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z"},null,-1)]))):(l(),n("svg",C,e[7]||(e[7]=[t("path",{"stroke-linecap":"round","stroke-linejoin":"round","stroke-width":"2",d:"M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"},null,-1)])))],2),t("p",B,s(o.message),1)]),t("div",j,[t("button",{onClick:e[1]||(e[1]=a=>r("close")),class:"flex-1 px-4 py-3 rounded-xl bg-background-mute dark:bg-white/5 hover:bg-stroke-subtle dark:hover:bg-white/10 text-content-primary dark:text-content-primary transition-all duration-200 border border-stroke-subtle dark:border-stroke/10"},s(o.cancelText),1),t("button",{onClick:e[2]||(e[2]=a=>r("confirm")),class:d(["flex-1 px-4 py-3 rounded-xl text-white transition-all duration-200",x[o.variant]])},s(o.confirmText),3)])])])):m("",!0)}});export{_};
import{a as p,b as n,g as m,e as t,s as g,t as s,j as d,p as l}from"./index-DyUIpN7m.js";const f={class:"flex items-center justify-between mb-4"},w={class:"text-xl font-semibold text-content-primary dark:text-content-primary"},v={class:"mb-6"},h={key:0,class:"w-6 h-6",fill:"none",stroke:"currentColor",viewBox:"0 0 24 24"},y={key:1,class:"w-6 h-6",fill:"none",stroke:"currentColor",viewBox:"0 0 24 24"},C={key:2,class:"w-6 h-6",fill:"none",stroke:"currentColor",viewBox:"0 0 24 24"},B={class:"text-content-secondary dark:text-content-primary/80 text-base leading-relaxed"},j={class:"flex gap-3"},_=p({__name:"ConfirmDialog",props:{show:{type:Boolean},title:{default:"Confirm Action"},message:{},confirmText:{default:"Confirm"},cancelText:{default:"Cancel"},variant:{default:"warning"}},emits:["close","confirm"],setup(c,{emit:b}){const o=c,r=b,u=i=>{i.target===i.currentTarget&&r("close")},k={danger:"bg-red-100 dark:bg-red-500/20 border-red-500/30 text-red-600 dark:text-red-400",warning:"bg-yellow-100 dark:bg-yellow-500/20 border-yellow-500/30 text-yellow-600 dark:text-yellow-400",info:"bg-blue-500/20 border-blue-500/30 text-blue-600 dark:text-blue-400"},x={danger:"bg-red-500 hover:bg-red-600",warning:"bg-yellow-500 hover:bg-yellow-600",info:"bg-blue-500 hover:bg-blue-600"};return(i,e)=>o.show?(l(),n("div",{key:0,onClick:u,class:"fixed inset-0 bg-black/40 backdrop-blur-lg z-[99999] flex items-center justify-center p-4",style:{"backdrop-filter":"blur(8px) saturate(180%)",position:"fixed",top:"0",left:"0",right:"0",bottom:"0"}},[t("div",{class:"bg-white dark:bg-surface-elevated backdrop-blur-xl rounded-[20px] p-6 w-full max-w-md border border-stroke-subtle dark:border-white/10",onClick:e[3]||(e[3]=g(()=>{},["stop"]))},[t("div",f,[t("h3",w,s(o.title),1),t("button",{onClick:e[0]||(e[0]=a=>r("close")),class:"text-content-secondary dark:text-content-muted hover:text-content-primary dark:hover:text-content-primary transition-colors"},e[4]||(e[4]=[t("svg",{class:"w-6 h-6",fill:"none",stroke:"currentColor",viewBox:"0 0 24 24"},[t("path",{"stroke-linecap":"round","stroke-linejoin":"round","stroke-width":"2",d:"M6 18L18 6M6 6l12 12"})],-1)]))]),t("div",v,[t("div",{class:d(["inline-flex p-3 rounded-xl mb-4",k[o.variant]])},[o.variant==="danger"?(l(),n("svg",h,e[5]||(e[5]=[t("path",{"stroke-linecap":"round","stroke-linejoin":"round","stroke-width":"2",d:"M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z"},null,-1)]))):o.variant==="warning"?(l(),n("svg",y,e[6]||(e[6]=[t("path",{"stroke-linecap":"round","stroke-linejoin":"round","stroke-width":"2",d:"M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z"},null,-1)]))):(l(),n("svg",C,e[7]||(e[7]=[t("path",{"stroke-linecap":"round","stroke-linejoin":"round","stroke-width":"2",d:"M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"},null,-1)])))],2),t("p",B,s(o.message),1)]),t("div",j,[t("button",{onClick:e[1]||(e[1]=a=>r("close")),class:"flex-1 px-4 py-3 rounded-xl bg-background-mute dark:bg-white/5 hover:bg-stroke-subtle dark:hover:bg-white/10 text-content-primary dark:text-content-primary transition-all duration-200 border border-stroke-subtle dark:border-stroke/10"},s(o.cancelText),1),t("button",{onClick:e[2]||(e[2]=a=>r("confirm")),class:d(["flex-1 px-4 py-3 rounded-xl text-white transition-all duration-200",x[o.variant]])},s(o.confirmText),3)])])])):m("",!0)}});export{_};
File diff suppressed because one or more lines are too long
+1
-1
File diff suppressed because one or more lines are too long
+1
-1
@@ -1 +1 @@
import{a as e,b as r,i as o,p as n}from"./index-BfUIlcDy.js";const d=e({name:"HelpView",__name:"Help",setup(a){return(i,t)=>(n(),r("div",null,t[0]||(t[0]=[o('<div class="glass-card backdrop-blur border border-stroke-subtle dark:border-white/10 rounded-[15px] p-8"><h1 class="text-content-primary dark:text-content-primary text-2xl font-semibold mb-6">Help & Documentation</h1><div class="text-center py-12"><div class="text-primary mb-6"><svg class="w-20 h-20 mx-auto mb-4" fill="none" stroke="currentColor" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 6.253v13m0-13C10.832 5.477 9.246 5 7.5 5S4.168 5.477 3 6.253v13C4.168 18.477 5.754 18 7.5 18s3.332.477 4.5 1.253m0-13C13.168 5.477 14.754 5 16.5 5c1.746 0 3.332.477 4.5 1.253v13C19.832 18.477 18.246 18 16.5 18c-1.746 0-3.332.477-4.5 1.253"></path></svg></div><h2 class="text-content-primary dark:text-content-primary text-xl font-medium mb-3">pyMC Repeater Wiki</h2><p class="text-content-secondary dark:text-content-muted mb-8 max-w-md mx-auto"> Access documentation, setup guides, troubleshooting tips, and community resources on our official wiki. </p><a href="https://github.com/rightup/pyMC_Repeater/wiki" target="_blank" rel="noopener noreferrer" class="inline-flex items-center gap-2 bg-primary hover:bg-primary/80 text-white dark:text-background font-medium py-3 px-6 rounded-xl transition-colors duration-200"><svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M10 6H6a2 2 0 00-2 2v10a2 2 0 002 2h10a2 2 0 002-2v-4M14 4h6m0 0v6m0-6L10 14"></path></svg> Visit Wiki Documentation </a><div class="mt-8 text-xs text-content-muted dark:text-content-muted"> Opens in a new tab </div></div></div>',1)])))}});export{d as default};
import{a as e,b as r,i as o,p as n}from"./index-DyUIpN7m.js";const d=e({name:"HelpView",__name:"Help",setup(a){return(i,t)=>(n(),r("div",null,t[0]||(t[0]=[o('<div class="glass-card backdrop-blur border border-stroke-subtle dark:border-white/10 rounded-[15px] p-8"><h1 class="text-content-primary dark:text-content-primary text-2xl font-semibold mb-6">Help & Documentation</h1><div class="text-center py-12"><div class="text-primary mb-6"><svg class="w-20 h-20 mx-auto mb-4" fill="none" stroke="currentColor" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 6.253v13m0-13C10.832 5.477 9.246 5 7.5 5S4.168 5.477 3 6.253v13C4.168 18.477 5.754 18 7.5 18s3.332.477 4.5 1.253m0-13C13.168 5.477 14.754 5 16.5 5c1.746 0 3.332.477 4.5 1.253v13C19.832 18.477 18.246 18 16.5 18c-1.746 0-3.332.477-4.5 1.253"></path></svg></div><h2 class="text-content-primary dark:text-content-primary text-xl font-medium mb-3">pyMC Repeater Wiki</h2><p class="text-content-secondary dark:text-content-muted mb-8 max-w-md mx-auto"> Access documentation, setup guides, troubleshooting tips, and community resources on our official wiki. </p><a href="https://github.com/rightup/pyMC_Repeater/wiki" target="_blank" rel="noopener noreferrer" class="inline-flex items-center gap-2 bg-primary hover:bg-primary/80 text-white dark:text-background font-medium py-3 px-6 rounded-xl transition-colors duration-200"><svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M10 6H6a2 2 0 00-2 2v10a2 2 0 002 2h10a2 2 0 002-2v-4M14 4h6m0 0v6m0-6L10 14"></path></svg> Visit Wiki Documentation </a><div class="mt-8 text-xs text-content-muted dark:text-content-muted"> Opens in a new tab </div></div></div>',1)])))}});export{d as default};
+1
-1
File diff suppressed because one or more lines are too long
+1
-1
File diff suppressed because one or more lines are too long
@@ -0,0 +1 @@
import{a as k,b as o,g,e as r,j as a,t as p,s as x,p as s}from"./index-DyUIpN7m.js";const f={class:"mb-6"},m={key:0,class:"w-6 h-6",fill:"none",stroke:"currentColor",viewBox:"0 0 24 24"},v={key:1,class:"w-6 h-6",fill:"none",stroke:"currentColor",viewBox:"0 0 24 24"},h={key:2,class:"w-6 h-6",fill:"none",stroke:"currentColor",viewBox:"0 0 24 24"},w={class:"text-content-secondary dark:text-content-primary/80 text-base leading-relaxed"},C={class:"flex"},B=k({__name:"MessageDialog",props:{show:{type:Boolean},message:{},variant:{default:"success"}},emits:["close"],setup(i,{emit:d}){const t=i,l=d,c=n=>{n.target===n.currentTarget&&l("close")},b={success:"bg-green-100 dark:bg-green-500/20 border-green-600/40 dark:border-green-500/30 text-green-600 dark:text-green-400",error:"bg-red-100 dark:bg-red-500/20 border-red-500/30 text-red-600 dark:text-red-400",info:"bg-blue-500/20 border-blue-500/30 text-blue-600 dark:text-blue-400"},u={success:"bg-green-500 hover:bg-green-600",error:"bg-red-500 hover:bg-red-600",info:"bg-blue-500 hover:bg-blue-600"};return(n,e)=>t.show?(s(),o("div",{key:0,onClick:c,class:"fixed inset-0 bg-black/40 backdrop-blur-lg z-[99999] flex items-center justify-center p-4",style:{"backdrop-filter":"blur(8px) saturate(180%)",position:"fixed",top:"0",left:"0",right:"0",bottom:"0"}},[r("div",{class:"bg-white dark:bg-surface-elevated backdrop-blur-xl rounded-[20px] p-6 w-full max-w-md border border-stroke-subtle dark:border-white/10",onClick:e[1]||(e[1]=x(()=>{},["stop"]))},[r("div",f,[r("div",{class:a(["inline-flex p-3 rounded-xl mb-4",b[t.variant]])},[t.variant==="success"?(s(),o("svg",m,e[2]||(e[2]=[r("path",{"stroke-linecap":"round","stroke-linejoin":"round","stroke-width":"2",d:"M5 13l4 4L19 7"},null,-1)]))):t.variant==="error"?(s(),o("svg",v,e[3]||(e[3]=[r("path",{"stroke-linecap":"round","stroke-linejoin":"round","stroke-width":"2",d:"M6 18L18 6M6 6l12 12"},null,-1)]))):(s(),o("svg",h,e[4]||(e[4]=[r("path",{"stroke-linecap":"round","stroke-linejoin":"round","stroke-width":"2",d:"M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"},null,-1)])))],2),r("p",w,p(t.message),1)]),r("div",C,[r("button",{onClick:e[0]||(e[0]=y=>l("close")),class:a(["flex-1 px-4 py-3 rounded-xl text-white transition-all duration-200",u[t.variant]])}," OK ",2)])])])):g("",!0)}});export{B as _};
+1
-1
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
+1
-1
File diff suppressed because one or more lines are too long
+1
-1
File diff suppressed because one or more lines are too long
+1
-1
File diff suppressed because one or more lines are too long
+1
-1
File diff suppressed because one or more lines are too long
+1
-1
@@ -1,4 +1,4 @@
import{L as J,a as zl,r as ut,o as Hl,$ as Ul,P as Rn,D as ql,b as tt,e as Z,g as Yt,t as Is,w as Kl,v as Vl,X as Ji,j as Tn,s as jl,p as it,x as Gl}from"./index-BfUIlcDy.js";/**
import{L as J,a as zl,r as ut,o as Hl,$ as Ul,P as Rn,D as ql,b as tt,e as Z,g as Yt,t as Is,w as Kl,v as Vl,X as Ji,j as Tn,s as jl,p as it,x as Gl}from"./index-DyUIpN7m.js";/**
 * Copyright (c) 2014-2024 The xterm.js authors. All rights reserved.
 * @license MIT
 *
+1
-1
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
+1
-1
@@ -1 +1 @@
import{M as x,c as s}from"./index-BfUIlcDy.js";const l={7:-7.5,8:-10,9:-12.5,10:-15,11:-17.5,12:-20},d=-116,i=8,u=5;function y(t,e){return t-e}function S(t){return l[t]??l[i]}function f(t,e){const r=e+u;if(t<=e){const o=t<=e-5?0:1;return{bars:o,color:"text-red-600 dark:text-red-400",snr:t,quality:o===0?"none":"poor"}}if(t<r){const n=(t-e)/u<.5?2:3;return{bars:n,color:n===2?"text-orange-600 dark:text-orange-400":"text-yellow-600 dark:text-yellow-400",snr:t,quality:"fair"}}const a=t-r>=10?5:4;return{bars:a,color:a===5?"text-green-600 dark:text-green-400":"text-green-600 dark:text-green-300",snr:t,quality:a===5?"excellent":"good"}}function N(){const t=x(),e=s(()=>t.noiseFloorDbm??d),r=s(()=>t.stats?.config?.radio?.spreading_factor??i),c=s(()=>S(r.value));return{getSignalQuality:o=>{if(!o||o>0||o<-120)return{bars:0,color:"text-gray-400 dark:text-gray-500",snr:-999,quality:"none"};const n=y(o,e.value),g=Math.max(-30,Math.min(20,n));return f(g,c.value)},noiseFloor:e,spreadingFactor:r,minSNR:c}}export{N as u};
import{M as x,c as s}from"./index-DyUIpN7m.js";const l={7:-7.5,8:-10,9:-12.5,10:-15,11:-17.5,12:-20},d=-116,i=8,u=5;function y(t,e){return t-e}function S(t){return l[t]??l[i]}function f(t,e){const r=e+u;if(t<=e){const o=t<=e-5?0:1;return{bars:o,color:"text-red-600 dark:text-red-400",snr:t,quality:o===0?"none":"poor"}}if(t<r){const n=(t-e)/u<.5?2:3;return{bars:n,color:n===2?"text-orange-600 dark:text-orange-400":"text-yellow-600 dark:text-yellow-400",snr:t,quality:"fair"}}const a=t-r>=10?5:4;return{bars:a,color:a===5?"text-green-600 dark:text-green-400":"text-green-600 dark:text-green-300",snr:t,quality:a===5?"excellent":"good"}}function N(){const t=x(),e=s(()=>t.noiseFloorDbm??d),r=s(()=>t.stats?.config?.radio?.spreading_factor??i),c=s(()=>S(r.value));return{getSignalQuality:o=>{if(!o||o>0||o<-120)return{bars:0,color:"text-gray-400 dark:text-gray-500",snr:-999,quality:"none"};const n=y(o,e.value),g=Math.max(-30,Math.min(20,n));return f(g,c.value)},noiseFloor:e,spreadingFactor:r,minSNR:c}}export{N as u};
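The signal-quality bundle above changes only its content hash, but it carries the dashboard's SNR-bar logic: RSSI minus the noise floor, clamped, then thresholded against a spreading-factor-dependent minimum SNR. A rough Python transcription of that thresholding, for reference (the SF table and the -116 dBm default are read off the minified source; the function name is illustrative):

# Sketch of the dashboard's signal-bar logic, transcribed from the minified JS above.
# MIN_SNR_BY_SF and the -116 dBm default noise floor come from that source;
# signal_bars() is an illustrative name, not an API in the repository.
MIN_SNR_BY_SF = {7: -7.5, 8: -10, 9: -12.5, 10: -15, 11: -17.5, 12: -20}
DEFAULT_NOISE_FLOOR_DBM = -116
FAIR_BAND_DB = 5

def signal_bars(rssi_dbm: float, sf: int = 8,
                noise_floor: float = DEFAULT_NOISE_FLOOR_DBM) -> int:
    min_snr = MIN_SNR_BY_SF.get(sf, MIN_SNR_BY_SF[8])
    snr = max(-30.0, min(20.0, rssi_dbm - noise_floor))  # clamp, like the JS
    if snr <= min_snr:
        return 0 if snr <= min_snr - 5 else 1            # none / poor
    if snr < min_snr + FAIR_BAND_DB:
        return 2 if (snr - min_snr) / FAIR_BAND_DB < 0.5 else 3  # fair
    return 5 if snr - (min_snr + FAIR_BAND_DB) >= 10 else 4      # good / excellent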
@@ -8,7 +8,7 @@
    <link rel="preconnect" href="https://fonts.googleapis.com">
    <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
    <link href="https://fonts.googleapis.com/css2?family=Noto+Sans:wght@400;500;600;700&display=swap" rel="stylesheet">
    <script type="module" crossorigin src="/assets/index-BfUIlcDy.js"></script>
    <script type="module" crossorigin src="/assets/index-DyUIpN7m.js"></script>
    <link rel="stylesheet" crossorigin href="/assets/index-D-3p9FIW.css">
  </head>
  <body>

+128
-105
@@ -14,15 +14,21 @@ from pymc_core.protocol.utils import PAYLOAD_TYPES, ROUTE_TYPES

from repeater import __version__
from repeater.data_acquisition import SQLiteHandler

from .api_endpoints import APIEndpoints
from .auth_endpoints import AuthEndpoints
from .auth.jwt_handler import JWTHandler
from .auth.api_tokens import APITokenManager
from .auth import cherrypy_tool  # Import to register the tool
from .auth.api_tokens import APITokenManager
from .auth.jwt_handler import JWTHandler
from .auth_endpoints import AuthEndpoints

# WebSocket support
try:
    from repeater.data_acquisition.websocket_handler import PacketWebSocket, init_websocket, broadcast_packet
    from repeater.data_acquisition.websocket_handler import (
        PacketWebSocket,
        broadcast_packet,
        init_websocket,
    )

    WEBSOCKET_AVAILABLE = True
except ImportError:
    WEBSOCKET_AVAILABLE = False
@@ -61,40 +67,41 @@ _log_buffer = LogBuffer(max_lines=100)

class DocEndpoint:
    """Simple wrapper to serve API docs at /doc"""

    def __init__(self, api_endpoints):
        self.api_endpoints = api_endpoints

    @cherrypy.expose
    def index(self, **kwargs):
        """Serve Swagger UI at /doc"""
        return self.api_endpoints.docs()

    @cherrypy.expose
    def docs(self):
        """Serve Swagger UI at /doc/docs"""
        return self.api_endpoints.docs()

    @cherrypy.expose
    def openapi_json(self):
        """Serve OpenAPI spec in JSON format at /doc/openapi.json"""
        import os
        import yaml
        import json

        spec_path = os.path.join(os.path.dirname(__file__), 'openapi.yaml')
        import os

        import yaml

        spec_path = os.path.join(os.path.dirname(__file__), "openapi.yaml")
        try:
            with open(spec_path, 'r') as f:
            with open(spec_path, "r") as f:
                spec_content = yaml.safe_load(f)

            cherrypy.response.headers['Content-Type'] = 'application/json'
            return json.dumps(spec_content).encode('utf-8')

            cherrypy.response.headers["Content-Type"] = "application/json"
            return json.dumps(spec_content).encode("utf-8")
        except FileNotFoundError:
            cherrypy.response.status = 404
            return json.dumps({"error": "OpenAPI spec not found"}).encode('utf-8')
            return json.dumps({"error": "OpenAPI spec not found"}).encode("utf-8")
        except Exception as e:
            cherrypy.response.status = 500
            return json.dumps({"error": f"Error loading OpenAPI spec: {e}"}).encode('utf-8')
            return json.dumps({"error": f"Error loading OpenAPI spec: {e}"}).encode("utf-8")


class StatsApp:
@@ -116,7 +123,7 @@ class StatsApp:
        self.pub_key = pub_key
        self.dashboard_template = None
        self.config = config or {}

        # Path to the compiled Vue.js application
        # Use web_path from config if provided, otherwise use default
        default_html_dir = os.path.join(os.path.dirname(__file__), "html")
@@ -124,8 +131,10 @@ class StatsApp:
        self.html_dir = web_path if web_path is not None else default_html_dir

        # Create nested API object for routing
        self.api = APIEndpoints(stats_getter, send_advert_func, self.config, event_loop, daemon_instance, config_path)
        self.api = APIEndpoints(
            stats_getter, send_advert_func, self.config, event_loop, daemon_instance, config_path
        )

        # Create doc endpoint for API documentation
        self.doc = DocEndpoint(self.api)

@@ -134,7 +143,7 @@ class StatsApp:
        """Serve the Vue.js application index.html."""
        index_path = os.path.join(self.html_dir, "index.html")
        try:
            with open(index_path, 'r', encoding='utf-8') as f:
            with open(index_path, "r", encoding="utf-8") as f:
                return f.read()
        except FileNotFoundError:
            raise cherrypy.HTTPError(404, "Application not found. Please build the frontend first.")
@@ -148,19 +157,18 @@ class StatsApp:
        # Handle OPTIONS requests for any path
        if cherrypy.request.method == "OPTIONS":
            return ""

        # Let API routes pass through
        if args and args[0] == 'api':
        if args and args[0] == "api":
            raise cherrypy.NotFound()

        # Handle WebSocket routes
        if args and len(args) >= 2 and args[0] == 'ws' and args[1] == 'packets':
        if args and len(args) >= 2 and args[0] == "ws" and args[1] == "packets":
            # WebSocket tool will intercept this
            return ""

        # For all other routes, serve the Vue.js app (client-side routing)
        return self.index()


class HTTPStatsServer:
@@ -183,20 +191,29 @@ class HTTPStatsServer:
        self.port = port
        self.config = config or {}
        self.config_path = config_path

        # Initialize authentication handlers
        self._init_auth_handlers()

        self.app = StatsApp(
            stats_getter, node_name, pub_key, send_advert_func, config, event_loop, daemon_instance, config_path
            stats_getter,
            node_name,
            pub_key,
            send_advert_func,
            config,
            event_loop,
            daemon_instance,
            config_path,
        )

        # Create auth endpoints (APIEndpoints has the config_manager)
        self.auth_app = AuthEndpoints(self.config, self.jwt_handler, self.token_manager, self.app.api.config_manager)
        self.auth_app = AuthEndpoints(
            self.config, self.jwt_handler, self.token_manager, self.app.api.config_manager
        )

        # Create documentation endpoints as separate app
        self.doc_app = DocEndpoint(self.app.api)

        # Set up CORS at the server level if enabled
        self._cors_enabled = self.config.get("web", {}).get("cors_enabled", False)
        logger.info(f"CORS enabled: {self._cors_enabled}")
@@ -207,43 +224,46 @@ class HTTPStatsServer:
        repeater_config = self.config.get("repeater", {})
        security_config = repeater_config.get("security", {})
        jwt_secret = security_config.get("jwt_secret", "")

        if not jwt_secret:
            # Auto-generate JWT secret
            jwt_secret = secrets.token_hex(32)
            logger.warning("No JWT secret found in config, auto-generated one. Please save this to config.yaml:")
            logger.warning(
                "No JWT secret found in config, auto-generated one. Please save this to config.yaml:"
            )

            # Try to save to config if config_path is available
            if self.config_path:
                try:
                    import yaml
                    with open(self.config_path, 'r') as f:

                    with open(self.config_path, "r") as f:
                        config_data = yaml.safe_load(f) or {}

                    if 'repeater' not in config_data:
                        config_data['repeater'] = {}
                    if 'security' not in config_data['repeater']:
                        config_data['repeater']['security'] = {}
                    config_data['repeater']['security']['jwt_secret'] = jwt_secret

                    with open(self.config_path, 'w') as f:

                    if "repeater" not in config_data:
                        config_data["repeater"] = {}
                    if "security" not in config_data["repeater"]:
                        config_data["repeater"]["security"] = {}
                    config_data["repeater"]["security"]["jwt_secret"] = jwt_secret

                    with open(self.config_path, "w") as f:
                        yaml.dump(config_data, f, default_flow_style=False)

                    logger.info(f"Saved auto-generated JWT secret to {self.config_path}")
                except Exception as e:
                    logger.error(f"Failed to save JWT secret to config: {e}")

        # Initialize JWT handler with configurable expiry (default 1 hour)
        jwt_expiry_minutes = security_config.get("jwt_expiry_minutes", 60)
        self.jwt_handler = JWTHandler(jwt_secret, expiry_minutes=jwt_expiry_minutes)
        logger.info(f"JWT handler initialized (token expiry: {jwt_expiry_minutes} minutes)")

        # Initialize API token manager
        storage_dir = self.config.get("storage", {}).get("storage_dir", ".")

        # Ensure storage directory exists
        os.makedirs(storage_dir, exist_ok=True)

        # Initialize SQLiteHandler and APITokenManager
        self.sqlite_handler = SQLiteHandler(Path(storage_dir))
        self.token_manager = APITokenManager(self.sqlite_handler, jwt_secret)
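For reference, the auto-generation path above can be reproduced by hand before first start, so the server never has to write the secret itself. A minimal standalone sketch that mirrors the exact key layout used in the code above:

# Pre-seed config.yaml with a JWT secret; same keys and yaml calls as
# _init_auth_handlers above, just run ahead of time.
import secrets
import yaml

def seed_jwt_secret(config_path: str = "config.yaml") -> str:
    with open(config_path, "r") as f:
        config_data = yaml.safe_load(f) or {}
    security = config_data.setdefault("repeater", {}).setdefault("security", {})
    if not security.get("jwt_secret"):
        security["jwt_secret"] = secrets.token_hex(32)
        with open(config_path, "w") as f:
            yaml.dump(config_data, f, default_flow_style=False)
    return security["jwt_secret"]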
@@ -254,29 +274,25 @@ class HTTPStatsServer:
        # Configure CORS to allow Authorization header
        # cherrypy-cors will handle preflight requests automatically
        cherrypy_cors.install()

        logger.info("CORS support enabled with Authorization header")

    def _json_error_handler(self, status, message, traceback, version):
        """Return JSON error responses instead of HTML for API endpoints"""
        cherrypy.response.headers["Content-Type"] = "application/json"
        return json.dumps({
            "success": False,
            "error": message
        })
        return json.dumps({"success": False, "error": message})

    def start(self):

        try:

            if self._cors_enabled:
                self._setup_server_cors()

            default_html_dir = os.path.join(os.path.dirname(__file__), "html")
            web_path = self.config.get("web", {}).get("web_path")
            html_dir = web_path if web_path is not None else default_html_dir

            assets_dir = os.path.join(html_dir, "assets")
            next_dir = os.path.join(html_dir, "_next")

@@ -288,11 +304,11 @@ class HTTPStatsServer:
                # "tools.gzip.mime_types": ["application/json", "text/html", "text/plain"],
                # Ensure proper content types for static files
                "tools.staticfile.content_types": {
                    'js': 'application/javascript',
                    'css': 'text/css',
                    'html': 'text/html; charset=utf-8',
                    'svg': 'image/svg+xml',
                    'txt': 'text/plain'
                    "js": "application/javascript",
                    "css": "text/css",
                    "html": "text/html; charset=utf-8",
                    "svg": "image/svg+xml",
                    "txt": "text/plain",
                },
            },
            # Require authentication for all /api endpoints
@@ -330,7 +346,7 @@ class HTTPStatsServer:
                    "tools.staticfile.filename": os.path.join(html_dir, "favicon.ico"),
                },
            }

            # Add WebSocket configuration to main config if available
            if WEBSOCKET_AVAILABLE:
                try:
@@ -340,33 +356,34 @@ class HTTPStatsServer:
                        "tools.websocket.handler_cls": PacketWebSocket,
                        "tools.trailing_slash.on": False,
                        "tools.require_auth.on": False,
                        "tools.gzip.on": False,
                        "tools.gzip.on": False,
                    }
                    logger.info("WebSocket endpoint configured at /ws/packets")
                except Exception as e:
                    logger.error(f"Failed to initialize WebSocket: {e}")
                    import traceback

                    logger.error(traceback.format_exc())

            # Add CORS configuration if enabled
            if self._cors_enabled:
                cors_config = {
                    "cors.expose.on": True,
                    "tools.response_headers.on": True,
                    "tools.response_headers.headers": [
                        ('Access-Control-Allow-Origin', '*'),
                        ('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS'),
                        ('Access-Control-Allow-Headers', 'Authorization, Content-Type, X-API-Key'),
                        ('Access-Control-Allow-Credentials', 'true'),
                        ("Access-Control-Allow-Origin", "*"),
                        ("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS"),
                        ("Access-Control-Allow-Headers", "Authorization, Content-Type, X-API-Key"),
                        ("Access-Control-Allow-Credentials", "true"),
                    ],
                    # Disable automatic trailing slash redirects to prevent CORS issues
                    "tools.trailing_slash.on": False,
                }

                # Apply CORS to paths
                config["/"].update(cors_config)
                config["/api"].update(cors_config)

            # Add Vue.js assets support only if assets directory exists
            if os.path.isdir(assets_dir):
                config["/assets"] = {
@@ -374,12 +391,12 @@ class HTTPStatsServer:
                    "tools.staticdir.dir": assets_dir,
                    # Set proper content types for assets
                    "tools.staticdir.content_types": {
                        'js': 'application/javascript',
                        'css': 'text/css',
                        'map': 'application/json'
                        "js": "application/javascript",
                        "css": "text/css",
                        "map": "application/json",
                    },
                }

            # Add Next.js support only if _next directory exists
            if os.path.isdir(next_dir):
                config["/_next"] = {
@@ -387,9 +404,9 @@ class HTTPStatsServer:
                    "tools.staticdir.dir": next_dir,
                    # Set proper content types for Next.js assets
                    "tools.staticdir.content_types": {
                        'js': 'application/javascript',
                        'css': 'text/css',
                        'map': 'application/json'
                        "js": "application/javascript",
                        "css": "text/css",
                        "map": "application/json",
                    },
                }

@@ -421,13 +438,13 @@ class HTTPStatsServer:

            # Mount main app
            cherrypy.tree.mount(self.app, "/", config)

            # Mount auth endpoints
            auth_config = {
                "/": {
                    "tools.response_headers.on": True,
                    "tools.response_headers.headers": [
                        ('Content-Type', 'application/json'),
                        ("Content-Type", "application/json"),
                    ],
                    # Disable automatic trailing slash redirects
                    "tools.trailing_slash.on": False,
@@ -436,42 +453,48 @@ class HTTPStatsServer:
            if self._cors_enabled:
                auth_config["/"]["cors.expose.on"] = True
                # Add CORS headers for OPTIONS requests
                auth_config["/"]["tools.response_headers.headers"].extend([
                    ('Access-Control-Allow-Origin', '*'),
                    ('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS'),
                    ('Access-Control-Allow-Headers', 'Authorization, Content-Type, X-API-Key'),
                    ('Access-Control-Allow-Credentials', 'true'),
                ])
                auth_config["/"]["tools.response_headers.headers"].extend(
                    [
                        ("Access-Control-Allow-Origin", "*"),
                        ("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS"),
                        ("Access-Control-Allow-Headers", "Authorization, Content-Type, X-API-Key"),
                        ("Access-Control-Allow-Credentials", "true"),
                    ]
                )

            cherrypy.tree.mount(self.auth_app, "/auth", auth_config)

            # Mount documentation endpoints as separate app (no auth required for docs)
            doc_config = {
                "/": {
                    "tools.require_auth.on": False,  # Docs are publicly accessible
                    "tools.response_headers.on": True,
                    "tools.response_headers.headers": [
                        ('Content-Type', 'text/html; charset=utf-8'),
                        ("Content-Type", "text/html; charset=utf-8"),
                    ],
                    "tools.trailing_slash.on": False,
                }
            }
            if self._cors_enabled:
                doc_config["/"]["cors.expose.on"] = True
                doc_config["/"]["tools.response_headers.headers"].extend([
                    ('Access-Control-Allow-Origin', '*'),
                    ('Access-Control-Allow-Methods', 'GET, POST, OPTIONS'),
                    ('Access-Control-Allow-Headers', 'Authorization, Content-Type, X-API-Key'),
                ])
                doc_config["/"]["tools.response_headers.headers"].extend(
                    [
                        ("Access-Control-Allow-Origin", "*"),
                        ("Access-Control-Allow-Methods", "GET, POST, OPTIONS"),
                        ("Access-Control-Allow-Headers", "Authorization, Content-Type, X-API-Key"),
                    ]
                )

            cherrypy.tree.mount(self.doc_app, "/doc", doc_config)

            # Store auth handlers in cherrypy config for middleware access
            cherrypy.config.update({
                "jwt_handler": self.jwt_handler,
                "token_manager": self.token_manager,
                "security_config": self.config.get("security", {}),
            })
            cherrypy.config.update(
                {
                    "jwt_handler": self.jwt_handler,
                    "token_manager": self.token_manager,
                    "security_config": self.config.get("security", {}),
                }
            )

            # Completely disable access logging
            cherrypy.log.access_log.propagate = False

+12
-12
@@ -3,7 +3,7 @@ info:
  title: pyMC Repeater API
  description: |
    REST API for pyMC Repeater - LoRa mesh network repeater with room server functionality.

    ## Features
    - System statistics and monitoring
    - Packet history and analysis
@@ -726,7 +726,7 @@ paths:
      summary: Get RRD time-series data
      description: |
        Retrieve Round-Robin Database metrics for graphing.

        **Note:** This endpoint extracts parameters from the request internally.
        Parameters are handled automatically by the backend.
      responses:
@@ -821,7 +821,7 @@ paths:
      summary: Get system metrics graph data
      description: |
        Returns time-series data for system metrics like packet counts, RSSI, SNR, etc.

        Available metrics:
        - rx_count: Received packets
        - tx_count: Transmitted packets
@@ -1557,7 +1557,7 @@ paths:
      summary: Get ACL information for all identities
      description: |
        Get ACL configuration and statistics for all registered identities.

        Returns information including:
        - Identity name, type, and hash
        - Max clients allowed
@@ -1741,7 +1741,7 @@ paths:
      summary: Get room messages
      description: |
        Retrieve messages from a room with pagination.

        **Max Messages Per Room**: 32 (hard limit)
        - Older messages auto-deleted every 10 minutes
        - Cannot be increased beyond 32
@@ -1847,15 +1847,15 @@ paths:
      summary: Post message to room
      description: |
        Add a new message to a room server. Message will be distributed to all synced clients.

        **Special author values:**
        - `"server"` or `"system"` - System message, goes to ALL clients (API only)
        - Any hex string - Normal message, NOT sent to that client

        **Security:**
        - Radio messages cannot use server key (blocked)
        - API messages can use server key (for announcements)

        **Rate Limits:**
        - 10 messages/minute per author_pubkey
        - 160 bytes max message length
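A minimal Python sketch of exercising the room-message endpoint described in the hunk above. The full `paths:` entry is outside this hunk, so the URL path and JSON field names here are assumptions; only the documented limits (10 messages/minute, 160 bytes) come from the spec text:

# Hypothetical client for the message-post endpoint described above.
# The URL path and field names are assumptions; check the full OpenAPI spec.
import requests

def post_room_message(base: str, token: str, room: str, text: str,
                      author_pubkey: str = "server") -> dict:
    if len(text.encode()) > 160:
        raise ValueError("message exceeds the documented 160-byte limit")
    resp = requests.post(
        f"{base}/api/rooms/{room}/messages",  # assumed path
        headers={"Authorization": f"Bearer {token}"},
        json={"text": text, "author_pubkey": author_pubkey},  # assumed fields
        timeout=10,
    )
    resp.raise_for_status()
    return resp.json()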
@@ -1992,7 +1992,7 @@ paths:
      summary: Get room statistics
      description: |
        Get detailed statistics for one or all room servers.

        **Room Limits:**
        - 32 messages maximum per room (hard limit)
        - Messages auto-expire every 10 minutes
@@ -2101,7 +2101,7 @@ paths:
      summary: Get room clients
      description: |
        List all clients synced to a room with their status.

        **Client Filtering:**
        - Clients only receive messages where author_pubkey ≠ client_pubkey
        - unsynced_count shows pending messages for each client
@@ -2335,7 +2335,7 @@ components:
          type: boolean
          description: Client is currently active (synced within timeout period)
          example: true

    Identity:
      type: object
      required: [name, type, hash, public_key]
@@ -2385,7 +2385,7 @@ components:
          default: 32
          description: Maximum messages to keep (room_server only, hard limit 32)
          example: 32

    ACLClient:
      type: object
      required: [public_key, public_key_full, address, permissions]

@@ -152,7 +152,7 @@ if [ -n "$DEB_FILE" ]; then
    # Run lintian to check package quality
    log_step "Running lintian checks..."
    lintian "$DEB_FILE" || log_warn "Lintian found some issues (non-fatal)"

    log_info ""
    log_info "════════════════════════════════════════════════════════════"
    log_info "Production build complete!"

@@ -23,7 +23,7 @@ log_error() {
}

# Check if running as root or with sudo
if [ "$EUID" -ne 0 ]; then
if [ "$EUID" -ne 0 ]; then
    log_error "This script must be run with sudo or as root"
    exit 1
fi

Executable
+373
@@ -0,0 +1,373 @@
#!/usr/bin/env bash
# =============================================================================
# test_companion_api.sh — Smoke-test the companion REST + SSE endpoints
#
# Usage:
#   ./scripts/test_companion_api.sh                   # defaults
#   ./scripts/test_companion_api.sh -H 192.168.1.10   # custom host
#   ./scripts/test_companion_api.sh -p 9000           # custom port
#   ./scripts/test_companion_api.sh -k <api-key>      # use API key instead of JWT
#   ./scripts/test_companion_api.sh -P <pub_key_hex>  # target contact for send tests
#
# Requires: curl, jq
# =============================================================================

set -euo pipefail

# ----- Defaults -----
HOST="localhost"
PORT="8000"
USERNAME="admin"
PASSWORD=""
CLIENT_ID="test-companion-api"
API_KEY=""
TARGET_PUBKEY=""
COMPANION_NAME=""

# ----- Parse args -----
while getopts "H:p:u:w:k:P:c:h" opt; do
    case $opt in
        H) HOST="$OPTARG" ;;
        p) PORT="$OPTARG" ;;
        u) USERNAME="$OPTARG" ;;
        w) PASSWORD="$OPTARG" ;;
        k) API_KEY="$OPTARG" ;;
        P) TARGET_PUBKEY="$OPTARG" ;;
        c) COMPANION_NAME="$OPTARG" ;;
        h)
            echo "Usage: $0 [-H host] [-p port] [-u user] [-w password] [-k api_key] [-P target_pubkey] [-c companion_name]"
            exit 0
            ;;
        *) echo "Unknown option: -$opt" >&2; exit 1 ;;
    esac
done

BASE="http://${HOST}:${PORT}"
PASS=0
FAIL=0
SKIP=0

# ----- Colours -----
GREEN='\033[0;32m'
RED='\033[0;31m'
YELLOW='\033[0;33m'
CYAN='\033[0;36m'
NC='\033[0m'

# ----- Helpers -----

auth_header() {
    if [[ -n "$API_KEY" ]]; then
        echo "X-API-Key: ${API_KEY}"
    elif [[ -n "$TOKEN" ]]; then
        echo "Authorization: Bearer ${TOKEN}"
    else
        echo ""
    fi
}

# Run a test: name, expected_http_code, curl_args...
run_test() {
    local name="$1"
    local expect_code="$2"
    shift 2

    printf "  %-50s " "$name"

    local tmpfile
    tmpfile=$(mktemp)

    local http_code
    http_code=$(curl -s -o "$tmpfile" -w "%{http_code}" \
        -H "$(auth_header)" \
        -H "Content-Type: application/json" \
        "$@" 2>/dev/null) || true

    local body
    body=$(cat "$tmpfile")
    rm -f "$tmpfile"

    if [[ "$http_code" == "$expect_code" ]]; then
        local success
        success=$(echo "$body" | jq -r '.success // empty' 2>/dev/null) || true
        if [[ "$success" == "true" ]]; then
            printf "${GREEN}PASS${NC} (HTTP %s)\n" "$http_code"
            PASS=$((PASS + 1))
        elif [[ "$success" == "false" ]]; then
            local err
            err=$(echo "$body" | jq -r '.error // .data.reason // "unknown"' 2>/dev/null) || true
            printf "${YELLOW}PASS${NC} (HTTP %s, success=false: %s)\n" "$http_code" "$err"
            PASS=$((PASS + 1))
        else
            printf "${GREEN}PASS${NC} (HTTP %s)\n" "$http_code"
            PASS=$((PASS + 1))
        fi
    else
        printf "${RED}FAIL${NC} (expected HTTP %s, got %s)\n" "$expect_code" "$http_code"
        if [[ -n "$body" ]]; then
            echo "      $(echo "$body" | jq -c '.' 2>/dev/null || echo "$body" | head -c 200)"
        fi
        FAIL=$((FAIL + 1))
    fi
}

skip_test() {
    local name="$1"
    local reason="$2"
    printf "  %-50s ${YELLOW}SKIP${NC} (%s)\n" "$name" "$reason"
    SKIP=$((SKIP + 1))
}

# Pretty-print a JSON response
show_response() {
    local name="$1"
    shift
    printf "\n${CYAN}--- %s ---${NC}\n" "$name"
    curl -s -H "$(auth_header)" -H "Content-Type: application/json" "$@" 2>/dev/null | jq '.' 2>/dev/null || echo "(no JSON)"
    echo ""
}

# =============================================================================
# 0. Connectivity check
# =============================================================================

echo ""
echo "========================================"
echo "  Companion API Test Suite"
echo "  Target: ${BASE}"
echo "========================================"
echo ""

printf "Checking connectivity... "
if ! curl -sf -o /dev/null --connect-timeout 3 "${BASE}/api/needs_setup" 2>/dev/null; then
    printf "${RED}FAILED${NC}\n"
    echo "Cannot reach ${BASE}. Is the repeater running?"
    exit 1
fi
printf "${GREEN}OK${NC}\n"

# =============================================================================
# 1. Authentication
# =============================================================================

TOKEN=""

if [[ -n "$API_KEY" ]]; then
    echo ""
    echo "Using API key for authentication."
    TOKEN=""
elif [[ -n "$PASSWORD" ]]; then
    echo ""
    printf "Authenticating as '${USERNAME}'... "
    LOGIN_RESP=$(curl -s -X POST "${BASE}/auth/login" \
        -H "Content-Type: application/json" \
        -d "{\"username\":\"${USERNAME}\",\"password\":\"${PASSWORD}\",\"client_id\":\"${CLIENT_ID}\"}" 2>/dev/null)

    TOKEN=$(echo "$LOGIN_RESP" | jq -r '.token // empty' 2>/dev/null) || true
    if [[ -n "$TOKEN" ]]; then
        printf "${GREEN}OK${NC} (token received)\n"
    else
        printf "${RED}FAILED${NC}\n"
        echo "$LOGIN_RESP" | jq '.' 2>/dev/null || echo "$LOGIN_RESP"
        echo ""
        echo "Cannot authenticate. Provide -w <password> or -k <api_key>."
        exit 1
    fi
else
    echo ""
    echo "No password (-w) or API key (-k) provided."
    echo "Attempting unauthenticated requests (will fail if auth is required)."
    echo ""
fi

# =============================================================================
# 2. Read-only GET endpoints
# =============================================================================

echo ""
echo "--- GET endpoints (read-only) ---"

# Build companion_name query string if provided
QS=""
if [[ -n "$COMPANION_NAME" ]]; then
    QS="?companion_name=${COMPANION_NAME}"
fi

run_test "GET /api/companion/" 200 "${BASE}/api/companion/"
run_test "GET /api/companion/self_info" 200 "${BASE}/api/companion/self_info${QS}"
run_test "GET /api/companion/contacts" 200 "${BASE}/api/companion/contacts${QS}"
run_test "GET /api/companion/channels" 200 "${BASE}/api/companion/channels${QS}"
run_test "GET /api/companion/stats" 200 "${BASE}/api/companion/stats${QS}"
run_test "GET /api/companion/stats?type=core" 200 "${BASE}/api/companion/stats${QS:+${QS}&}${QS:+type=core}${QS:-?type=core}"

# Single contact lookup (needs a pub_key — grab the first one from contacts list)
FIRST_PUBKEY=$(curl -s -H "$(auth_header)" "${BASE}/api/companion/contacts${QS}" 2>/dev/null \
    | jq -r '.data[0].public_key // empty' 2>/dev/null) || true

if [[ -n "$FIRST_PUBKEY" ]]; then
    run_test "GET /api/companion/contact?pub_key=..." 200 \
        "${BASE}/api/companion/contact?pub_key=${FIRST_PUBKEY}${QS:+&companion_name=${COMPANION_NAME}}"
else
    skip_test "GET /api/companion/contact?pub_key=..." "no contacts available"
fi

# =============================================================================
# 3. Validation / error handling
# =============================================================================

echo ""
echo "--- Validation & error handling ---"

run_test "GET /api/companion/contact (no pub_key)" 400 "${BASE}/api/companion/contact"
run_test "GET /api/companion/contact (bad pub_key)" 400 "${BASE}/api/companion/contact?pub_key=zzzz"
run_test "POST send_text empty body" 400 -X POST "${BASE}/api/companion/send_text" -d '{}'
run_test "GET send_text (wrong method)" 405 "${BASE}/api/companion/send_text"

# =============================================================================
# 4. POST endpoints (write / send operations)
# =============================================================================

echo ""
echo "--- POST endpoints ---"

# Use TARGET_PUBKEY if provided, else use FIRST_PUBKEY from contacts list
PK="${TARGET_PUBKEY:-$FIRST_PUBKEY}"

if [[ -z "$PK" ]]; then
    skip_test "POST /api/companion/login" "no target pubkey (-P)"
    skip_test "POST /api/companion/request_status" "no target pubkey (-P)"
    skip_test "POST /api/companion/request_telemetry" "no target pubkey (-P)"
    skip_test "POST /api/companion/send_text" "no target pubkey (-P)"
    skip_test "POST /api/companion/send_command" "no target pubkey (-P)"
    skip_test "POST /api/companion/reset_path" "no target pubkey (-P)"
else
    # Build optional companion_name field for POST body
    CN_FIELD=""
    if [[ -n "$COMPANION_NAME" ]]; then
        CN_FIELD="\"companion_name\":\"${COMPANION_NAME}\","
    fi

    # Login (passwordless)
    run_test "POST /api/companion/login" 200 \
        -X POST "${BASE}/api/companion/login" \
        -d "{${CN_FIELD}\"pub_key\":\"${PK}\",\"password\":\"\"}"

    # Status request (may timeout — that's OK, we test the plumbing)
    run_test "POST /api/companion/request_status" 200 \
        -X POST "${BASE}/api/companion/request_status" \
        -d "{${CN_FIELD}\"pub_key\":\"${PK}\",\"timeout\":5}"

    # Telemetry request
    run_test "POST /api/companion/request_telemetry" 200 \
        -X POST "${BASE}/api/companion/request_telemetry" \
        -d "{${CN_FIELD}\"pub_key\":\"${PK}\",\"timeout\":5}"

    # Send text
    run_test "POST /api/companion/send_text" 200 \
        -X POST "${BASE}/api/companion/send_text" \
        -d "{${CN_FIELD}\"pub_key\":\"${PK}\",\"text\":\"API test ping\"}"

    # Send command
    run_test "POST /api/companion/send_command" 200 \
        -X POST "${BASE}/api/companion/send_command" \
        -d "{${CN_FIELD}\"pub_key\":\"${PK}\",\"command\":\"status\"}"

    # Reset path
    run_test "POST /api/companion/reset_path" 200 \
        -X POST "${BASE}/api/companion/reset_path" \
        -d "{${CN_FIELD}\"pub_key\":\"${PK}\"}"
fi

# =============================================================================
# 5. Device configuration endpoints
# =============================================================================

echo ""
echo "--- Device configuration ---"

CN_FIELD=""
if [[ -n "$COMPANION_NAME" ]]; then
    CN_FIELD="\"companion_name\":\"${COMPANION_NAME}\","
fi

# Set advert name (we'll read it back to verify)
run_test "POST /api/companion/set_advert_name" 200 \
    -X POST "${BASE}/api/companion/set_advert_name" \
    -d "{${CN_FIELD}\"advert_name\":\"TestNode\"}"

run_test "POST /api/companion/set_advert_location" 200 \
    -X POST "${BASE}/api/companion/set_advert_location" \
    -d "{${CN_FIELD}\"latitude\":37.7749,\"longitude\":-122.4194}"

# =============================================================================
# 6. SSE event stream (quick connect/disconnect test)
# =============================================================================

echo ""
echo "--- SSE event stream ---"

SSE_URL="${BASE}/api/companion/events"
if [[ -n "$TOKEN" ]]; then
    SSE_URL="${SSE_URL}?token=${TOKEN}"
elif [[ -n "$API_KEY" ]]; then
    # SSE via EventSource doesn't support custom headers; API key in query not supported
    # so we just test that the endpoint responds
    SSE_URL="${SSE_URL}"
fi

printf "  %-50s " "SSE /api/companion/events (3s sample)"

SSE_TMP=$(mktemp)
# Connect for 3 seconds, capture whatever comes
curl -s -N --max-time 3 \
    -H "$(auth_header)" \
    "$SSE_URL" > "$SSE_TMP" 2>/dev/null || true

SSE_LINES=$(wc -l < "$SSE_TMP" | tr -d ' ')
SSE_FIRST=$(head -1 "$SSE_TMP")

if [[ "$SSE_LINES" -gt 0 ]] && echo "$SSE_FIRST" | grep -q "data:"; then
    # Check for connected event
    if grep -q '"connected"' "$SSE_TMP" 2>/dev/null; then
        printf "${GREEN}PASS${NC} (connected event received, %s lines)\n" "$SSE_LINES"
        PASS=$((PASS + 1))
    else
        printf "${YELLOW}PASS${NC} (got %s lines, no 'connected' event)\n" "$SSE_LINES"
        PASS=$((PASS + 1))
    fi
else
    printf "${RED}FAIL${NC} (no SSE data received)\n"
    FAIL=$((FAIL + 1))
fi
rm -f "$SSE_TMP"

# =============================================================================
# 7. Verbose output: show full response bodies
# =============================================================================

echo ""
echo "--- Sample responses ---"

show_response "Companion listing" "${BASE}/api/companion/"
show_response "Self info" "${BASE}/api/companion/self_info${QS}"
show_response "Contacts" "${BASE}/api/companion/contacts${QS}"
show_response "Stats (packets)" "${BASE}/api/companion/stats${QS}"

# =============================================================================
# Summary
# =============================================================================

echo ""
echo "========================================"
printf "  Results: ${GREEN}%d passed${NC}" "$PASS"
if [[ "$FAIL" -gt 0 ]]; then
    printf ", ${RED}%d failed${NC}" "$FAIL"
fi
if [[ "$SKIP" -gt 0 ]]; then
    printf ", ${YELLOW}%d skipped${NC}" "$SKIP"
fi
echo ""
echo "========================================"
echo ""

exit $FAIL
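The script above only samples the SSE stream with curl; a small Python consumer for the same endpoint, using the same ?token= query authentication the script builds, might look like the sketch below. The event payload shape is not specified in this diff, so the JSON handling is an assumption:

# Minimal SSE consumer for /api/companion/events, matching the ?token= auth
# used in test_companion_api.sh above. Payload shape is assumed to be JSON.
import json
import requests

def follow_events(base: str, token: str) -> None:
    url = f"{base}/api/companion/events?token={token}"
    with requests.get(url, stream=True, timeout=(3, None)) as resp:
        resp.raise_for_status()
        for line in resp.iter_lines(decode_unicode=True):
            if line and line.startswith("data:"):
                payload = line[len("data:"):].strip()
                try:
                    print(json.loads(payload))
                except json.JSONDecodeError:
                    print(payload)  # not JSON; print raw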
+118
-51
@@ -43,56 +43,76 @@ repeater_name=${repeater_name:-$default_name}

echo "Repeater name: $repeater_name"
echo ""
echo "=== Step 1: Select Hardware ==="

# Step 0.5: Radio type (SX1262 hardware vs KISS modem)
echo "=== Step 0.5: Select Radio Type ==="
echo ""
echo "  1) SX1262 hardware (SPI LoRa module - Raspberry Pi HAT, etc.)"
echo "  2) KISS modem (serial TNC - requires pyMC_core with KISS support)"
echo ""
read -p "Select radio type (1 or 2): " radio_type_sel

if [ ! -f "$HARDWARE_CONFIG" ]; then
    echo "Error: Hardware configuration file not found at $HARDWARE_CONFIG"
    exit 1
fi
if [ "$radio_type_sel" = "2" ]; then
    RADIO_TYPE="kiss"
    hw_key="kiss"
    hw_name="KISS modem"
    echo "Selected: $hw_name"
    echo ""
else
    RADIO_TYPE="sx1262"
    echo "Selected: SX1262 hardware"
    echo ""
    echo "=== Step 1: Select Hardware ==="
    echo ""

# Parse hardware options from radio-settings.json
hw_index=0
declare -a hw_keys
declare -a hw_names

# Extract hardware keys and names using grep and sed
hw_data=$(grep -o '"[^"]*":\s*{' "$HARDWARE_CONFIG" | grep -v hardware | sed 's/"\([^"]*\)".*/\1/' | while read hw_key; do
    hw_name=$(grep -A 1 "\"$hw_key\"" "$HARDWARE_CONFIG" | grep "\"name\"" | sed 's/.*"name":\s*"\([^"]*\)".*/\1/')
    if [ -n "$hw_name" ]; then
        echo "$hw_key|$hw_name"
    if [ ! -f "$HARDWARE_CONFIG" ]; then
        echo "Error: Hardware configuration file not found at $HARDWARE_CONFIG"
        exit 1
    fi
done)

while IFS='|' read -r hw_key hw_name; do
    if [ -n "$hw_key" ] && [ -n "$hw_name" ]; then
        echo "  $((hw_index + 1))) $hw_name ($hw_key)"
        hw_keys[$hw_index]="$hw_key"
        hw_names[$hw_index]="$hw_name"
        ((hw_index++))
    # Parse hardware options from radio-settings.json
    hw_index=0
    declare -a hw_keys
    declare -a hw_names

    # Extract hardware keys and names using grep and sed
    hw_data=$(grep -o '"[^"]*":\s*{' "$HARDWARE_CONFIG" | grep -v hardware | sed 's/"\([^"]*\)".*/\1/' | while read hw_key; do
        hw_name=$(grep -A 1 "\"$hw_key\"" "$HARDWARE_CONFIG" | grep "\"name\"" | sed 's/.*"name":\s*"\([^"]*\)".*/\1/')
        if [ -n "$hw_name" ]; then
            echo "$hw_key|$hw_name"
        fi
    done)

    while IFS='|' read -r hw_key hw_name; do
        if [ -n "$hw_key" ] && [ -n "$hw_name" ]; then
            echo "  $((hw_index + 1))) $hw_name ($hw_key)"
            hw_keys[$hw_index]="$hw_key"
            hw_names[$hw_index]="$hw_name"
            ((hw_index++))
        fi
    done <<< "$hw_data"

    if [ "$hw_index" -eq 0 ]; then
        echo "Error: No hardware configurations found"
        exit 1
    fi
done <<< "$hw_data"

if [ "$hw_index" -eq 0 ]; then
    echo "Error: No hardware configurations found"
    exit 1
    echo ""
    read -p "Select hardware (1-$hw_index): " hw_selection

    if ! [ "$hw_selection" -ge 1 ] 2>/dev/null || [ "$hw_selection" -gt "$hw_index" ]; then
        echo "Error: Invalid selection"
        exit 1
    fi

    selected_hw=$((hw_selection - 1))
    hw_key="${hw_keys[$selected_hw]}"
    hw_name="${hw_names[$selected_hw]}"

    echo "Selected: $hw_name"
    echo ""
fi

echo ""
read -p "Select hardware (1-$hw_index): " hw_selection

if ! [ "$hw_selection" -ge 1 ] 2>/dev/null || [ "$hw_selection" -gt "$hw_index" ]; then
    echo "Error: Invalid selection"
    exit 1
fi

selected_hw=$((hw_selection - 1))
hw_key="${hw_keys[$selected_hw]}"
hw_name="${hw_names[$selected_hw]}"

echo "Selected: $hw_name"
echo ""

# Step 2: Radio Settings Selection
echo "=== Step 2: Select Radio Settings ==="
echo ""
@@ -104,13 +124,13 @@ API_RESPONSE=$(curl -s --max-time 5 https://api.meshcore.nz/api/v1/config 2>/dev
if [ -z "$API_RESPONSE" ]; then
    echo "Warning: Failed to fetch configuration from API (timeout or error)"
    echo "Using local radio presets file..."

    LOCAL_PRESETS="$SCRIPT_DIR/radio-presets.json"
    if [ ! -f "$LOCAL_PRESETS" ]; then
        echo "Error: Local radio presets file not found at $LOCAL_PRESETS"
        exit 1
    fi

    API_RESPONSE=$(cat "$LOCAL_PRESETS")
    if [ -z "$API_RESPONSE" ]; then
        echo "Error: Failed to read local radio presets file"
@@ -179,14 +199,44 @@ echo "Selected: $title"
echo "Frequency: ${freq}MHz, SF: $sf, BW: $bw, CR: $cr"
echo ""

# Update config.yaml
# KISS modem: prompt for serial port and baud rate
if [ "$RADIO_TYPE" = "kiss" ]; then
    echo "=== KISS Modem Settings ==="
    echo ""
    default_port="/dev/ttyUSB0"
    read -p "Serial port [$default_port]: " kiss_port
    kiss_port=${kiss_port:-$default_port}
    default_baud="9600"
    read -p "Baud rate [$default_baud]: " kiss_baud
    kiss_baud=${kiss_baud:-$default_baud}
    echo "KISS: port=$kiss_port, baud_rate=$kiss_baud"
    echo ""
fi

# Ensure config file exists (create from example if missing)
if [ ! -f "$CONFIG_FILE" ]; then
    echo "Error: Config file not found at $CONFIG_FILE"
    exit 1
    if [ -f "$CONFIG_DIR/config.yaml.example" ]; then
        cp "$CONFIG_DIR/config.yaml.example" "$CONFIG_FILE"
        echo "Created $CONFIG_FILE from config.yaml.example"
    elif [ -f "$SCRIPT_DIR/config.yaml.example" ]; then
        cp "$SCRIPT_DIR/config.yaml.example" "$CONFIG_FILE"
        echo "Created $CONFIG_FILE from $SCRIPT_DIR/config.yaml.example"
    else
        echo "Error: Config file not found at $CONFIG_FILE"
        echo "Copy config.yaml.example to config.yaml or run from a directory that has it."
        exit 1
    fi
fi

echo "Updating configuration..."

# Radio type (sx1262 or kiss)
if grep -q "^radio_type:" "$CONFIG_FILE"; then
    sed "${SED_OPTS[@]}" "s/^radio_type:.*/radio_type: $RADIO_TYPE/" "$CONFIG_FILE"
else
    { echo "radio_type: $RADIO_TYPE"; cat "$CONFIG_FILE"; } > "$CONFIG_FILE.tmp" && mv "$CONFIG_FILE.tmp" "$CONFIG_FILE"
fi

# Repeater name
sed "${SED_OPTS[@]}" "s/^  node_name:.*/  node_name: \"$repeater_name\"/" "$CONFIG_FILE"

@@ -196,7 +246,18 @@ sed "${SED_OPTS[@]}" "s/^  spreading_factor:.*/  spreading_factor: $sf/" "$CONFI
sed "${SED_OPTS[@]}" "s/^  bandwidth:.*/  bandwidth: $bw_hz/" "$CONFIG_FILE"
sed "${SED_OPTS[@]}" "s/^  coding_rate:.*/  coding_rate: $cr/" "$CONFIG_FILE"

# Extract hardware-specific settings from radio-settings.json
# KISS modem: update kiss section
if [ "$RADIO_TYPE" = "kiss" ]; then
    if grep -q "^kiss:" "$CONFIG_FILE"; then
        sed "${SED_OPTS[@]}" "s/^  port:.*/  port: \"$kiss_port\"/" "$CONFIG_FILE"
        sed "${SED_OPTS[@]}" "s/^  baud_rate:.*/  baud_rate: $kiss_baud/" "$CONFIG_FILE"
    else
        printf '\nkiss:\n  port: "%s"\n  baud_rate: %s\n' "$kiss_port" "$kiss_baud" >> "$CONFIG_FILE"
    fi
fi

# Extract hardware-specific settings from radio-settings.json (SX1262 only)
if [ "$RADIO_TYPE" = "sx1262" ]; then
    echo "Extracting hardware configuration from $HARDWARE_CONFIG..."

    # Use jq to extract all fields from the selected hardware
@@ -257,7 +318,7 @@ else
    [ -n "$irq_pin" ] && sed "${SED_OPTS[@]}" "s/^  irq_pin:.*/  irq_pin: $irq_pin/" "$CONFIG_FILE"
    [ -n "$txen_pin" ] && sed "${SED_OPTS[@]}" "s/^  txen_pin:.*/  txen_pin: $txen_pin/" "$CONFIG_FILE"
    [ -n "$rxen_pin" ] && sed "${SED_OPTS[@]}" "s/^  rxen_pin:.*/  rxen_pin: $rxen_pin/" "$CONFIG_FILE"

    # Handle LED pins - add if missing, update if present
    if [ -n "$txled_pin" ]; then
        if grep -q "^  txled_pin:" "$CONFIG_FILE"; then
@@ -267,7 +328,7 @@ else
            sed "${SED_OPTS[@]}" "/^  rxen_pin:.*/a\\  txled_pin: $txled_pin" "$CONFIG_FILE"
        fi
    fi

    if [ -n "$rxled_pin" ]; then
        if grep -q "^  rxled_pin:" "$CONFIG_FILE"; then
            sed "${SED_OPTS[@]}" "s/^  rxled_pin:.*/  rxled_pin: $rxled_pin/" "$CONFIG_FILE"
@@ -276,7 +337,7 @@ else
            sed "${SED_OPTS[@]}" "/^  txled_pin:.*/a\\  rxled_pin: $rxled_pin" "$CONFIG_FILE"
        fi
    fi

    [ -n "$tx_power" ] && sed "${SED_OPTS[@]}" "s/^  tx_power:.*/  tx_power: $tx_power/" "$CONFIG_FILE"
    [ -n "$preamble_length" ] && sed "${SED_OPTS[@]}" "s/^  preamble_length:.*/  preamble_length: $preamble_length/" "$CONFIG_FILE"

@@ -336,6 +397,7 @@ else
            fi
        fi
    fi
fi

# Cleanup
rm -f /tmp/radio_*_* "$CONFIG_FILE.bak"
@@ -344,14 +406,19 @@ echo "Configuration updated successfully!"
echo ""
echo "Applied Configuration:"
echo "  Repeater Name: $repeater_name"
echo "  Radio Type: $RADIO_TYPE"
echo "  Hardware: $hw_name ($hw_key)"
echo "  Frequency: ${freq}MHz (${freq_hz}Hz)"
echo "  Spreading Factor: $sf"
echo "  Bandwidth: ${bw}kHz (${bw_hz}Hz)"
echo "  Coding Rate: $cr"
if [ "$RADIO_TYPE" = "kiss" ]; then
    echo "  KISS Port: $kiss_port"
    echo "  KISS Baud Rate: $kiss_baud"
fi
echo ""
echo "Hardware GPIO Configuration:"
if [ -n "$bus_id" ]; then
if [ "$RADIO_TYPE" = "sx1262" ] && [ -n "$bus_id" ]; then
    echo "  Bus ID: $bus_id"
    echo "  Chip Select: $cs_id (pin $cs_pin)"
    echo "  Reset Pin: $reset_pin"
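After the setup script runs, the values it writes can be sanity-checked independently. A small sketch, reading only the keys the sed edits above touch (`radio_type` at the top level, plus the `kiss` section when the KISS backend is selected):

# Quick sanity check of the values the setup script writes; key names
# mirror the sed edits above.
import yaml

def check_radio_config(config_path: str = "config.yaml") -> None:
    with open(config_path) as f:
        cfg = yaml.safe_load(f) or {}
    radio_type = cfg.get("radio_type", "sx1262")
    print(f"radio_type: {radio_type}")
    if radio_type == "kiss":
        kiss = cfg.get("kiss", {})
        print(f"kiss.port: {kiss.get('port')}, kiss.baud_rate: {kiss.get('baud_rate')}")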
@@ -1031,10 +1031,6 @@ GOOD_PACKETS = [
     "Flood, path has 1 prior hop",
     lambda: _make_flood_packet(payload=b"\xDE\xAD", path=b"\x42")),

    ("good_flood_path_near_max",
     "Flood, path = MAX_PATH_SIZE - 1 (room for our hash)",
     lambda: _make_flood_packet(payload=b"\xFF", path=bytes(range(MAX_PATH_SIZE - 1)))),

    ("good_flood_binary_payload",
     "Flood, all-zero payload",
     lambda: _make_flood_packet(payload=b"\x00" * 16)),
@@ -1112,6 +1108,11 @@ BAD_PACKETS = [
     lambda: _make_flood_packet(payload=b"\x01", path=bytes(range(MAX_PATH_SIZE))),
     "Path length"),

    ("bad_flood_path_near_max",
     "Flood, path = MAX_PATH_SIZE - 1 (63 hops; path_len encodes 0-63, cannot append)",
     lambda: _make_flood_packet(payload=b"\xFF", path=bytes(range(MAX_PATH_SIZE - 1))),
     "cannot append"),

    ("bad_path_over_max",
     "Path exceeds MAX_PATH_SIZE",
     lambda: _make_flood_packet(payload=b"\x01", path=bytes(range(MAX_PATH_SIZE + 5))),
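The reclassification above hinges on the path-length encoding: a path of MAX_PATH_SIZE - 1 used to be a "good" packet, but since path_len encodes 0-63, a 63-hop path leaves no room for the repeater to append its own hash. A one-function sketch of that boundary (taking MAX_PATH_SIZE to be 64, consistent with the test descriptions above):

# Why path = MAX_PATH_SIZE - 1 moved from GOOD_PACKETS to BAD_PACKETS:
# appending one more hop hash must still yield a length encodable in 0-63.
# MAX_PATH_SIZE = 64 is an assumption based on the comments above.
MAX_PATH_SIZE = 64

def can_append_hop(path: bytes) -> bool:
    return len(path) + 1 <= MAX_PATH_SIZE - 1

assert can_append_hop(bytes(range(MAX_PATH_SIZE - 2)))      # 62 hops: repeatable
assert not can_append_hop(bytes(range(MAX_PATH_SIZE - 1)))  # 63 hops: cannot append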