Compare commits

..

40 Commits

Author SHA1 Message Date
l5y
4548f750d3 Add connection recovery for TCP interface (#186)
* Add connection recovery for TCP interface

* run black
2025-09-27 18:52:56 +02:00
l5y
31f02010d3 bump version to 0.3 (#191)
* bump version to 0.3

* update readme
2025-09-27 18:52:41 +02:00
l5y
ec1ea5cbba Upgrade styles and fix interface issues (#190) 2025-09-27 18:46:56 +02:00
l5y
8500c59755 some updates in the front (#188)
* ok, i'm added correct image loader

* and some css

* make zebra in a table and add a background and some little changes in app

* for example you can check how it work on https://vrs.kdd2105.ru

* fix ai comments

---------

Co-authored-by: dkorotkih2014-hub <d.korotkih2014@gmail.com>
2025-09-27 18:18:02 +02:00
l5y
556dd6b51c Update last heard on node entry change (#185) 2025-09-26 20:43:53 +02:00
l5y
3863e2d63d Populate chat metadata for unknown nodes (#182)
* Populate chat metadata for unknown nodes

* run rufo

* fix comments

* run rufo
2025-09-26 16:45:42 +02:00
l5y
9e62621819 Update role colors to new palette (#183) 2025-09-26 16:08:14 +02:00
l5y
c8c7c8cc05 Add placeholder nodes for unknown senders (#181)
* Add placeholder nodes for unknown senders

* run rufo
2025-09-26 14:24:30 +02:00
l5y
5116313ab0 fix: update role colors and ordering for firmware 2.7.10 (#180) 2025-09-26 13:30:34 +02:00
l5y
66389dd27c Handle plain IP addresses in mesh TCP detection (#154)
* Fix TCP target detection for plain IPs

* run black
2025-09-26 13:25:42 +02:00
l5y
ee6501243f Handle encrypted messages (#173)
* Handle encrypted messages

* Remove redundant message node columns

* Preserve original numeric message senders

* Normalize message sender IDs in API responses

* Exclude encrypted messages from API responses

* run rufo
2025-09-24 07:34:28 +02:00
l5y
8dd912175d Add fallback display names for unnamed nodes (#171) 2025-09-23 19:06:28 +02:00
l5y
02f9fb45e2 Ensure routers render above other node types (#169) 2025-09-23 18:59:34 +02:00
l5y
4254dbda91 Reorder lint steps after tests in CI (#168) 2025-09-23 18:31:38 +02:00
l5y
a46bed1c33 Handle proto values in nodeinfo payloads (#167) 2025-09-23 18:31:22 +02:00
l5y
d711300442 Remove raw payload storage from database schema (#166) 2025-09-23 17:29:08 +02:00
l5y
98a8203591 Add POSITION_APP ingestion and API support (#160)
* Add POSITION_APP ingestion and API support

* Adjust mesh receive subscriptions and priorities

* run linters
2025-09-23 16:42:51 +02:00
l5y
084c5ae158 Add support for NODEINFO_APP packets (#159)
* Add support for NODEINFO_APP packets

* run black
2025-09-23 14:40:35 +02:00
l5y
17018aeb19 Derive SEO metadata from existing config (#153) 2025-09-23 08:20:42 +02:00
l5y
74b3da6f00 tests: create helper script to dump all mesh data from serial (#152)
* tests: create helper script to dump all mesh data from serial

* tests: use public callbacks for dump script
2025-09-23 08:09:31 +02:00
l5y
ab1217a8bf Limit chat log to recent entries (#151) 2025-09-22 18:54:09 +02:00
l5y
62de1480f7 Require time library before formatting ISO timestamps (#149)
* Require time library for ISO timestamp formatting

* Default to host networking in Compose
2025-09-22 09:21:04 +02:00
l5y
ab2e9b06e1 Define potatomesh network (#148) 2025-09-22 08:58:39 +02:00
l5y
e91ad24cf9 Fix sqlite3 native extension on Alpine (#146) 2025-09-22 08:12:48 +02:00
l5y
2e543b7cd4 Allow binding to all interfaces in app.sh (#147) 2025-09-22 08:11:36 +02:00
l5y
db4353ccdc Force building sqlite3 gem on Alpine (#145) 2025-09-22 08:10:00 +02:00
l5y
5a610cf08a Support mock serial interface in CI (#143) 2025-09-21 10:00:30 +02:00
l5y
71b854998c Fix Docker workflow to build linux images (#142) 2025-09-21 09:39:09 +02:00
l5y
0a70ae4b3e Add clickable role filters to the map legend (#140)
* Make map legend role entries filter nodes

* Adjust map legend spacing and toggle text
2025-09-21 09:33:48 +02:00
l5y
6e709b0b67 Rebuild chat log on each refresh (#139) 2025-09-21 09:19:07 +02:00
l5y
a4256cee83 fix: retain runtime libs for alpine production (#138) 2025-09-21 09:18:55 +02:00
l5y
89f0b1bcfe fix: support windows ingestor build (#136)
* fix: support windows ingestor build

* fix: restore alpine build deps for ingestor (#137)
2025-09-20 22:00:45 +02:00
l5y
e8af3b2397 fix: use supported ruby image (#135) 2025-09-20 19:10:36 +00:00
Taylor Rose
812d3c851f feat: Add comprehensive Docker support (#122)
* feat: Add comprehensive Docker support

- Add multi-container Docker setup with web app and data ingestor
- Create production-ready Dockerfiles with multi-stage builds
- Add Docker Compose configurations for dev, prod, and custom environments
- Implement CI/CD pipeline with GitHub Actions for automated builds
- Add comprehensive Docker documentation and setup guides
- Include security scanning and multi-platform builds
- Support for Meshtastic device integration via serial access
- Persistent data storage with named volumes
- Health checks and monitoring capabilities

Addresses GitHub issue #120: Dockerize the project for easier community adoption

Files added:
- web/Dockerfile: Ruby web application container
- data/Dockerfile: Python data ingestor container
- data/requirements.txt: Python dependencies
- docker-compose.yml: Base Docker Compose configuration
- docker-compose.dev.yml: Development environment overrides
- docker-compose.prod.yml: Production environment overrides
- .env.example: Environment configuration template
- .dockerignore: Docker build context optimization
- .github/workflows/docker.yml: CI/CD pipeline
- DOCKER.md: Comprehensive Docker documentation

This implementation transforms PotatoMesh from a complex manual setup
to a single-command deployment: docker-compose up -d

* feat: Add Docker support with multi-architecture builds

- Add web/Dockerfile with Ruby 3.4 Alpine base
- Add data/Dockerfile with Python 3.13 Alpine base
- Use Alpine's SQLite3 packages for cross-platform compatibility
- Support AMD64, ARM64, ARMv7, and Windows architectures
- Multi-stage builds for optimized production images
- Non-root user security and proper file permissions

* feat: Add Docker Compose configurations for different environments

- docker-compose.yml: Production setup with GHCR images
- docker-compose.dev.yml: Development setup with local builds
- docker-compose.raspberry-pi.yml: Pi-optimized with resource limits
- Support for all architectures (AMD64, ARM64, ARMv7)
- Proper volume mounts and network configuration
- Environment variable configuration for different deployments

* feat: Add GitHub Actions workflows for Docker CI/CD

- docker.yml: Multi-architecture build and push to GHCR
- test-raspberry-pi-hardware.yml: ARM64 testing with QEMU
- Support for manual workflow dispatch with version input
- Build and test all Docker variants (AMD64, ARM64, ARMv7, Windows)
- Automated publishing to GitHub Container Registry
- Comprehensive testing for Raspberry Pi deployments

* feat: Add Docker documentation and configuration tools

- docs/DOCKER.md: Comprehensive Docker setup and usage guide
- configure.sh: Interactive configuration script for deployment
- Platform-specific setup instructions (macOS, Linux, Windows)
- Raspberry Pi optimization guidelines
- Environment variable configuration
- Troubleshooting and best practices

* docs: Update README with comprehensive Docker support

- Add Docker Quick Start section with published images
- Add comprehensive table of all available GHCR images
- Include architecture-specific pull commands
- Update manual installation instructions
- Add platform-specific deployment examples
- Document all supported architectures and use cases

* chore: Update dependencies and project configuration

- Update data/requirements.txt for Python 3.13 compatibility
- Add v0.3.0 changelog entry documenting Docker support
- Update .gitignore for Docker-related files
- Prepare project for Docker deployment

* feat: Update web interface for Denver Mesh Network

- Update default configuration to center on Denver, Colorado
- Set SITE_NAME to 'Denver Mesh Network'
- Configure 915MHz frequency for US region
- Update map center coordinates (39.7392, -104.9903)
- Set appropriate node distance and Matrix room settings

* Update Docker configuration and documentation

- Remove Raspberry Pi specific Docker files and workflows
- Update Docker workflow configuration
- Consolidate Docker documentation
- Add AGENTS.md for opencode integration
- Update README with current project status

* cleanup: workflow/readme

* Update README.md

Co-authored-by: l5y <220195275+l5yth@users.noreply.github.com>

* Add .env.example and simplify documentation

- Add comprehensive .env.example with all environment variables
- Update web Dockerfile to use Berlin coordinates instead of Denver
- Simplify README Docker quick start with helpful comments
- Greatly simplify DOCKER.md with only essential information

* cleanup: readme

* Remove Stadia API key references

- Remove STADIA_API_KEY from docker-compose.yml environment variables
- Remove Stadia Maps configuration section from configure.sh
- Remove Stadia API key references from .env.example
- Simplify configuration to use basic OpenStreetMap tiles only

* quickfix

* cleanup: remove example usage from docker gh action output

---------

Co-authored-by: l5y <220195275+l5yth@users.noreply.github.com>
2025-09-20 21:04:19 +02:00
l5y
608d1e0396 bump version to 0.2.1 (#134) 2025-09-20 20:59:21 +02:00
l5y
63787454ca Fix dark mode tile styling on new map tiles (#132)
* Ensure dark mode styling applied to new map tiles

* Ensure dark mode filters apply to new map tiles

* Improve map tile filter handling
2025-09-20 18:13:18 +02:00
l5y
55c1384f80 Switch map tiles to OSM HOT and add theme filters (#130)
* Switch map tiles to OSM HOT and add theme filters

* Ensure OSM tiles are filtered for theme modes

* Ensure tile filters update when toggling dark mode

* run rufo
2025-09-19 23:02:55 +02:00
l5y
6750d7bc12 Add footer version display (#128)
* Add footer version display

* Ensure footer version text matches spec
2025-09-19 11:22:28 +02:00
l5y
d33fcaf5db Add responsive controls for map legend (#129) 2025-09-19 11:21:00 +02:00
l5y
7974fd9597 update changelog (#119) 2025-09-17 16:57:32 +02:00
30 changed files with 4591 additions and 288 deletions

76
.dockerignore Normal file
View File

@@ -0,0 +1,76 @@
# Git
.git
.gitignore
# Documentation
README.md
CHANGELOG.md
*.md
# Docker files
docker-compose*.yml
.dockerignore
# Environment files
.env*
!.env.example
# Logs
*.log
logs/
# Runtime data
*.pid
*.seed
*.pid.lock
# Coverage directory used by tools like istanbul
coverage/
# nyc test coverage
.nyc_output
# Dependency directories
node_modules/
vendor/
# Optional npm cache directory
.npm
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
# IDE files
.vscode/
.idea/
*.swp
*.swo
*~
# OS generated files
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
# Test files
tests/
spec/
test_*
*_test.py
*_spec.rb
# Development files
ai_docs/

77
.env.example Normal file
View File

@@ -0,0 +1,77 @@
# PotatoMesh Environment Configuration
# Copy this file to .env and customize for your setup
# =============================================================================
# REQUIRED SETTINGS
# =============================================================================
# API authentication token (required for ingestor communication)
# Generate a secure token: openssl rand -hex 32
API_TOKEN=your-secure-api-token-here
# Meshtastic device path (required for ingestor)
# Common paths:
# - Linux: /dev/ttyACM0, /dev/ttyUSB0
# - macOS: /dev/cu.usbserial-*
# - Windows (WSL): /dev/ttyS*
MESH_SERIAL=/dev/ttyACM0
# =============================================================================
# SITE CUSTOMIZATION
# =============================================================================
# Your mesh network name
SITE_NAME=My Meshtastic Network
# Default Meshtastic channel
DEFAULT_CHANNEL=#MediumFast
# Default frequency for your region
# Common frequencies: 868MHz (Europe), 915MHz (US), 433MHz (Worldwide)
DEFAULT_FREQUENCY=868MHz
# Map center coordinates (latitude, longitude)
# Berlin, Germany: 52.502889, 13.404194
# Denver, Colorado: 39.7392, -104.9903
# London, UK: 51.5074, -0.1278
MAP_CENTER_LAT=52.502889
MAP_CENTER_LON=13.404194
# Maximum distance to show nodes (kilometers)
MAX_NODE_DISTANCE_KM=50
# =============================================================================
# OPTIONAL INTEGRATIONS
# =============================================================================
# Matrix chat room for your community (optional)
# Format: !roomid:matrix.org
MATRIX_ROOM='#meshtastic-berlin:matrix.org'
# =============================================================================
# ADVANCED SETTINGS
# =============================================================================
# Debug mode (0=off, 1=on)
DEBUG=0
# Docker Compose networking profile
# Leave unset for Linux hosts (default host networking).
# Set to "bridge" on Docker Desktop (macOS/Windows) if host networking
# is unavailable.
# COMPOSE_PROFILES=bridge
# Meshtastic snapshot interval (seconds)
MESH_SNAPSHOT_SECS=60
# Meshtastic channel index (0=primary, 1=secondary, etc.)
MESH_CHANNEL_INDEX=0
# Database settings
DB_BUSY_TIMEOUT_MS=5000
DB_BUSY_MAX_RETRIES=5
DB_BUSY_RETRY_DELAY=0.05
# Application settings
MAX_JSON_BODY_BYTES=1048576

18
.github/workflows/README.md vendored Normal file
View File

@@ -0,0 +1,18 @@
# GitHub Actions Workflows
## Workflows
- **`docker.yml`** - Build and push Docker images to GHCR
- **`codeql.yml`** - Security scanning
- **`python.yml`** - Python testing
- **`ruby.yml`** - Ruby testing
## Usage
```bash
# Build locally
docker-compose build
# Deploy
docker-compose up -d
```

171
.github/workflows/docker.yml vendored Normal file
View File

@@ -0,0 +1,171 @@
name: Build and Push Docker Images
on:
push:
tags: [ 'v*' ]
workflow_dispatch:
inputs:
version:
description: 'Version to publish (e.g., 1.0.0)'
required: true
default: '1.0.0'
publish_all_variants:
description: 'Publish all Docker image variants (latest tag)'
type: boolean
default: false
env:
REGISTRY: ghcr.io
IMAGE_PREFIX: l5yth/potato-mesh
jobs:
build-and-push:
runs-on: ubuntu-latest
if: (startsWith(github.ref, 'refs/tags/v') && github.event_name == 'push') || github.event_name == 'workflow_dispatch'
environment: production
permissions:
contents: read
packages: write
strategy:
matrix:
service: [web, ingestor]
architecture:
- { name: linux-amd64, platform: linux/amd64, label: "Linux x86_64" }
- { name: linux-arm64, platform: linux/arm64, label: "Linux ARM64" }
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up QEMU emulation
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract version from tag or input
id: version
run: |
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
VERSION="${{ github.event.inputs.version }}"
else
VERSION=${GITHUB_REF#refs/tags/v}
fi
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Published version: $VERSION"
- name: Build and push ${{ matrix.service }} for ${{ matrix.architecture.name }}
uses: docker/build-push-action@v5
with:
context: .
file: ./${{ matrix.service == 'web' && 'web/Dockerfile' || 'data/Dockerfile' }}
target: production
platforms: ${{ matrix.architecture.platform }}
push: true
tags: |
${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-${{ matrix.service }}-${{ matrix.architecture.name }}:latest
${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-${{ matrix.service }}-${{ matrix.architecture.name }}:${{ steps.version.outputs.version }}
labels: |
org.opencontainers.image.source=https://github.com/${{ github.repository }}
org.opencontainers.image.description=PotatoMesh ${{ matrix.service == 'web' && 'Web Application' || 'Python Ingestor' }} for ${{ matrix.architecture.label }}
org.opencontainers.image.licenses=Apache-2.0
org.opencontainers.image.version=${{ steps.version.outputs.version }}
org.opencontainers.image.created=${{ github.event.head_commit.timestamp }}
org.opencontainers.image.revision=${{ github.sha }}
org.opencontainers.image.title=PotatoMesh ${{ matrix.service == 'web' && 'Web' || 'Ingestor' }} (${{ matrix.architecture.label }})
org.opencontainers.image.vendor=PotatoMesh
org.opencontainers.image.architecture=${{ matrix.architecture.name }}
org.opencontainers.image.os=linux
org.opencontainers.image.arch=${{ matrix.architecture.name }}
cache-from: type=gha,scope=${{ matrix.service }}-${{ matrix.architecture.name }}
cache-to: type=gha,mode=max,scope=${{ matrix.service }}-${{ matrix.architecture.name }}
test-images:
runs-on: ubuntu-latest
needs: build-and-push
if: startsWith(github.ref, 'refs/tags/v') && github.event_name == 'push'
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract version from tag
id: version
run: |
VERSION=${GITHUB_REF#refs/tags/v}
echo "version=$VERSION" >> $GITHUB_OUTPUT
- name: Test web application (Linux AMD64)
run: |
docker pull ${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-web-linux-amd64:${{ steps.version.outputs.version }}
docker run --rm -d --name web-test -p 41447:41447 \
-e API_TOKEN=test-token \
-e DEBUG=1 \
${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-web-linux-amd64:${{ steps.version.outputs.version }}
sleep 10
curl -f http://localhost:41447/ || exit 1
docker stop web-test
- name: Test ingestor (Linux AMD64)
run: |
docker pull ${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-ingestor-linux-amd64:${{ steps.version.outputs.version }}
docker run --rm --name ingestor-test \
-e POTATOMESH_INSTANCE=http://localhost:41447 \
-e API_TOKEN=test-token \
-e MESH_SERIAL=mock \
-e DEBUG=1 \
${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-ingestor-linux-amd64:${{ steps.version.outputs.version }} &
sleep 5
docker stop ingestor-test || true
publish-summary:
runs-on: ubuntu-latest
needs: [build-and-push, test-images]
if: always() && startsWith(github.ref, 'refs/tags/v') && github.event_name == 'push'
steps:
- name: Extract version from tag
id: version
run: |
VERSION=${GITHUB_REF#refs/tags/v}
echo "version=$VERSION" >> $GITHUB_OUTPUT
- name: Publish release summary
run: |
echo "## 🚀 PotatoMesh Images Published to GHCR" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Version:** ${{ steps.version.outputs.version }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Published Images:**" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Web images
echo "### 🌐 Web Application" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-web-linux-amd64:latest\` - Linux x86_64" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-web-linux-arm64:latest\` - Linux ARM64" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Ingestor images
echo "### 📡 Ingestor Service" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-ingestor-linux-amd64:latest\` - Linux x86_64" >> $GITHUB_STEP_SUMMARY
echo "- \`${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-ingestor-linux-arm64:latest\` - Linux ARM64" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY

View File

@@ -22,9 +22,6 @@ jobs:
run: |
python -m pip install --upgrade pip
pip install black pytest pytest-cov meshtastic
- name: Lint with black
run: |
black --check ./
- name: Test with pytest and coverage
run: |
mkdir -p reports
@@ -45,3 +42,6 @@ jobs:
token: ${{ secrets.CODECOV_TOKEN }}
files: reports/python-junit.xml
flags: python-ingestor
- name: Lint with black
run: |
black --check ./

View File

@@ -29,8 +29,6 @@ jobs:
working-directory: ./web
- name: Set up dependencies
run: bundle install
- name: Run rufo
run: bundle exec rufo --check .
- name: Run tests
run: |
mkdir -p tmp/test-results
@@ -53,3 +51,5 @@ jobs:
flags: ruby-${{ matrix.ruby-version }}
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
- name: Run rufo
run: bundle exec rufo --check .

6
.gitignore vendored
View File

@@ -11,7 +11,7 @@
/tmp/
# Used by dotenv library to load environment variables.
# .env
.env
# Ignore Byebug command history file.
.byebug_history
@@ -62,3 +62,7 @@ coverage/
coverage.xml
htmlcov/
reports/
# AI planning and documentation
ai_docs/
*.log

View File

@@ -1,7 +1,65 @@
# CHANGELOG
## v0.3.0
* Add comprehensive Docker support with multi-architecture builds and automated CI/CD by @trose in <https://github.com/l5yth/potato-mesh/pull/122>
## v0.2.0
* Update readme for 0.2 by @l5yth in <https://github.com/l5yth/potato-mesh/pull/118>
* Add PotatoMesh logo to header and favicon by @l5yth in <https://github.com/l5yth/potato-mesh/pull/117>
* Harden API auth and request limits by @l5yth in <https://github.com/l5yth/potato-mesh/pull/116>
* Add client-side sorting to node table by @l5yth in <https://github.com/l5yth/potato-mesh/pull/114>
* Add short name overlay for node details by @l5yth in <https://github.com/l5yth/potato-mesh/pull/111>
* Adjust python ingestor interval to 60 seconds by @l5yth in <https://github.com/l5yth/potato-mesh/pull/112>
* Hide location columns on medium screens by @l5yth in <https://github.com/l5yth/potato-mesh/pull/109>
* Handle message updates based on sender info by @l5yth in <https://github.com/l5yth/potato-mesh/pull/108>
* Prioritize node posts in queued API updates by @l5yth in <https://github.com/l5yth/potato-mesh/pull/107>
* Add auto-refresh toggle to UI by @l5yth in <https://github.com/l5yth/potato-mesh/pull/105>
* Adjust Leaflet popup styling for dark mode by @l5yth in <https://github.com/l5yth/potato-mesh/pull/104>
* Add site info overlay by @l5yth in <https://github.com/l5yth/potato-mesh/pull/103>
* Add long name tooltip to short name badge by @l5yth in <https://github.com/l5yth/potato-mesh/pull/102>
* Ensure node numeric aliases are derived from canonical IDs by @l5yth in <https://github.com/l5yth/potato-mesh/pull/101>
* Chore: clean up repository by @l5yth in <https://github.com/l5yth/potato-mesh/pull/96>
* Handle SQLite busy errors when upserting nodes by @l5yth in <https://github.com/l5yth/potato-mesh/pull/100>
* Configure Sinatra logging level from DEBUG flag by @l5yth in <https://github.com/l5yth/potato-mesh/pull/97>
* Add penetration tests for authentication and SQL injection by @l5yth in <https://github.com/l5yth/potato-mesh/pull/95>
* Document Python and Ruby source modules by @l5yth in <https://github.com/l5yth/potato-mesh/pull/94>
* Add tests covering mesh helper edge cases by @l5yth in <https://github.com/l5yth/potato-mesh/pull/93>
* Fix py code cov by @l5yth in <https://github.com/l5yth/potato-mesh/pull/92>
* Add Codecov reporting to Python CI by @l5yth in <https://github.com/l5yth/potato-mesh/pull/91>
* Skip null identifiers when selecting packet fields by @l5yth in <https://github.com/l5yth/potato-mesh/pull/88>
* Create python yml ga by @l5yth in <https://github.com/l5yth/potato-mesh/pull/90>
* Add unit tests for mesh ingestor script by @l5yth in <https://github.com/l5yth/potato-mesh/pull/89>
* Add coverage for debug logging on messages without sender by @l5yth in <https://github.com/l5yth/potato-mesh/pull/86>
* Handle concurrent node snapshot updates by @l5yth in <https://github.com/l5yth/potato-mesh/pull/85>
* Fix ingestion mapping for message sender IDs by @l5yth in <https://github.com/l5yth/potato-mesh/pull/84>
* Add coverage for API authentication and payload edge cases by @l5yth in <https://github.com/l5yth/potato-mesh/pull/83>
* Add JUnit test reporting to Ruby CI by @l5yth in <https://github.com/l5yth/potato-mesh/pull/82>
* Configure SimpleCov reporting for Codecov by @l5yth in <https://github.com/l5yth/potato-mesh/pull/81>
* Update codecov job by @l5yth in <https://github.com/l5yth/potato-mesh/pull/80>
* Fix readme badges by @l5yth in <https://github.com/l5yth/potato-mesh/pull/79>
* Add Codecov upload step to Ruby workflow by @l5yth in <https://github.com/l5yth/potato-mesh/pull/78>
* Add Apache license headers to source files by @l5yth in <https://github.com/l5yth/potato-mesh/pull/77>
* Add integration specs for node and message APIs by @l5yth in <https://github.com/l5yth/potato-mesh/pull/76>
* Docs: update for 0.2.0 release by @l5yth in <https://github.com/l5yth/potato-mesh/pull/75>
* Create ruby workflow by @l5yth in <https://github.com/l5yth/potato-mesh/pull/74>
* Add RSpec smoke tests for app boot and database init by @l5yth in <https://github.com/l5yth/potato-mesh/pull/73>
* Align refresh controls with status text by @l5yth in <https://github.com/l5yth/potato-mesh/pull/72>
* Improve mobile layout by @l5yth in <https://github.com/l5yth/potato-mesh/pull/68>
* Normalize message sender IDs using node numbers by @l5yth in <https://github.com/l5yth/potato-mesh/pull/67>
* Style: condense node table by @l5yth in <https://github.com/l5yth/potato-mesh/pull/65>
* Log debug details for messages without sender by @l5yth in <https://github.com/l5yth/potato-mesh/pull/64>
* Fix nested dataclass serialization for node snapshots by @l5yth in <https://github.com/l5yth/potato-mesh/pull/63>
* Log node object on snapshot update failure by @l5yth in <https://github.com/l5yth/potato-mesh/pull/62>
* Initialize database on startup by @l5yth in <https://github.com/l5yth/potato-mesh/pull/61>
* Send mesh data to Potatomesh API by @l5yth in <https://github.com/l5yth/potato-mesh/pull/60>
* Convert boolean flags for SQLite binding by @l5yth in <https://github.com/l5yth/potato-mesh/pull/59>
* Use packet id as message primary key by @l5yth in <https://github.com/l5yth/potato-mesh/pull/58>
* Add message ingestion API and stricter auth by @l5yth in <https://github.com/l5yth/potato-mesh/pull/56>
* Feat: parameterize community info by @l5yth in <https://github.com/l5yth/potato-mesh/pull/55>
* Feat: add dark mode toggle by @l5yth in <https://github.com/l5yth/potato-mesh/pull/54>
## v0.1.0
* Show daily node count in title and header by @l5yth in <https://github.com/l5yth/potato-mesh/pull/49>

103
DOCKER.md Normal file
View File

@@ -0,0 +1,103 @@
# PotatoMesh Docker Setup
## Quick Start
```bash
./configure.sh
docker-compose up -d
docker-compose logs -f
```
The default configuration attaches both services to the host network. This
avoids creating Docker bridge interfaces on platforms where that operation is
blocked. Access the dashboard at `http://127.0.0.1:41447` as soon as the
containers are running. On Docker Desktop (macOS/Windows) or when you prefer
traditional bridged networking, start Compose with the `bridge` profile:
```bash
COMPOSE_PROFILES=bridge docker-compose up -d
```
Access at `http://localhost:41447`
## Configuration
Edit `.env` file or run `./configure.sh` to set:
- `API_TOKEN` - Required for ingestor authentication
- `MESH_SERIAL` - Your Meshtastic device path (e.g., `/dev/ttyACM0`)
- `SITE_NAME` - Your mesh network name
- `MAP_CENTER_LAT/LON` - Map center coordinates
## Device Setup
**Find your device:**
```bash
# Linux
ls /dev/ttyACM* /dev/ttyUSB*
# macOS
ls /dev/cu.usbserial-*
# Windows
ls /dev/ttyS*
```
**Set permissions (Linux/macOS):**
```bash
sudo chmod 666 /dev/ttyACM0
# Or add user to dialout group
sudo usermod -a -G dialout $USER
```
## Common Commands
```bash
# Start services
docker-compose up -d
# View logs
docker-compose logs -f
# Stop services
docker-compose down
# Stop and remove data
docker-compose down -v
# Update images
docker-compose pull && docker-compose up -d
```
## Troubleshooting
**Device access issues:**
```bash
# Check device exists and permissions
ls -la /dev/ttyACM0
# Fix permissions
sudo chmod 666 /dev/ttyACM0
```
**Port conflicts:**
```bash
# Find what's using port 41447
sudo lsof -i :41447
```
**Container issues:**
```bash
# Check logs
docker-compose logs
# Restart services
docker-compose restart
```
For more Docker help, see [Docker Compose documentation](https://docs.docker.com/compose/).

View File

@@ -16,7 +16,25 @@ A simple Meshtastic-powered node dashboard for your local community. _No MQTT cl
Live demo for Berlin #MediumFast: [potatomesh.net](https://potatomesh.net)
![screenshot of the second version](./scrot-0.2.png)
![screenshot of the third version](./scrot-0.3.png)
## Quick Start with Docker
```bash
./configure.sh # Configure your setup
docker-compose up -d # Start services
docker-compose logs -f # View logs
```
PotatoMesh uses host networking by default so it can run on restricted
systems where Docker cannot create bridged interfaces. The web UI listens on
`http://127.0.0.1:41447` immediately without explicit port mappings. If you
are using Docker Desktop (macOS/Windows) or otherwise require bridged
networking, enable the Compose profile with:
```bash
COMPOSE_PROFILES=bridge docker-compose up -d
```
## Web App
@@ -55,6 +73,10 @@ The web app can be configured with environment variables (defaults shown):
* `MAX_NODE_DISTANCE_KM` - hide nodes farther than this distance from the center (default: `137`)
* `MATRIX_ROOM` - matrix room id for a footer link (default: `#meshtastic-berlin:matrix.org`)
The application derives SEO-friendly document titles, descriptions, and social
preview tags from these existing configuration values and reuses the bundled
logo for Open Graph and Twitter cards.
Example:
```bash
@@ -66,8 +88,10 @@ SITE_NAME="Meshtastic Berlin" MAP_CENTER_LAT=52.502889 MAP_CENTER_LON=13.404194
The web app contains an API:
* GET `/api/nodes?limit=100` - returns the latest 100 nodes reported to the app
* GET `/api/positions?limit=100` - returns the latest 100 position data
* GET `/api/messages?limit=100` - returns the latest 100 messages
* POST `/api/nodes` - upserts nodes provided as JSON object mapping node ids to node data (requires `Authorization: Bearer <API_TOKEN>`)
* POST `/api/messages` - appends positions provided as a JSON object or array (requires `Authorization: Bearer <API_TOKEN>`)
* POST `/api/messages` - appends messages provided as a JSON object or array (requires `Authorization: Bearer <API_TOKEN>`)
The `API_TOKEN` environment variable must be set to a non-empty value and match the token supplied in the `Authorization` header for `POST` requests.
@@ -80,8 +104,9 @@ accepts data through the API POST endpoints. Benefit is, here multiple nodes acr
community can feed the dashboard with data. The web app handles messages and nodes
by ID and there will be no duplication.
For convenience, the directory `./data` contains a Python ingestor. It connects to a local
Meshtastic node via serial port to gather nodes and messages seen by the node.
For convenience, the directory `./data` contains a Python ingestor. It connects to a
Meshtastic node via serial port or to a remote device that exposes the Meshtastic TCP
interface to gather nodes and messages seen by the node.
```bash
pacman -S python
@@ -108,7 +133,13 @@ Mesh daemon: nodes+messages → http://127.0.0.1 | port=41447 | channel=0
Run the script with `POTATOMESH_INSTANCE` and `API_TOKEN` to keep updating
node records and parsing new incoming messages. Enable debug output with `DEBUG=1`,
specify the serial port with `MESH_SERIAL` (default `/dev/ttyACM0`), etc.
specify the serial port with `MESH_SERIAL` (default `/dev/ttyACM0`) or set it to an IP
address (for example `192.168.1.20:4403`) to use the Meshtastic TCP interface.
## Demos
* <https://potatomesh.net/>
* <https://vrs.kdd2105.ru/>
## License

155
configure.sh Executable file
View File

@@ -0,0 +1,155 @@
#!/bin/bash
# PotatoMesh Configuration Script
# This script helps you configure your PotatoMesh instance with your local settings
# Abort immediately if any command fails.
set -e
echo "🥔 PotatoMesh Configuration"
echo "=========================="
echo ""
# Check if .env exists, if not create from .env.example
if [ ! -f .env ]; then
    if [ -f .env.example ]; then
        echo "📋 Creating .env file from .env.example..."
        cp .env.example .env
    else
        # No template available; start from an empty file and let the
        # prompts below populate it.
        echo "📋 Creating new .env file..."
        touch .env
    fi
fi
echo "🔧 Let's configure your PotatoMesh instance!"
echo ""
# Function to read input with default
read_with_default() {
    # Prompt the user for a value, falling back to a default when the
    # response is empty.
    #
    # Arguments:
    #   $1 - prompt text shown to the user
    #   $2 - default value used when the user presses Enter (may be empty)
    #   $3 - name of the variable to store the result in
    local prompt="$1"
    local default="$2"
    local var_name="$3"
    local input
    if [ -n "$default" ]; then
        # -r keeps backslashes in the answer literal.
        read -r -p "$prompt [$default]: " input
        input=${input:-$default}
    else
        read -r -p "$prompt: " input
    fi
    # printf -v assigns without re-parsing the value; unlike the previous
    # eval-based assignment it cannot break (or execute code) when the
    # input contains single quotes or other shell metacharacters.
    printf -v "$var_name" '%s' "$input"
}
# Function to update .env file
update_env() {
    # Insert or replace a KEY=VALUE pair in .env.
    #
    # Arguments:
    #   $1 - key name (plain identifier, no regex metacharacters expected)
    #   $2 - value, written verbatim after "KEY="
    local key="$1"
    local value="$2"
    if grep -q "^$key=" .env; then
        # Escape sed replacement metacharacters (backslash, '&', and the
        # '/' delimiter) so values such as URLs, paths, or generated
        # tokens cannot corrupt the substitution.
        local escaped
        escaped=$(printf '%s' "$value" | sed 's/[&/\]/\\&/g')
        # Update existing value (a .bak copy is removed by the caller).
        sed -i.bak "s/^$key=.*/$key=$escaped/" .env
    else
        # Add new value
        echo "$key=$value" >> .env
    fi
}
# Get current values from .env if they exist
# Each grep reads the current value (surrounding quotes stripped); the
# trailing `|| echo ...` supplies a sensible default when the key is absent.
SITE_NAME=$(grep "^SITE_NAME=" .env 2>/dev/null | cut -d'=' -f2- | tr -d '"' || echo "My Meshtastic Network")
DEFAULT_CHANNEL=$(grep "^DEFAULT_CHANNEL=" .env 2>/dev/null | cut -d'=' -f2- | tr -d '"' || echo "#MediumFast")
DEFAULT_FREQUENCY=$(grep "^DEFAULT_FREQUENCY=" .env 2>/dev/null | cut -d'=' -f2- | tr -d '"' || echo "868MHz")
MAP_CENTER_LAT=$(grep "^MAP_CENTER_LAT=" .env 2>/dev/null | cut -d'=' -f2- | tr -d '"' || echo "52.502889")
MAP_CENTER_LON=$(grep "^MAP_CENTER_LON=" .env 2>/dev/null | cut -d'=' -f2- | tr -d '"' || echo "13.404194")
MAX_NODE_DISTANCE_KM=$(grep "^MAX_NODE_DISTANCE_KM=" .env 2>/dev/null | cut -d'=' -f2- | tr -d '"' || echo "50")
MATRIX_ROOM=$(grep "^MATRIX_ROOM=" .env 2>/dev/null | cut -d'=' -f2- | tr -d '"' || echo "")
API_TOKEN=$(grep "^API_TOKEN=" .env 2>/dev/null | cut -d'=' -f2- | tr -d '"' || echo "")
echo "📍 Location Settings"
echo "-------------------"
read_with_default "Site Name (your mesh network name)" "$SITE_NAME" SITE_NAME
read_with_default "Map Center Latitude" "$MAP_CENTER_LAT" MAP_CENTER_LAT
read_with_default "Map Center Longitude" "$MAP_CENTER_LON" MAP_CENTER_LON
read_with_default "Max Node Distance (km)" "$MAX_NODE_DISTANCE_KM" MAX_NODE_DISTANCE_KM
echo ""
echo "📡 Meshtastic Settings"
echo "---------------------"
read_with_default "Default Channel" "$DEFAULT_CHANNEL" DEFAULT_CHANNEL
read_with_default "Default Frequency (868MHz, 915MHz, etc.)" "$DEFAULT_FREQUENCY" DEFAULT_FREQUENCY
echo ""
echo "💬 Optional Settings"
echo "-------------------"
read_with_default "Matrix Room (optional, e.g., #meshtastic-berlin:matrix.org)" "$MATRIX_ROOM" MATRIX_ROOM
echo ""
echo "🔐 Security Settings"
echo "-------------------"
echo "The API token is used for secure communication between the web app and ingestor."
echo "You can provide your own custom token or let us generate a secure one for you."
echo ""
if [ -z "$API_TOKEN" ]; then
    echo "No existing API token found. Generating a secure token..."
    # Prefer openssl; fall back to Python's secrets module, then to a
    # placeholder the operator must replace manually.
    API_TOKEN=$(openssl rand -hex 32 2>/dev/null || python3 -c "import secrets; print(secrets.token_hex(32))" 2>/dev/null || echo "your-secure-api-token-here")
    # Only the first 8 characters are echoed to avoid leaking the token.
    echo "✅ Generated secure API token: ${API_TOKEN:0:8}..."
    echo ""
    read -p "Use this generated token? (Y/n): " use_generated
    if [[ "$use_generated" =~ ^[Nn]$ ]]; then
        read -p "Enter your custom API token: " API_TOKEN
    fi
else
    echo "Existing API token found: ${API_TOKEN:0:8}..."
    read -p "Keep existing token? (Y/n): " keep_existing
    if [[ "$keep_existing" =~ ^[Nn]$ ]]; then
        read -p "Enter new API token (or press Enter to generate): " new_token
        if [ -n "$new_token" ]; then
            API_TOKEN="$new_token"
        else
            echo "Generating new secure token..."
            API_TOKEN=$(openssl rand -hex 32 2>/dev/null || python3 -c "import secrets; print(secrets.token_hex(32))" 2>/dev/null || echo "your-secure-api-token-here")
            echo "✅ Generated new API token: ${API_TOKEN:0:8}..."
        fi
    fi
fi
echo ""
echo "📝 Updating .env file..."
# Update .env file
# String-typed settings are written quoted; numeric ones bare.
update_env "SITE_NAME" "\"$SITE_NAME\""
update_env "DEFAULT_CHANNEL" "\"$DEFAULT_CHANNEL\""
update_env "DEFAULT_FREQUENCY" "\"$DEFAULT_FREQUENCY\""
update_env "MAP_CENTER_LAT" "$MAP_CENTER_LAT"
update_env "MAP_CENTER_LON" "$MAP_CENTER_LON"
update_env "MAX_NODE_DISTANCE_KM" "$MAX_NODE_DISTANCE_KM"
update_env "MATRIX_ROOM" "\"$MATRIX_ROOM\""
update_env "API_TOKEN" "$API_TOKEN"
# Add other common settings if they don't exist
if ! grep -q "^MESH_SERIAL=" .env; then
    echo "MESH_SERIAL=/dev/ttyACM0" >> .env
fi
if ! grep -q "^DEBUG=" .env; then
    echo "DEBUG=0" >> .env
fi
# Clean up backup file (created by sed -i.bak in update_env)
rm -f .env.bak
echo ""
echo "✅ Configuration complete!"
echo ""
echo "📋 Your settings:"
echo "   Site Name: $SITE_NAME"
echo "   Map Center: $MAP_CENTER_LAT, $MAP_CENTER_LON"
echo "   Max Distance: ${MAX_NODE_DISTANCE_KM}km"
echo "   Channel: $DEFAULT_CHANNEL"
echo "   Frequency: $DEFAULT_FREQUENCY"
echo "   Matrix Room: ${MATRIX_ROOM:-'Not set'}"
echo "   API Token: ${API_TOKEN:0:8}..."
echo ""
echo "🚀 You can now start PotatoMesh with:"
echo "   docker-compose up -d"
echo ""
echo "📖 For more configuration options, see the README.md"

72
data/Dockerfile Normal file
View File

@@ -0,0 +1,72 @@
# syntax=docker/dockerfile:1.6
# TARGETOS selects the final stage below: "linux" (default) or "windows".
ARG TARGETOS=linux
ARG PYTHON_VERSION=3.12.6
# Linux production image
FROM python:${PYTHON_VERSION}-alpine AS production-linux
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1
WORKDIR /app
COPY data/requirements.txt ./
# Compiler toolchain is installed as a named virtual package and removed
# after pip install so it does not bloat the final image.
RUN set -eux; \
    apk add --no-cache \
    tzdata \
    curl \
    libstdc++ \
    libgcc; \
    apk add --no-cache --virtual .build-deps \
    gcc \
    musl-dev \
    linux-headers \
    build-base; \
    python -m pip install --no-cache-dir -r requirements.txt; \
    apk del .build-deps
COPY data/ .
# Run as an unprivileged user; the dialout group grants access to the
# serial device (MESH_SERIAL) without root.
RUN addgroup -S potatomesh && \
    adduser -S potatomesh -G potatomesh && \
    adduser potatomesh dialout && \
    chown -R potatomesh:potatomesh /app
USER potatomesh
# Runtime defaults; all can be overridden at `docker run` time.
ENV MESH_SERIAL=/dev/ttyACM0 \
    MESH_SNAPSHOT_SECS=60 \
    MESH_CHANNEL_INDEX=0 \
    DEBUG=0 \
    POTATOMESH_INSTANCE="" \
    API_TOKEN=""
CMD ["python", "mesh.py"]
# Windows production image
FROM python:${PYTHON_VERSION}-windowsservercore-ltsc2022 AS production-windows
SHELL ["cmd", "/S", "/C"]
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
WORKDIR /app
COPY data/requirements.txt ./
RUN python -m pip install --no-cache-dir -r requirements.txt
COPY data/ .
USER ContainerUser
ENV MESH_SERIAL=/dev/ttyACM0 \
    MESH_SNAPSHOT_SECS=60 \
    MESH_CHANNEL_INDEX=0 \
    DEBUG=0 \
    POTATOMESH_INSTANCE="" \
    API_TOKEN=""
CMD ["python", "mesh.py"]
# Final image: whichever per-OS stage TARGETOS selected.
FROM production-${TARGETOS} AS production

View File

@@ -22,16 +22,21 @@ them to the accompanying web API. It also provides the long-running daemon
entry point that performs these synchronisation tasks.
"""
import base64
import dataclasses
import heapq
import ipaddress
import itertools
import json, os, time, threading, signal, urllib.request, urllib.error
import json, os, time, threading, signal, urllib.request, urllib.error, urllib.parse
import math
from collections.abc import Mapping
from meshtastic.serial_interface import SerialInterface
from meshtastic.tcp_interface import TCPInterface
from pubsub import pub
from google.protobuf.json_format import MessageToDict
from google.protobuf.message import Message as ProtoMessage
from google.protobuf.message import DecodeError
# --- Config (env overrides) ---------------------------------------------------
PORT = os.environ.get("MESH_SERIAL", "/dev/ttyACM0")
@@ -42,16 +47,130 @@ INSTANCE = os.environ.get("POTATOMESH_INSTANCE", "").rstrip("/")
API_TOKEN = os.environ.get("API_TOKEN", "")
# --- Serial interface helpers --------------------------------------------------
_DEFAULT_TCP_PORT = 4403
# Reconnect configuration: retry delays are adjustable via environment
# variables to ease testing while keeping sensible defaults in production.
_RECONNECT_INITIAL_DELAY_SECS = float(os.environ.get("MESH_RECONNECT_INITIAL", "5"))
_RECONNECT_MAX_DELAY_SECS = float(os.environ.get("MESH_RECONNECT_MAX", "60"))
class _DummySerialInterface:
"""In-memory replacement for ``meshtastic.serial_interface.SerialInterface``.
The GitHub Actions release tests run the ingestor container without access
to a serial device. When ``MESH_SERIAL`` is set to ``"mock"`` (or similar)
we provide this stub interface so the daemon can start and exercise its
background loop without failing due to missing hardware.
"""
def __init__(self):
self.nodes = {}
def close(self):
"""Mirror the real interface API."""
pass
def _parse_network_target(value: str) -> tuple[str, int] | None:
    """Return ``(host, port)`` when ``value`` is an IP address string.

    The ingestor accepts values such as ``192.168.1.10`` or
    ``tcp://192.168.1.10:4500`` for ``MESH_SERIAL`` to support Meshtastic
    devices shared via TCP. Serial device paths (``/dev/ttyACM0``) are ignored
    by returning ``None``.
    """
    if not value:
        return None
    value = value.strip()
    if not value:
        return None

    def _validated_result(host: str | None, port: int | None):
        # Accept only literal IP addresses (IPv4 or IPv6); hostnames and
        # device paths fail ip_address() and are rejected with None.
        if not host:
            return None
        try:
            ipaddress.ip_address(host)
        except ValueError:
            return None
        # A missing/zero port falls back to the Meshtastic default.
        return host, port or _DEFAULT_TCP_PORT

    parsed_values = []
    if "://" in value:
        parsed_values.append(urllib.parse.urlparse(value, scheme="tcp"))
    # Bare "host:port" only parses as a netloc when prefixed with "//".
    parsed_values.append(urllib.parse.urlparse(f"//{value}", scheme="tcp"))
    for parsed in parsed_values:
        try:
            port = parsed.port
        except ValueError:
            # Non-numeric or out-of-range port text; treat as absent.
            port = None
        result = _validated_result(parsed.hostname, port)
        if result:
            return result
    # Manual "host:port" split covers inputs urlparse mishandles; bracketed
    # IPv6 literals are skipped because the urlparse path already tried them.
    if value.count(":") == 1 and not value.startswith("["):
        host, _, port_text = value.partition(":")
        try:
            port = int(port_text) if port_text else None
        except ValueError:
            port = None
        result = _validated_result(host, port)
        if result:
            return result
    # Plain address with no port, e.g. "192.168.1.10".
    return _validated_result(value, None)
def _create_serial_interface(port: str):
    """Return an appropriate serial interface for ``port``.

    Passing ``mock`` (case-insensitive) or an empty value skips hardware access
    and returns :class:`_DummySerialInterface`. This makes it possible to run
    the container in CI environments that do not expose serial devices while
    keeping production behaviour unchanged.

    Returns:
        A :class:`_DummySerialInterface`, ``TCPInterface``, or
        ``SerialInterface`` depending on the value of ``port``.
    """
    port_value = (port or "").strip()
    # Sentinel values disable hardware access entirely (used by CI).
    if port_value.lower() in {"", "mock", "none", "null", "disabled"}:
        if DEBUG:
            print(f"[debug] using dummy serial interface for port={port_value!r}")
        return _DummySerialInterface()
    # IP-address targets (optionally with a port) select the TCP transport.
    network_target = _parse_network_target(port_value)
    if network_target:
        host, tcp_port = network_target
        if DEBUG:
            print("[debug] using TCP interface for host=" f"{host!r} port={tcp_port!r}")
        return TCPInterface(hostname=host, portNumber=tcp_port)
    # Anything else is treated as a serial device path, e.g. /dev/ttyACM0.
    return SerialInterface(devPath=port_value)
# --- POST queue ----------------------------------------------------------------
_POST_QUEUE_LOCK = threading.Lock()
_POST_QUEUE = []
_POST_QUEUE_COUNTER = itertools.count()
_POST_QUEUE_ACTIVE = False
_NODE_POST_PRIORITY = 0
_MESSAGE_POST_PRIORITY = 10
_MESSAGE_POST_PRIORITY = 0
_POSITION_POST_PRIORITY = 10
_NODE_POST_PRIORITY = 20
_DEFAULT_POST_PRIORITY = 50
_RECEIVE_TOPICS = (
"meshtastic.receive",
"meshtastic.receive.text",
"meshtastic.receive.position",
"meshtastic.receive.POSITION_APP",
"meshtastic.receive.user",
"meshtastic.receive.NODEINFO_APP",
)
def _get(obj, key, default=None):
"""Return a key or attribute value from ``obj``.
@@ -160,9 +279,22 @@ def _node_to_dict(n) -> dict:
if dataclasses.is_dataclass(value):
return {k: _convert(getattr(value, k)) for k in value.__dataclass_fields__}
if isinstance(value, ProtoMessage):
return MessageToDict(
value, preserving_proto_field_name=True, use_integers_for_enums=False
)
try:
return MessageToDict(
value,
preserving_proto_field_name=True,
use_integers_for_enums=False,
)
except Exception:
if hasattr(value, "to_dict"):
try:
return value.to_dict()
except Exception:
pass
try:
return json.loads(json.dumps(value, default=str))
except Exception:
return str(value)
if isinstance(value, bytes):
try:
return value.decode()
@@ -259,6 +391,64 @@ def _first(d, *names, default=None):
return default
def _coerce_int(value):
"""Return ``value`` converted to ``int`` when possible."""
if value is None:
return None
if isinstance(value, bool):
return int(value)
if isinstance(value, int):
return value
if isinstance(value, float):
return int(value) if math.isfinite(value) else None
if isinstance(value, (str, bytes, bytearray)):
text = value.decode() if isinstance(value, (bytes, bytearray)) else value
stripped = text.strip()
if not stripped:
return None
try:
if stripped.lower().startswith("0x"):
return int(stripped, 16)
return int(stripped, 10)
except ValueError:
try:
return int(float(stripped))
except ValueError:
return None
try:
return int(value)
except (TypeError, ValueError):
return None
def _coerce_float(value):
"""Return ``value`` converted to ``float`` when possible."""
if value is None:
return None
if isinstance(value, bool):
return float(value)
if isinstance(value, (int, float)):
result = float(value)
return result if math.isfinite(result) else None
if isinstance(value, (str, bytes, bytearray)):
text = value.decode() if isinstance(value, (bytes, bytearray)) else value
stripped = text.strip()
if not stripped:
return None
try:
result = float(stripped)
except ValueError:
return None
return result if math.isfinite(result) else None
try:
result = float(value)
except (TypeError, ValueError):
return None
return result if math.isfinite(result) else None
def _pkt_to_dict(packet) -> dict:
"""Normalise a received packet into a JSON-friendly dictionary.
@@ -271,9 +461,16 @@ def _pkt_to_dict(packet) -> dict:
if isinstance(packet, dict):
return packet
if isinstance(packet, ProtoMessage):
return MessageToDict(
packet, preserving_proto_field_name=True, use_integers_for_enums=False
)
try:
return MessageToDict(
packet, preserving_proto_field_name=True, use_integers_for_enums=False
)
except Exception:
if hasattr(packet, "to_dict"):
try:
return packet.to_dict()
except Exception:
pass
# Last resort: try to read attributes
try:
return json.loads(json.dumps(packet, default=lambda o: str(o)))
@@ -281,24 +478,605 @@ def _pkt_to_dict(packet) -> dict:
return {"_unparsed": str(packet)}
def store_packet_dict(p: dict):
"""Persist text messages extracted from a decoded packet.
def _canonical_node_id(value) -> str | None:
"""Normalise node identifiers to the canonical ``!deadbeef`` form."""
Only packets from the ``TEXT_MESSAGE_APP`` port are forwarded to the
web API. Field lookups tolerate camelCase and snake_case variants for
compatibility across Meshtastic releases.
if value is None:
return None
if isinstance(value, (int, float)):
try:
num = int(value)
except (TypeError, ValueError):
return None
if num < 0:
return None
return f"!{num & 0xFFFFFFFF:08x}"
if not isinstance(value, str):
return None
trimmed = value.strip()
if not trimmed:
return None
if trimmed.startswith("^"):
return trimmed
if trimmed.startswith("!"):
body = trimmed[1:]
elif trimmed.lower().startswith("0x"):
body = trimmed[2:]
elif trimmed.isdigit():
try:
return f"!{int(trimmed, 10) & 0xFFFFFFFF:08x}"
except ValueError:
return None
else:
body = trimmed
if not body:
return None
try:
return f"!{int(body, 16) & 0xFFFFFFFF:08x}"
except ValueError:
return None
def _node_num_from_id(node_id) -> int | None:
"""Return the numeric node reference derived from ``node_id``."""
if node_id is None:
return None
if isinstance(node_id, (int, float)):
try:
num = int(node_id)
except (TypeError, ValueError):
return None
return num if num >= 0 else None
if not isinstance(node_id, str):
return None
trimmed = node_id.strip()
if not trimmed:
return None
if trimmed.startswith("!"):
trimmed = trimmed[1:]
if trimmed.lower().startswith("0x"):
trimmed = trimmed[2:]
try:
return int(trimmed, 16)
except ValueError:
try:
return int(trimmed, 10)
except ValueError:
return None
def _merge_mappings(base, extra):
    """Recursively merge mapping ``extra`` into ``base`` without mutation.

    Returns a new ``dict``; neither argument is modified. Non-mapping
    inputs are coerced through ``_node_to_dict`` where possible. Scalar
    values from ``extra`` win over values in ``base``; nested mappings
    are merged recursively.
    """
    base_dict: dict
    if isinstance(base, Mapping):
        base_dict = dict(base)
    elif base:
        # Coerce protobuf/dataclass-style objects into a plain dict.
        converted_base = _node_to_dict(base)
        base_dict = dict(converted_base) if isinstance(converted_base, Mapping) else {}
    else:
        base_dict = {}
    if not isinstance(extra, Mapping):
        converted_extra = _node_to_dict(extra)
        if not isinstance(converted_extra, Mapping):
            # Nothing mergeable; return the (copied) base unchanged.
            return base_dict
        extra = converted_extra
    for key, value in extra.items():
        if isinstance(value, Mapping):
            # Merge nested mappings so partial updates keep unrelated keys.
            existing = base_dict.get(key)
            base_dict[key] = _merge_mappings(existing, value)
        else:
            base_dict[key] = _node_to_dict(value)
    return base_dict
def _extract_payload_bytes(decoded_section: Mapping) -> bytes | None:
"""Extract raw payload bytes from a decoded packet section."""
if not isinstance(decoded_section, Mapping):
return None
payload = decoded_section.get("payload")
if isinstance(payload, Mapping):
data = payload.get("__bytes_b64__") or payload.get("bytes")
if isinstance(data, str):
try:
return base64.b64decode(data)
except Exception:
return None
if isinstance(payload, (bytes, bytearray)):
return bytes(payload)
if isinstance(payload, str):
try:
return base64.b64decode(payload)
except Exception:
return None
return None
def _decode_nodeinfo_payload(payload_bytes):
    """Return a ``NodeInfo`` protobuf message parsed from ``payload_bytes``.

    Some senders transmit a bare ``User`` message instead of a full
    ``NodeInfo``; in that case a ``NodeInfo`` wrapper is synthesised around
    it. Returns ``None`` when the payload is empty, the protobuf package is
    unavailable, or parsing fails.
    """
    if not payload_bytes:
        return None
    try:
        from meshtastic.protobuf import mesh_pb2
    except Exception:
        return None
    node_info = mesh_pb2.NodeInfo()
    try:
        node_info.ParseFromString(payload_bytes)
        return node_info
    except DecodeError:
        # Fall back to interpreting the payload as a bare User message.
        try:
            user_msg = mesh_pb2.User()
            user_msg.ParseFromString(payload_bytes)
        except DecodeError:
            return None
        node_info = mesh_pb2.NodeInfo()
        node_info.user.CopyFrom(user_msg)
        return node_info
def _nodeinfo_metrics_dict(node_info) -> dict | None:
    """Convert ``NodeInfo.device_metrics`` into a JSON-friendly mapping.

    Only fields explicitly set on the protobuf (per ``ListFields``) are
    copied, renamed to the camelCase keys the web API expects. Returns
    ``None`` when no recognised metrics are present.
    """
    if not node_info:
        return None
    metrics_field_names = {f[0].name for f in node_info.ListFields()}
    if "device_metrics" not in metrics_field_names:
        return None
    metrics = {}
    for field_desc, value in node_info.device_metrics.ListFields():
        name = field_desc.name
        if name == "battery_level":
            metrics["batteryLevel"] = float(value)
        elif name == "voltage":
            metrics["voltage"] = float(value)
        elif name == "channel_utilization":
            metrics["channelUtilization"] = float(value)
        elif name == "air_util_tx":
            metrics["airUtilTx"] = float(value)
        elif name == "uptime_seconds":
            metrics["uptimeSeconds"] = int(value)
    return metrics if metrics else None
def _nodeinfo_position_dict(node_info) -> dict | None:
    """Convert ``NodeInfo.position`` into a dictionary with decoded coordinates.

    Integer latitude/longitude fields (units of 1e-7 degrees) are converted
    to floating-point degrees. The location source enum is rendered by name
    when the protobuf definitions are importable, otherwise the raw value is
    kept. Returns ``None`` when no position fields are present.
    """
    if not node_info:
        return None
    field_names = {f[0].name for f in node_info.ListFields()}
    if "position" not in field_names:
        return None
    position = {}
    for field_desc, value in node_info.position.ListFields():
        name = field_desc.name
        if name == "latitude_i":
            position["latitude"] = float(value) / 1e7
        elif name == "longitude_i":
            position["longitude"] = float(value) / 1e7
        elif name == "altitude":
            position["altitude"] = float(value)
        elif name == "time":
            position["time"] = int(value)
        elif name == "location_source":
            try:
                from meshtastic.protobuf import mesh_pb2

                position["locationSource"] = mesh_pb2.Position.LocSource.Name(value)
            except Exception:
                # Fall back to the raw enum value when protobufs are missing.
                position["locationSource"] = value
    return position if position else None
def _nodeinfo_user_dict(node_info, decoded_user) -> dict | None:
    """Merge user details from the decoded packet and NodeInfo payload.

    Values already decoded by the Meshtastic library (``decoded_user``) take
    precedence over fields parsed from the raw protobuf. The resulting ``id``
    is normalised to the canonical ``!deadbeef`` form when possible.
    """
    user_dict = None
    if node_info:
        field_names = {f[0].name for f in node_info.ListFields()}
        if "user" in field_names:
            try:
                from google.protobuf.json_format import MessageToDict

                user_dict = MessageToDict(
                    node_info.user,
                    preserving_proto_field_name=False,
                    use_integers_for_enums=False,
                )
            except Exception:
                user_dict = None
    if isinstance(decoded_user, ProtoMessage):
        # The library sometimes hands us the raw protobuf; convert it too.
        try:
            from google.protobuf.json_format import MessageToDict

            decoded_user = MessageToDict(
                decoded_user,
                preserving_proto_field_name=False,
                use_integers_for_enums=False,
            )
        except Exception:
            decoded_user = _node_to_dict(decoded_user)
    if isinstance(decoded_user, Mapping):
        # Decoded values win over protobuf-derived ones.
        user_dict = _merge_mappings(user_dict, decoded_user)
    if isinstance(user_dict, Mapping):
        canonical = _canonical_node_id(user_dict.get("id"))
        if canonical:
            # Copy before normalising so callers' mappings are not mutated.
            user_dict = dict(user_dict)
            user_dict["id"] = canonical
    return user_dict
def store_position_packet(packet: dict, decoded: Mapping):
    """Handle ``POSITION_APP`` packets and forward them to ``/api/positions``.

    Extracts the sender, packet id, receive time, and every position field the
    firmware may report — tolerating camelCase, snake_case, and nested ``raw``
    variants — then queues a JSON POST to ``/api/positions``. Packets without
    an identifiable sender or packet id are dropped silently.

    Args:
        packet: Packet dictionary produced by ``_pkt_to_dict``.
        decoded: The packet's ``decoded`` section.
    """
    # Sender: prefer packet-level "from" fields, fall back to decoded num.
    node_ref = _first(packet, "fromId", "from_id", "from", default=None)
    if node_ref is None:
        node_ref = _first(decoded, "num", default=None)
    node_id = _canonical_node_id(node_ref)
    if node_id is None:
        return
    node_num = _coerce_int(_first(decoded, "num", default=None))
    if node_num is None:
        node_num = _node_num_from_id(node_id)
    # The packet id doubles as the row id; without it nothing can be stored.
    pkt_id = _coerce_int(_first(packet, "id", "packet_id", "packetId", default=None))
    if pkt_id is None:
        return
    rx_time = _coerce_int(_first(packet, "rxTime", "rx_time", default=time.time()))
    if rx_time is None:
        rx_time = int(time.time())
    to_id = _first(packet, "toId", "to_id", "to", default=None)
    to_id = to_id if to_id not in {"", None} else None
    position_section = decoded.get("position") if isinstance(decoded, Mapping) else None
    if not isinstance(position_section, Mapping):
        position_section = {}
    latitude = _coerce_float(
        _first(position_section, "latitude", "raw.latitude", default=None)
    )
    if latitude is None:
        # Meshtastic also encodes coordinates as integers in 1e-7 degrees.
        lat_i = _coerce_int(
            _first(
                position_section,
                "latitudeI",
                "latitude_i",
                "raw.latitude_i",
                default=None,
            )
        )
        if lat_i is not None:
            latitude = lat_i / 1e7
    longitude = _coerce_float(
        _first(position_section, "longitude", "raw.longitude", default=None)
    )
    if longitude is None:
        lon_i = _coerce_int(
            _first(
                position_section,
                "longitudeI",
                "longitude_i",
                "raw.longitude_i",
                default=None,
            )
        )
        if lon_i is not None:
            longitude = lon_i / 1e7
    altitude = _coerce_float(
        _first(position_section, "altitude", "raw.altitude", default=None)
    )
    position_time = _coerce_int(
        _first(position_section, "time", "raw.time", default=None)
    )
    location_source = _first(
        position_section,
        "locationSource",
        "location_source",
        "raw.location_source",
        default=None,
    )
    location_source = (
        str(location_source).strip() if location_source not in {None, ""} else None
    )
    precision_bits = _coerce_int(
        _first(
            position_section,
            "precisionBits",
            "precision_bits",
            "raw.precision_bits",
            default=None,
        )
    )
    sats_in_view = _coerce_int(
        _first(
            position_section,
            "satsInView",
            "sats_in_view",
            "raw.sats_in_view",
            default=None,
        )
    )
    pdop = _coerce_float(
        _first(position_section, "PDOP", "pdop", "raw.PDOP", "raw.pdop", default=None)
    )
    ground_speed = _coerce_float(
        _first(
            position_section,
            "groundSpeed",
            "ground_speed",
            "raw.ground_speed",
            default=None,
        )
    )
    ground_track = _coerce_float(
        _first(
            position_section,
            "groundTrack",
            "ground_track",
            "raw.ground_track",
            default=None,
        )
    )
    # Radio metadata lives at the packet level, not in the position section.
    snr = _coerce_float(_first(packet, "snr", "rx_snr", "rxSnr", default=None))
    rssi = _coerce_int(_first(packet, "rssi", "rx_rssi", "rxRssi", default=None))
    hop_limit = _coerce_int(_first(packet, "hopLimit", "hop_limit", default=None))
    bitfield = _coerce_int(_first(decoded, "bitfield", default=None))
    payload_bytes = _extract_payload_bytes(decoded)
    payload_b64 = (
        base64.b64encode(payload_bytes).decode("ascii") if payload_bytes else None
    )
    raw_section = decoded.get("raw") if isinstance(decoded, Mapping) else None
    raw_payload = _node_to_dict(raw_section) if raw_section else None
    if raw_payload is None and position_section:
        # Fall back to the raw sub-section nested under "position".
        raw_position = (
            position_section.get("raw")
            if isinstance(position_section, Mapping)
            else None
        )
        if raw_position:
            raw_payload = _node_to_dict(raw_position)
    position_payload = {
        "id": pkt_id,
        "node_id": node_id,
        "node_num": node_num,
        "num": node_num,
        "from_id": node_id,
        "to_id": to_id,
        "rx_time": rx_time,
        "rx_iso": _iso(rx_time),
        "latitude": latitude,
        "longitude": longitude,
        "altitude": altitude,
        "position_time": position_time,
        "location_source": location_source,
        "precision_bits": precision_bits,
        "sats_in_view": sats_in_view,
        "pdop": pdop,
        "ground_speed": ground_speed,
        "ground_track": ground_track,
        "snr": snr,
        "rssi": rssi,
        "hop_limit": hop_limit,
        "bitfield": bitfield,
        "payload_b64": payload_b64,
    }
    if raw_payload:
        position_payload["raw"] = raw_payload
    _queue_post_json(
        "/api/positions", position_payload, priority=_POSITION_POST_PRIORITY
    )
    if DEBUG:
        print(
            f"[debug] stored position for {node_id} lat={latitude!r} lon={longitude!r} rx_time={rx_time}"
        )
def store_nodeinfo_packet(packet: dict, decoded: Mapping):
    """Handle ``NODEINFO_APP`` packets and forward them to ``/api/nodes``.

    Merges details parsed from the raw protobuf payload with whatever the
    Meshtastic library already decoded, and queues the result as a
    ``{node_id: node_payload}`` POST to ``/api/nodes``. Packets whose sender
    cannot be identified are dropped silently.

    Args:
        packet: Packet dictionary produced by ``_pkt_to_dict``.
        decoded: The packet's ``decoded`` section.
    """
    payload_bytes = _extract_payload_bytes(decoded)
    node_info = _decode_nodeinfo_payload(payload_bytes)
    decoded_user = decoded.get("user")
    user_dict = _nodeinfo_user_dict(node_info, decoded_user)
    node_info_fields = set()
    if node_info:
        # Only fields explicitly present on the protobuf are trusted below.
        node_info_fields = {field_desc.name for field_desc, _ in node_info.ListFields()}
    node_id = None
    if isinstance(user_dict, Mapping):
        node_id = _canonical_node_id(user_dict.get("id"))
    if node_id is None:
        node_id = _canonical_node_id(
            _first(packet, "fromId", "from_id", "from", default=None)
        )
    if node_id is None:
        return
    node_payload = {}
    if user_dict:
        node_payload["user"] = user_dict
    node_num = None
    if node_info and "num" in node_info_fields:
        try:
            node_num = int(node_info.num)
        except (TypeError, ValueError):
            node_num = None
    if node_num is None:
        decoded_num = decoded.get("num")
        if decoded_num is not None:
            try:
                node_num = int(decoded_num)
            except (TypeError, ValueError):
                try:
                    # base=0 tolerates "0x..."-style strings.
                    node_num = int(str(decoded_num).strip(), 0)
                except Exception:
                    node_num = None
    if node_num is None:
        node_num = _node_num_from_id(node_id)
    if node_num is not None:
        node_payload["num"] = node_num
    # NOTE(review): unlike store_position_packet this uses int() directly, so
    # a non-numeric rxTime string would raise — consider _coerce_int here for
    # consistency.
    rx_time = int(_first(packet, "rxTime", "rx_time", default=time.time()))
    last_heard = None
    if node_info and "last_heard" in node_info_fields:
        try:
            last_heard = int(node_info.last_heard)
        except (TypeError, ValueError):
            last_heard = None
    if last_heard is None:
        decoded_last_heard = decoded.get("lastHeard")
        if decoded_last_heard is not None:
            try:
                last_heard = int(decoded_last_heard)
            except (TypeError, ValueError):
                last_heard = None
    # Never report a last-heard timestamp older than this packet's rx time.
    if last_heard is None or last_heard < rx_time:
        last_heard = rx_time
    node_payload["lastHeard"] = last_heard
    snr = None
    if node_info and "snr" in node_info_fields:
        try:
            snr = float(node_info.snr)
        except (TypeError, ValueError):
            snr = None
    if snr is None:
        snr = _first(packet, "snr", "rx_snr", "rxSnr", default=None)
        if snr is not None:
            try:
                snr = float(snr)
            except (TypeError, ValueError):
                snr = None
    if snr is not None:
        node_payload["snr"] = snr
    hops = None
    if node_info and "hops_away" in node_info_fields:
        try:
            hops = int(node_info.hops_away)
        except (TypeError, ValueError):
            hops = None
    if hops is None:
        hops = decoded.get("hopsAway")
        if hops is not None:
            try:
                hops = int(hops)
            except (TypeError, ValueError):
                hops = None
    if hops is not None:
        node_payload["hopsAway"] = hops
    if node_info and "channel" in node_info_fields:
        try:
            node_payload["channel"] = int(node_info.channel)
        except (TypeError, ValueError):
            pass
    if node_info and "via_mqtt" in node_info_fields:
        node_payload["viaMqtt"] = bool(node_info.via_mqtt)
    if node_info and "is_favorite" in node_info_fields:
        node_payload["isFavorite"] = bool(node_info.is_favorite)
    elif "isFavorite" in decoded:
        node_payload["isFavorite"] = bool(decoded.get("isFavorite"))
    if node_info and "is_ignored" in node_info_fields:
        node_payload["isIgnored"] = bool(node_info.is_ignored)
    if node_info and "is_key_manually_verified" in node_info_fields:
        node_payload["isKeyManuallyVerified"] = bool(node_info.is_key_manually_verified)
    # Device metrics and position: protobuf-derived values are the base,
    # decoded values win on conflict (see _merge_mappings).
    metrics = _nodeinfo_metrics_dict(node_info)
    decoded_metrics = decoded.get("deviceMetrics")
    if isinstance(decoded_metrics, Mapping):
        metrics = _merge_mappings(metrics, _node_to_dict(decoded_metrics))
    if metrics:
        node_payload["deviceMetrics"] = metrics
    position = _nodeinfo_position_dict(node_info)
    decoded_position = decoded.get("position")
    if isinstance(decoded_position, Mapping):
        position = _merge_mappings(position, _node_to_dict(decoded_position))
    if position:
        node_payload["position"] = position
    hop_limit = _first(packet, "hopLimit", "hop_limit", default=None)
    if hop_limit is not None and "hopLimit" not in node_payload:
        try:
            node_payload["hopLimit"] = int(hop_limit)
        except (TypeError, ValueError):
            pass
    _queue_post_json(
        "/api/nodes", {node_id: node_payload}, priority=_NODE_POST_PRIORITY
    )
    if DEBUG:
        short = None
        if isinstance(user_dict, Mapping):
            short = user_dict.get("shortName")
        print(f"[debug] stored nodeinfo for {node_id} shortName={short!r}")
def store_packet_dict(p: dict):
"""Persist packets extracted from a decoded payload.
Node information packets are forwarded to the ``/api/nodes`` endpoint
while text messages from the ``TEXT_MESSAGE_APP`` port continue to be
stored via ``/api/messages``. Field lookups tolerate camelCase and
snake_case variants for compatibility across Meshtastic releases.
Args:
p: Packet dictionary produced by ``_pkt_to_dict``.
"""
dec = p.get("decoded") or {}
text = _first(dec, "payload.text", "text", default=None)
if not text:
return # ignore non-text packets
# port filter: only keep packets from the TEXT_MESSAGE_APP port
portnum_raw = _first(dec, "portnum", default=None)
portnum = str(portnum_raw).upper() if portnum_raw is not None else None
if portnum in {"5", "NODEINFO_APP"}:
store_nodeinfo_packet(p, dec)
return
if portnum in {"4", "POSITION_APP"}:
store_position_packet(p, dec)
return
text = _first(dec, "payload.text", "text", default=None)
encrypted = _first(dec, "payload.encrypted", "encrypted", default=None)
if encrypted is None:
encrypted = _first(p, "encrypted", default=None)
if not text and not encrypted:
return # ignore packets that lack text and encrypted payloads
# port filter: only keep packets from the TEXT_MESSAGE_APP port
if portnum and portnum not in {"1", "TEXT_MESSAGE_APP"}:
return # ignore non-text-message ports
@@ -340,6 +1118,7 @@ def store_packet_dict(p: dict):
"channel": ch,
"portnum": str(portnum) if portnum is not None else None,
"text": text,
"encrypted": encrypted,
"snr": float(snr) if snr is not None else None,
"rssi": int(rssi) if rssi is not None else None,
"hop_limit": int(hop) if hop is not None else None,
@@ -361,6 +1140,11 @@ def on_receive(packet, interface):
interface: Serial interface instance (unused).
"""
if isinstance(packet, dict):
if packet.get("_potatomesh_seen"):
return
packet["_potatomesh_seen"] = True
p = None
try:
p = _pkt_to_dict(packet)
@@ -370,6 +1154,20 @@ def on_receive(packet, interface):
print(f"[warn] failed to store packet: {e} | info: {info}")
def _subscribe_receive_topics() -> list[str]:
    """Subscribe ``on_receive`` to relevant PubSub topics.

    Subscription failures are tolerated per-topic (logged when ``DEBUG``)
    so one bad topic cannot prevent the others from being wired up.

    Returns:
        The list of topic names that were successfully subscribed.
    """
    subscribed = []
    for topic in _RECEIVE_TOPICS:
        try:
            pub.subscribe(on_receive, topic)
            subscribed.append(topic)
        except Exception as exc:  # pragma: no cover - pub may raise in prod only
            if DEBUG:
                print(f"[debug] failed to subscribe to {topic!r}: {exc}")
    return subscribed
# --- Main ---------------------------------------------------------------------
def _node_items_snapshot(nodes_obj, retries: int = 3):
"""Return a snapshot list of ``(node_id, node)`` pairs.
@@ -420,9 +1218,20 @@ def main():
"""Run the mesh synchronisation daemon."""
# Subscribe to PubSub topics (reliable in current meshtastic)
pub.subscribe(on_receive, "meshtastic.receive")
subscribed = _subscribe_receive_topics()
if DEBUG and subscribed:
print(f"[debug] subscribed to receive topics: {', '.join(subscribed)}")
iface = SerialInterface(devPath=PORT)
def _close_interface(iface_obj):
if iface_obj is None:
return
try:
iface_obj.close()
except Exception:
pass
iface = None
retry_delay = max(0.0, _RECONNECT_INITIAL_DELAY_SECS)
stop = threading.Event()
@@ -439,6 +1248,24 @@ def main():
f"Mesh daemon: nodes+messages → {target} | port={PORT} | channel={CHANNEL_INDEX}"
)
while not stop.is_set():
if iface is None:
try:
iface = _create_serial_interface(PORT)
retry_delay = max(0.0, _RECONNECT_INITIAL_DELAY_SECS)
except Exception as exc:
print(f"[warn] failed to create mesh interface: {exc}")
stop.wait(retry_delay)
if _RECONNECT_MAX_DELAY_SECS > 0:
retry_delay = min(
(
retry_delay * 2
if retry_delay
else _RECONNECT_INITIAL_DELAY_SECS
),
_RECONNECT_MAX_DELAY_SECS,
)
continue
try:
nodes = getattr(iface, "nodes", {}) or {}
node_items = _node_items_snapshot(nodes)
@@ -459,12 +1286,20 @@ def main():
print(f"[debug] node object: {n!r}")
except Exception as e:
print(f"[warn] failed to update node snapshot: {e}")
_close_interface(iface)
iface = None
stop.wait(retry_delay)
if _RECONNECT_MAX_DELAY_SECS > 0:
retry_delay = min(
retry_delay * 2 if retry_delay else _RECONNECT_INITIAL_DELAY_SECS,
_RECONNECT_MAX_DELAY_SECS,
)
continue
retry_delay = max(0.0, _RECONNECT_INITIAL_DELAY_SECS)
stop.wait(SNAPSHOT_SECS)
try:
iface.close()
except Exception:
pass
_close_interface(iface)
if __name__ == "__main__":

View File

@@ -21,10 +21,10 @@ CREATE TABLE IF NOT EXISTS messages (
channel INTEGER,
portnum TEXT,
text TEXT,
encrypted TEXT,
snr REAL,
rssi INTEGER,
hop_limit INTEGER,
raw_json TEXT
hop_limit INTEGER
);
CREATE INDEX IF NOT EXISTS idx_messages_rx_time ON messages(rx_time);

View File

@@ -0,0 +1,4 @@
-- Add support for encrypted messages to the existing schema.
-- NOTE(review): SQLite's ALTER TABLE has no "IF NOT EXISTS" for columns, so
-- applying this migration twice fails with "duplicate column name encrypted";
-- confirm the migration runner tracks applied migrations before re-running.
BEGIN;
ALTER TABLE messages ADD COLUMN encrypted TEXT;
COMMIT;

40
data/positions.sql Normal file
View File

@@ -0,0 +1,40 @@
-- Copyright (C) 2025 l5yth
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
-- Position reports received from mesh nodes, one row per position packet.
CREATE TABLE IF NOT EXISTS positions (
    id INTEGER PRIMARY KEY,        -- packet id
    node_id TEXT,                  -- sender node id (e.g. "!b1fa2b07")
    node_num INTEGER,              -- numeric form of the sender node id
    rx_time INTEGER NOT NULL,      -- reception time, unix seconds
    rx_iso TEXT NOT NULL,          -- reception time, ISO-8601 string
    position_time INTEGER,         -- timestamp embedded in the position payload
    to_id TEXT,                    -- destination id (e.g. "^all" for broadcast)
    latitude REAL,
    longitude REAL,
    altitude REAL,
    location_source TEXT,          -- e.g. "LOC_INTERNAL"
    precision_bits INTEGER,
    sats_in_view INTEGER,
    pdop REAL,                     -- presumably position dilution of precision; verify against firmware docs
    ground_speed REAL,
    ground_track REAL,
    snr REAL,                      -- radio link quality of the received packet
    rssi INTEGER,
    hop_limit INTEGER,
    bitfield INTEGER,
    payload_b64 TEXT               -- raw position payload, base64-encoded
);
-- Indexes for time-range queries and per-node history lookups.
CREATE INDEX IF NOT EXISTS idx_positions_rx_time ON positions(rx_time);
CREATE INDEX IF NOT EXISTS idx_positions_node_id ON positions(node_id);

7
data/requirements.txt Normal file
View File

@@ -0,0 +1,7 @@
# Production dependencies
meshtastic>=2.0.0
protobuf>=4.21.12
# Development dependencies (optional)
black>=23.0.0
pytest>=7.0.0

34
docker-compose.dev.yml Normal file
View File

@@ -0,0 +1,34 @@
# Development overrides for docker-compose.yml
# (combine with the base file, e.g. via multiple -f flags).
services:
  web:
    environment:
      DEBUG: 1
    volumes:
      # Bind-mount the source tree for live editing during development.
      - ./web:/app
      - ./data:/data
      # Anonymous volume so the container's installed bundle is not
      # shadowed by the ./web bind mount above.
      - /app/vendor/bundle
  web-bridge:
    environment:
      DEBUG: 1
    volumes:
      - ./web:/app
      - ./data:/data
      - /app/vendor/bundle
    ports:
      - "41447:41447"
      - "9292:9292"
  ingestor:
    environment:
      DEBUG: 1
    volumes:
      - ./data:/app
      # Anonymous volume preserving the container's local install dir.
      - /app/.local
  ingestor-bridge:
    environment:
      DEBUG: 1
    volumes:
      - ./data:/app
      - /app/.local

29
docker-compose.prod.yml Normal file
View File

@@ -0,0 +1,29 @@
# Production overrides for docker-compose.yml
services:
web:
build:
target: production
environment:
DEBUG: 0
restart: always
web-bridge:
build:
target: production
environment:
DEBUG: 0
restart: always
ingestor:
build:
target: production
environment:
DEBUG: 0
restart: always
ingestor-bridge:
build:
target: production
environment:
DEBUG: 0
restart: always

92
docker-compose.yml Normal file
View File

@@ -0,0 +1,92 @@
x-web-base: &web-base
image: ghcr.io/l5yth/potato-mesh-web-linux-amd64:latest
environment:
SITE_NAME: ${SITE_NAME:-My Meshtastic Network}
DEFAULT_CHANNEL: ${DEFAULT_CHANNEL:-#MediumFast}
DEFAULT_FREQUENCY: ${DEFAULT_FREQUENCY:-868MHz}
MAP_CENTER_LAT: ${MAP_CENTER_LAT:-52.502889}
MAP_CENTER_LON: ${MAP_CENTER_LON:-13.404194}
MAX_NODE_DISTANCE_KM: ${MAX_NODE_DISTANCE_KM:-50}
MATRIX_ROOM: ${MATRIX_ROOM:-}
API_TOKEN: ${API_TOKEN}
DEBUG: ${DEBUG:-0}
volumes:
- potatomesh_data:/app/data
- potatomesh_logs:/app/logs
restart: unless-stopped
deploy:
resources:
limits:
memory: 512M
cpus: '0.5'
reservations:
memory: 256M
cpus: '0.25'
x-ingestor-base: &ingestor-base
image: ghcr.io/l5yth/potato-mesh-ingestor-linux-amd64:latest
environment:
MESH_SERIAL: ${MESH_SERIAL:-/dev/ttyACM0}
MESH_SNAPSHOT_SECS: ${MESH_SNAPSHOT_SECS:-60}
MESH_CHANNEL_INDEX: ${MESH_CHANNEL_INDEX:-0}
POTATOMESH_INSTANCE: ${POTATOMESH_INSTANCE:-http://web:41447}
API_TOKEN: ${API_TOKEN}
DEBUG: ${DEBUG:-0}
volumes:
- potatomesh_data:/app/data
- potatomesh_logs:/app/logs
devices:
- ${MESH_SERIAL:-/dev/ttyACM0}:${MESH_SERIAL:-/dev/ttyACM0}
privileged: false
restart: unless-stopped
deploy:
resources:
limits:
memory: 256M
cpus: '0.25'
reservations:
memory: 128M
cpus: '0.1'
services:
web:
<<: *web-base
network_mode: host
ingestor:
<<: *ingestor-base
network_mode: host
depends_on:
- web
extra_hosts:
- "web:127.0.0.1"
web-bridge:
<<: *web-base
container_name: potatomesh-web-bridge
networks:
- potatomesh-network
ports:
- "41447:41447"
profiles:
- bridge
ingestor-bridge:
<<: *ingestor-base
container_name: potatomesh-ingestor-bridge
networks:
- potatomesh-network
depends_on:
- web-bridge
profiles:
- bridge
volumes:
potatomesh_data:
driver: local
potatomesh_logs:
driver: local
networks:
potatomesh-network:
driver: bridge

BIN
scrot-0.3.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 952 KiB

77
tests/dump.py Normal file
View File

@@ -0,0 +1,77 @@
#!/usr/bin/env python3
import json, os, signal, sys, time, threading
from datetime import datetime, timezone
from meshtastic.serial_interface import SerialInterface
from meshtastic.mesh_interface import MeshInterface
from pubsub import pub
# Serial device of the attached Meshtastic node (override via MESH_SERIAL).
PORT = os.environ.get("MESH_SERIAL", "/dev/ttyACM0")
# Destination NDJSON file for captured records (override via MESH_DUMP_FILE).
OUT = os.environ.get("MESH_DUMP_FILE", "meshtastic-dump.ndjson")
# line-buffered append so you can tail -f safely
f = open(OUT, "a", buffering=1, encoding="utf-8")
def now():
    """Return the current UTC time as an ISO-8601 timestamp string."""
    current = datetime.now(timezone.utc)
    return current.isoformat()
def write(kind, payload):
    """Append one NDJSON record (ts + kind, merged with payload) to the dump file."""
    record = {"ts": now(), "kind": kind}
    # Payload keys deliberately win over "ts"/"kind" on collision,
    # matching dict-unpacking merge order.
    record.update(payload)
    line = json.dumps(record, ensure_ascii=False, default=str)
    f.write(line + "\n")
# Connect to the node
iface: MeshInterface = SerialInterface(PORT)
# Packet callback: every RF/Mesh packet the node receives/decodes lands here
def on_packet(packet, iface):
    """Record one received mesh packet as an NDJSON ``packet`` record."""
    # 'packet' already includes decoded fields when available (portnum, payload, position, telemetry, etc.)
    write("packet", {"packet": packet})
# Node callback: topology/metadata updates (nodeinfo, hops, lastHeard, etc.)
def on_node(node, iface):
    """Record one node update as an NDJSON ``node`` record."""
    write("node", {"node": node})
iface.onReceive = on_packet
pub.subscribe(on_node, "meshtastic.node")
# Write a little header so you know what you captured
try:
    # myInfo may be absent or partially populated depending on connection state.
    my = getattr(iface, "myInfo", None)
    write(
        "meta",
        {
            "event": "started",
            "port": PORT,
            "my_node_num": getattr(my, "my_node_num", None) if my else None,
        },
    )
except Exception as e:
    # Still record the start event even if node metadata could not be read.
    write("meta", {"event": "started", "port": PORT, "error": str(e)})
# Keep the process alive until Ctrl-C
def _stop(signum, frame):
    """Signal handler: write a final record, release resources, and exit."""
    write("meta", {"event": "stopping"})
    try:
        try:
            # pubsub may already be torn down at shutdown; ignore failures.
            pub.unsubscribe(on_node, "meshtastic.node")
        except Exception:
            pass
        iface.close()
    finally:
        # Always close the dump file so buffered records are flushed to disk.
        f.close()
    sys.exit(0)
signal.signal(signal.SIGINT, _stop)
signal.signal(signal.SIGTERM, _stop)
# Simple sleep loop; avoids busy-wait
while True:
time.sleep(1)

View File

@@ -13,7 +13,6 @@
"snr": -13.25,
"node": {
"snr": -13.25,
"raw_json": null,
"node_id": "!bba83318",
"num": 3148362520,
"short_name": "BerF",
@@ -53,7 +52,6 @@
"snr": -12.0,
"node": {
"snr": -12.0,
"raw_json": null,
"node_id": "!43b6e530",
"num": 1136059696,
"short_name": "FFSR",
@@ -93,7 +91,6 @@
"snr": -13.5,
"node": {
"snr": 11.0,
"raw_json": null,
"node_id": "!d42e18e8",
"num": 3559790824,
"short_name": "RRun",
@@ -133,7 +130,6 @@
"snr": -13.0,
"node": {
"snr": 11.0,
"raw_json": null,
"node_id": "!d42e18e8",
"num": 3559790824,
"short_name": "RRun",
@@ -173,7 +169,6 @@
"snr": 11.0,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!194a7351",
"num": 424309585,
"short_name": "l5y7",
@@ -213,7 +208,6 @@
"snr": 11.25,
"node": {
"snr": 11.25,
"raw_json": null,
"node_id": "!4ed36bd0",
"num": 1322478544,
"short_name": "RDM",
@@ -253,7 +247,6 @@
"snr": 11.0,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!194a7351",
"num": 424309585,
"short_name": "l5y7",
@@ -293,7 +286,6 @@
"snr": 10.75,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!194a7351",
"num": 424309585,
"short_name": "l5y7",
@@ -333,7 +325,6 @@
"snr": 12.0,
"node": {
"snr": 12.0,
"raw_json": null,
"node_id": "!b03c97a4",
"num": 2956760996,
"short_name": "BLN1",
@@ -373,7 +364,6 @@
"snr": -15.0,
"node": {
"snr": 11.5,
"raw_json": null,
"node_id": "!9eeb25ec",
"num": 2666210796,
"short_name": "25ec",
@@ -413,7 +403,6 @@
"snr": 11.25,
"node": {
"snr": 11.25,
"raw_json": null,
"node_id": "!f9b0938c",
"num": 4189098892,
"short_name": "Ed-1",
@@ -453,7 +442,6 @@
"snr": 11.25,
"node": {
"snr": 10.5,
"raw_json": null,
"node_id": "!6c73bf84",
"num": 1819524996,
"short_name": "ts1",
@@ -493,7 +481,6 @@
"snr": 11.25,
"node": {
"snr": null,
"raw_json": null,
"node_id": null,
"num": null,
"short_name": null,
@@ -533,7 +520,6 @@
"snr": 11.0,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!194a7351",
"num": 424309585,
"short_name": "l5y7",
@@ -573,7 +559,6 @@
"snr": 11.0,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!6cf821fb",
"num": 1828200955,
"short_name": "OKP1",
@@ -613,7 +598,6 @@
"snr": 10.75,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!6cf821fb",
"num": 1828200955,
"short_name": "OKP1",
@@ -653,7 +637,6 @@
"snr": 10.5,
"node": {
"snr": null,
"raw_json": null,
"node_id": null,
"num": null,
"short_name": null,
@@ -693,7 +676,6 @@
"snr": 10.25,
"node": {
"snr": 10.25,
"raw_json": null,
"node_id": "!db2b23f4",
"num": 3677037556,
"short_name": "Eagl",
@@ -733,7 +715,6 @@
"snr": 11.25,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!6cf821fb",
"num": 1828200955,
"short_name": "OKP1",
@@ -773,7 +754,6 @@
"snr": 11.0,
"node": {
"snr": null,
"raw_json": null,
"node_id": null,
"num": null,
"short_name": null,
@@ -813,7 +793,6 @@
"snr": -11.75,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!177cfa26",
"num": 394066470,
"short_name": "lun1",
@@ -853,7 +832,6 @@
"snr": 11.25,
"node": {
"snr": 10.5,
"raw_json": null,
"node_id": "!9ea0c780",
"num": 2661336960,
"short_name": "nguE",
@@ -893,7 +871,6 @@
"snr": 10.75,
"node": {
"snr": null,
"raw_json": null,
"node_id": null,
"num": null,
"short_name": null,
@@ -933,7 +910,6 @@
"snr": 11.5,
"node": {
"snr": 11.0,
"raw_json": null,
"node_id": "!e80cda12",
"num": 3893156370,
"short_name": "mowW",
@@ -973,7 +949,6 @@
"snr": 11.0,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!da635e24",
"num": 3663945252,
"short_name": "LAN",
@@ -1013,7 +988,6 @@
"snr": 11.5,
"node": {
"snr": null,
"raw_json": null,
"node_id": null,
"num": null,
"short_name": null,
@@ -1053,7 +1027,6 @@
"snr": 11.5,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!da635e24",
"num": 3663945252,
"short_name": "LAN",
@@ -1093,7 +1066,6 @@
"snr": -11.75,
"node": {
"snr": -9.75,
"raw_json": null,
"node_id": "!a0cb1608",
"num": 2697664008,
"short_name": "KBV5",
@@ -1133,7 +1105,6 @@
"snr": 10.75,
"node": {
"snr": 10.25,
"raw_json": null,
"node_id": "!bcf10936",
"num": 3169913142,
"short_name": "0936",
@@ -1173,7 +1144,6 @@
"snr": 11.75,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!194a7351",
"num": 424309585,
"short_name": "l5y7",
@@ -1213,7 +1183,6 @@
"snr": -13.25,
"node": {
"snr": 11.5,
"raw_json": null,
"node_id": "!a0cc6904",
"num": 2697750788,
"short_name": "Kdû",
@@ -1253,7 +1222,6 @@
"snr": 10.5,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!da635e24",
"num": 3663945252,
"short_name": "LAN",
@@ -1293,7 +1261,6 @@
"snr": 11.0,
"node": {
"snr": 11.5,
"raw_json": null,
"node_id": "!9eeb25ec",
"num": 2666210796,
"short_name": "25ec",
@@ -1333,7 +1300,6 @@
"snr": -14.0,
"node": {
"snr": 11.5,
"raw_json": null,
"node_id": "!a0cc6904",
"num": 2697750788,
"short_name": "Kdû",
@@ -1373,7 +1339,6 @@
"snr": 11.25,
"node": {
"snr": 11.5,
"raw_json": null,
"node_id": "!9eeb25ec",
"num": 2666210796,
"short_name": "25ec",
@@ -1413,7 +1378,6 @@
"snr": 11.5,
"node": {
"snr": 11.5,
"raw_json": null,
"node_id": "!9eeb25ec",
"num": 2666210796,
"short_name": "25ec",
@@ -1453,7 +1417,6 @@
"snr": 11.75,
"node": {
"snr": 11.5,
"raw_json": null,
"node_id": "!9eeb25ec",
"num": 2666210796,
"short_name": "25ec",
@@ -1493,7 +1456,6 @@
"snr": 11.75,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!194a7351",
"num": 424309585,
"short_name": "l5y7",
@@ -1533,7 +1495,6 @@
"snr": 10.75,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!03b9ca11",
"num": 62507537,
"short_name": "ca11",
@@ -1573,7 +1534,6 @@
"snr": 7.5,
"node": {
"snr": 10.25,
"raw_json": null,
"node_id": "!db2b23f4",
"num": 3677037556,
"short_name": "Eagl",
@@ -1613,7 +1573,6 @@
"snr": 10.75,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!194a7351",
"num": 424309585,
"short_name": "l5y7",
@@ -1653,7 +1612,6 @@
"snr": 10.75,
"node": {
"snr": 10.25,
"raw_json": null,
"node_id": "!db2b23f4",
"num": 3677037556,
"short_name": "Eagl",
@@ -1693,7 +1651,6 @@
"snr": 10.75,
"node": {
"snr": null,
"raw_json": null,
"node_id": null,
"num": null,
"short_name": null,
@@ -1733,7 +1690,6 @@
"snr": 10.0,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!da635e24",
"num": 3663945252,
"short_name": "LAN",
@@ -1773,7 +1729,6 @@
"snr": 10.5,
"node": {
"snr": null,
"raw_json": null,
"node_id": null,
"num": null,
"short_name": null,
@@ -1813,7 +1768,6 @@
"snr": 11.0,
"node": {
"snr": 11.5,
"raw_json": null,
"node_id": "!a0cc6904",
"num": 2697750788,
"short_name": "Kdû",
@@ -1853,7 +1807,6 @@
"snr": -12.25,
"node": {
"snr": -12.25,
"raw_json": null,
"node_id": "!2f945044",
"num": 798249028,
"short_name": "BND",
@@ -1893,7 +1846,6 @@
"snr": 11.0,
"node": {
"snr": null,
"raw_json": null,
"node_id": null,
"num": null,
"short_name": null,
@@ -1933,7 +1885,6 @@
"snr": 10.5,
"node": {
"snr": 11.5,
"raw_json": null,
"node_id": "!9ee71c38",
"num": 2665946168,
"short_name": "1c38",
@@ -1973,7 +1924,6 @@
"snr": 10.75,
"node": {
"snr": null,
"raw_json": null,
"node_id": null,
"num": null,
"short_name": null,
@@ -2013,7 +1963,6 @@
"snr": 11.0,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!194a7351",
"num": 424309585,
"short_name": "l5y7",
@@ -2053,7 +2002,6 @@
"snr": 10.5,
"node": {
"snr": -6.25,
"raw_json": null,
"node_id": "!7c5b0920",
"num": 2086340896,
"short_name": "FFTB",
@@ -2093,7 +2041,6 @@
"snr": 10.25,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!194a7351",
"num": 424309585,
"short_name": "l5y7",
@@ -2133,7 +2080,6 @@
"snr": 11.25,
"node": {
"snr": 10.5,
"raw_json": null,
"node_id": "!9ea0c780",
"num": 2661336960,
"short_name": "nguE",
@@ -2173,7 +2119,6 @@
"snr": 10.75,
"node": {
"snr": -12.75,
"raw_json": null,
"node_id": "!0910c922",
"num": 152095010,
"short_name": "c922",
@@ -2213,7 +2158,6 @@
"snr": 11.0,
"node": {
"snr": null,
"raw_json": null,
"node_id": null,
"num": null,
"short_name": null,
@@ -2253,7 +2197,6 @@
"snr": 11.0,
"node": {
"snr": 11.0,
"raw_json": null,
"node_id": "!9ee71430",
"num": 2665944112,
"short_name": "FiSp",
@@ -2293,7 +2236,6 @@
"snr": 11.5,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!194a7351",
"num": 424309585,
"short_name": "l5y7",
@@ -2333,7 +2275,6 @@
"snr": 10.75,
"node": {
"snr": 10.25,
"raw_json": null,
"node_id": "!bcf10936",
"num": 3169913142,
"short_name": "0936",
@@ -2373,7 +2314,6 @@
"snr": 11.0,
"node": {
"snr": 11.25,
"raw_json": null,
"node_id": "!16ced364",
"num": 382653284,
"short_name": "Pat",
@@ -2413,7 +2353,6 @@
"snr": 11.25,
"node": {
"snr": 11.5,
"raw_json": null,
"node_id": "!9ee71c38",
"num": 2665946168,
"short_name": "1c38",
@@ -2453,7 +2392,6 @@
"snr": 10.5,
"node": {
"snr": 11.5,
"raw_json": null,
"node_id": "!9ee71c38",
"num": 2665946168,
"short_name": "1c38",
@@ -2493,7 +2431,6 @@
"snr": 10.25,
"node": {
"snr": 10.0,
"raw_json": null,
"node_id": "!a3deea53",
"num": 2749295187,
"short_name": "🐸",
@@ -2533,7 +2470,6 @@
"snr": 9.0,
"node": {
"snr": 10.5,
"raw_json": null,
"node_id": "!9ea0c780",
"num": 2661336960,
"short_name": "nguE",
@@ -2573,7 +2509,6 @@
"snr": 11.5,
"node": {
"snr": -13.25,
"raw_json": null,
"node_id": "!bba83318",
"num": 3148362520,
"short_name": "BerF",
@@ -2613,7 +2548,6 @@
"snr": 9.25,
"node": {
"snr": 11.5,
"raw_json": null,
"node_id": "!9ee71c38",
"num": 2665946168,
"short_name": "1c38",
@@ -2653,7 +2587,6 @@
"snr": 10.25,
"node": {
"snr": 11.0,
"raw_json": null,
"node_id": "!e80cda12",
"num": 3893156370,
"short_name": "mowW",
@@ -2693,7 +2626,6 @@
"snr": -5.0,
"node": {
"snr": 11.5,
"raw_json": null,
"node_id": "!a0cc6904",
"num": 2697750788,
"short_name": "Kdû",
@@ -2733,7 +2665,6 @@
"snr": 11.0,
"node": {
"snr": 11.0,
"raw_json": null,
"node_id": "!e80cda12",
"num": 3893156370,
"short_name": "mowW",
@@ -2773,7 +2704,6 @@
"snr": 0.75,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!da635e24",
"num": 3663945252,
"short_name": "LAN",
@@ -2813,7 +2743,6 @@
"snr": 11.25,
"node": {
"snr": null,
"raw_json": null,
"node_id": null,
"num": null,
"short_name": null,
@@ -2853,7 +2782,6 @@
"snr": 11.5,
"node": {
"snr": null,
"raw_json": null,
"node_id": null,
"num": null,
"short_name": null,
@@ -2893,7 +2821,6 @@
"snr": 10.0,
"node": {
"snr": 11.25,
"raw_json": null,
"node_id": "!16ced364",
"num": 382653284,
"short_name": "Pat",
@@ -2933,7 +2860,6 @@
"snr": 11.0,
"node": {
"snr": -9.75,
"raw_json": null,
"node_id": "!a0cb1608",
"num": 2697664008,
"short_name": "KBV5",
@@ -2973,7 +2899,6 @@
"snr": 9.5,
"node": {
"snr": -9.75,
"raw_json": null,
"node_id": "!a0cb1608",
"num": 2697664008,
"short_name": "KBV5",
@@ -3013,7 +2938,6 @@
"snr": 10.75,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!da635e24",
"num": 3663945252,
"short_name": "LAN",
@@ -3053,7 +2977,6 @@
"snr": 11.0,
"node": {
"snr": -12.0,
"raw_json": null,
"node_id": "!43b6e530",
"num": 1136059696,
"short_name": "FFSR",
@@ -3093,7 +3016,6 @@
"snr": 11.0,
"node": {
"snr": 11.0,
"raw_json": null,
"node_id": "!e80cda12",
"num": 3893156370,
"short_name": "mowW",
@@ -3133,7 +3055,6 @@
"snr": 11.0,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!da635e24",
"num": 3663945252,
"short_name": "LAN",
@@ -3173,7 +3094,6 @@
"snr": 10.25,
"node": {
"snr": 11.25,
"raw_json": null,
"node_id": "!16ced364",
"num": 382653284,
"short_name": "Pat",
@@ -3213,7 +3133,6 @@
"snr": 10.5,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!da635e24",
"num": 3663945252,
"short_name": "LAN",
@@ -3253,7 +3172,6 @@
"snr": 10.75,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!194a7351",
"num": 424309585,
"short_name": "l5y7",
@@ -3293,7 +3211,6 @@
"snr": 11.0,
"node": {
"snr": 11.0,
"raw_json": null,
"node_id": "!abbdf3f7",
"num": 2881352695,
"short_name": "f3f7",
@@ -3333,7 +3250,6 @@
"snr": 10.5,
"node": {
"snr": 10.5,
"raw_json": null,
"node_id": "!c0c32348",
"num": 3234014024,
"short_name": "CooP",
@@ -3373,7 +3289,6 @@
"snr": 11.0,
"node": {
"snr": 11.25,
"raw_json": null,
"node_id": "!16ced364",
"num": 382653284,
"short_name": "Pat",
@@ -3413,7 +3328,6 @@
"snr": 10.5,
"node": {
"snr": null,
"raw_json": null,
"node_id": null,
"num": null,
"short_name": null,
@@ -3453,7 +3367,6 @@
"snr": -12.5,
"node": {
"snr": -9.75,
"raw_json": null,
"node_id": "!a0cb1608",
"num": 2697664008,
"short_name": "KBV5",
@@ -3493,7 +3406,6 @@
"snr": 11.0,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!da635e24",
"num": 3663945252,
"short_name": "LAN",
@@ -3533,7 +3445,6 @@
"snr": -8.75,
"node": {
"snr": null,
"raw_json": null,
"node_id": null,
"num": null,
"short_name": null,
@@ -3573,7 +3484,6 @@
"snr": 10.25,
"node": {
"snr": 10.5,
"raw_json": null,
"node_id": "!5d823fb1",
"num": 1568817073,
"short_name": "3fb1",
@@ -3613,7 +3523,6 @@
"snr": 11.25,
"node": {
"snr": -12.0,
"raw_json": null,
"node_id": "!43b6e530",
"num": 1136059696,
"short_name": "FFSR",
@@ -3653,7 +3562,6 @@
"snr": 11.0,
"node": {
"snr": 10.5,
"raw_json": null,
"node_id": "!849a8ba4",
"num": 2224720804,
"short_name": "MGN1",
@@ -3693,7 +3601,6 @@
"snr": -13.25,
"node": {
"snr": 10.5,
"raw_json": null,
"node_id": "!849a8ba4",
"num": 2224720804,
"short_name": "MGN1",
@@ -3733,7 +3640,6 @@
"snr": 10.75,
"node": {
"snr": 10.5,
"raw_json": null,
"node_id": "!9c93a2df",
"num": 2626921183,
"short_name": "xaRa",
@@ -3773,7 +3679,6 @@
"snr": 11.25,
"node": {
"snr": 11.5,
"raw_json": null,
"node_id": "!9ee71c38",
"num": 2665946168,
"short_name": "1c38",
@@ -3813,7 +3718,6 @@
"snr": 11.0,
"node": {
"snr": 11.5,
"raw_json": null,
"node_id": "!9ee71c38",
"num": 2665946168,
"short_name": "1c38",
@@ -3853,7 +3757,6 @@
"snr": 11.0,
"node": {
"snr": 10.5,
"raw_json": null,
"node_id": "!5d823fb1",
"num": 1568817073,
"short_name": "3fb1",
@@ -3893,7 +3796,6 @@
"snr": 11.0,
"node": {
"snr": 10.5,
"raw_json": null,
"node_id": "!6c73bf84",
"num": 1819524996,
"short_name": "ts1",
@@ -3933,7 +3835,6 @@
"snr": 11.25,
"node": {
"snr": null,
"raw_json": null,
"node_id": null,
"num": null,
"short_name": null,
@@ -3973,7 +3874,6 @@
"snr": 11.25,
"node": {
"snr": 10.75,
"raw_json": null,
"node_id": "!194a7351",
"num": 424309585,
"short_name": "l5y7",

View File

@@ -1,3 +1,4 @@
import base64
import importlib
import sys
import types
@@ -15,6 +16,15 @@ def mesh_module(monkeypatch):
repo_root = Path(__file__).resolve().parents[1]
monkeypatch.syspath_prepend(str(repo_root))
try:
import meshtastic as real_meshtastic # type: ignore
except Exception: # pragma: no cover - dependency may be unavailable in CI
real_meshtastic = None
real_protobuf = (
getattr(real_meshtastic, "protobuf", None) if real_meshtastic else None
)
# Stub meshtastic.serial_interface.SerialInterface
serial_interface_mod = types.ModuleType("meshtastic.serial_interface")
@@ -27,13 +37,30 @@ def mesh_module(monkeypatch):
serial_interface_mod.SerialInterface = DummySerialInterface
tcp_interface_mod = types.ModuleType("meshtastic.tcp_interface")
class DummyTCPInterface:
def __init__(self, *_, **__):
self.closed = False
def close(self):
self.closed = True
tcp_interface_mod.TCPInterface = DummyTCPInterface
meshtastic_mod = types.ModuleType("meshtastic")
meshtastic_mod.serial_interface = serial_interface_mod
meshtastic_mod.tcp_interface = tcp_interface_mod
if real_protobuf is not None:
meshtastic_mod.protobuf = real_protobuf
monkeypatch.setitem(sys.modules, "meshtastic", meshtastic_mod)
monkeypatch.setitem(
sys.modules, "meshtastic.serial_interface", serial_interface_mod
)
monkeypatch.setitem(sys.modules, "meshtastic.tcp_interface", tcp_interface_mod)
if real_protobuf is not None:
monkeypatch.setitem(sys.modules, "meshtastic.protobuf", real_protobuf)
# Stub pubsub.pub
pubsub_mod = types.ModuleType("pubsub")
@@ -48,36 +75,47 @@ def mesh_module(monkeypatch):
pubsub_mod.pub = DummyPub()
monkeypatch.setitem(sys.modules, "pubsub", pubsub_mod)
# Stub google.protobuf modules used by mesh.py
json_format_mod = types.ModuleType("google.protobuf.json_format")
# Prefer real google.protobuf modules when available, otherwise provide stubs
try:
from google.protobuf import json_format as json_format_mod # type: ignore
from google.protobuf import message as message_mod # type: ignore
except Exception: # pragma: no cover - protobuf may be missing in CI
json_format_mod = types.ModuleType("google.protobuf.json_format")
def message_to_dict(obj, *_, **__):
if hasattr(obj, "to_dict"):
return obj.to_dict()
if hasattr(obj, "__dict__"):
return dict(obj.__dict__)
return {}
def message_to_dict(obj, *_, **__):
if hasattr(obj, "to_dict"):
return obj.to_dict()
if hasattr(obj, "__dict__"):
return dict(obj.__dict__)
return {}
json_format_mod.MessageToDict = message_to_dict
json_format_mod.MessageToDict = message_to_dict
message_mod = types.ModuleType("google.protobuf.message")
message_mod = types.ModuleType("google.protobuf.message")
class DummyProtoMessage:
pass
class DummyProtoMessage:
pass
message_mod.Message = DummyProtoMessage
class DummyDecodeError(Exception):
pass
protobuf_mod = types.ModuleType("google.protobuf")
protobuf_mod.json_format = json_format_mod
protobuf_mod.message = message_mod
message_mod.Message = DummyProtoMessage
message_mod.DecodeError = DummyDecodeError
google_mod = types.ModuleType("google")
google_mod.protobuf = protobuf_mod
protobuf_mod = types.ModuleType("google.protobuf")
protobuf_mod.json_format = json_format_mod
protobuf_mod.message = message_mod
monkeypatch.setitem(sys.modules, "google", google_mod)
monkeypatch.setitem(sys.modules, "google.protobuf", protobuf_mod)
monkeypatch.setitem(sys.modules, "google.protobuf.json_format", json_format_mod)
monkeypatch.setitem(sys.modules, "google.protobuf.message", message_mod)
google_mod = types.ModuleType("google")
google_mod.protobuf = protobuf_mod
monkeypatch.setitem(sys.modules, "google", google_mod)
monkeypatch.setitem(sys.modules, "google.protobuf", protobuf_mod)
monkeypatch.setitem(sys.modules, "google.protobuf.json_format", json_format_mod)
monkeypatch.setitem(sys.modules, "google.protobuf.message", message_mod)
else:
monkeypatch.setitem(sys.modules, "google.protobuf.json_format", json_format_mod)
monkeypatch.setitem(sys.modules, "google.protobuf.message", message_mod)
module_name = "data.mesh"
if module_name in sys.modules:
@@ -102,6 +140,84 @@ def test_snapshot_interval_defaults_to_60_seconds(mesh_module):
assert mesh.SNAPSHOT_SECS == 60
@pytest.mark.parametrize("value", ["mock", "Mock", " disabled "])
def test_create_serial_interface_allows_mock(mesh_module, value):
    """Sentinel port strings yield a closable stub interface with a dict node map."""
    mesh = mesh_module
    iface = mesh._create_serial_interface(value)
    assert isinstance(iface.nodes, dict)
    iface.close()
def test_create_serial_interface_uses_serial_module(mesh_module, monkeypatch):
mesh = mesh_module
created = {}
sentinel = object()
def fake_interface(*, devPath):
created["devPath"] = devPath
return SimpleNamespace(nodes={"!foo": sentinel}, close=lambda: None)
monkeypatch.setattr(mesh, "SerialInterface", fake_interface)
iface = mesh._create_serial_interface("/dev/ttyTEST")
assert created["devPath"] == "/dev/ttyTEST"
assert iface.nodes == {"!foo": sentinel}
def test_create_serial_interface_uses_tcp_for_ip(mesh_module, monkeypatch):
    """A ``host:port`` target is routed to ``TCPInterface`` with the parsed port."""
    mesh = mesh_module
    created = {}

    def fake_tcp_interface(*, hostname, portNumber, **_):
        # Capture the connection parameters the factory passes through.
        created["hostname"] = hostname
        created["portNumber"] = portNumber
        return SimpleNamespace(nodes={}, close=lambda: None)

    monkeypatch.setattr(mesh, "TCPInterface", fake_tcp_interface)
    iface = mesh._create_serial_interface("192.168.1.25:4500")
    assert created == {"hostname": "192.168.1.25", "portNumber": 4500}
    assert iface.nodes == {}
def test_create_serial_interface_defaults_tcp_port(mesh_module, monkeypatch):
mesh = mesh_module
created = {}
def fake_tcp_interface(*, hostname, portNumber, **_):
created["hostname"] = hostname
created["portNumber"] = portNumber
return SimpleNamespace(nodes={}, close=lambda: None)
monkeypatch.setattr(mesh, "TCPInterface", fake_tcp_interface)
mesh._create_serial_interface("tcp://10.20.30.40")
assert created["hostname"] == "10.20.30.40"
assert created["portNumber"] == mesh._DEFAULT_TCP_PORT
def test_create_serial_interface_plain_ip(mesh_module, monkeypatch):
mesh = mesh_module
created = {}
def fake_tcp_interface(*, hostname, portNumber, **_):
created["hostname"] = hostname
created["portNumber"] = portNumber
return SimpleNamespace(nodes={}, close=lambda: None)
monkeypatch.setattr(mesh, "TCPInterface", fake_tcp_interface)
mesh._create_serial_interface(" 192.168.50.10 ")
assert created["hostname"] == "192.168.50.10"
assert created["portNumber"] == mesh._DEFAULT_TCP_PORT
def test_node_to_dict_handles_nested_structures(mesh_module):
mesh = mesh_module
@@ -176,6 +292,316 @@ def test_store_packet_dict_posts_text_message(mesh_module, monkeypatch):
assert priority == mesh._MESSAGE_POST_PRIORITY
def test_store_packet_dict_posts_position(mesh_module, monkeypatch):
mesh = mesh_module
captured = []
monkeypatch.setattr(
mesh,
"_queue_post_json",
lambda path, payload, *, priority: captured.append((path, payload, priority)),
)
packet = {
"id": 200498337,
"rxTime": 1_758_624_186,
"fromId": "!b1fa2b07",
"toId": "^all",
"rxSnr": -9.5,
"rxRssi": -104,
"decoded": {
"portnum": "POSITION_APP",
"bitfield": 1,
"position": {
"latitudeI": int(52.518912 * 1e7),
"longitudeI": int(13.5512064 * 1e7),
"altitude": -16,
"time": 1_758_624_189,
"locationSource": "LOC_INTERNAL",
"precisionBits": 17,
"satsInView": 7,
"PDOP": 211,
"groundSpeed": 2,
"groundTrack": 0,
"raw": {
"latitude_i": int(52.518912 * 1e7),
"longitude_i": int(13.5512064 * 1e7),
"altitude": -16,
"time": 1_758_624_189,
},
},
"payload": {
"__bytes_b64__": "DQDATR8VAMATCBjw//////////8BJb150mgoAljTAXgCgAEAmAEHuAER",
},
},
}
mesh.store_packet_dict(packet)
assert captured, "Expected POST to be triggered for position packet"
path, payload, priority = captured[0]
assert path == "/api/positions"
assert priority == mesh._POSITION_POST_PRIORITY
assert payload["id"] == 200498337
assert payload["node_id"] == "!b1fa2b07"
assert payload["node_num"] == int("b1fa2b07", 16)
assert payload["num"] == payload["node_num"]
assert payload["rx_time"] == 1_758_624_186
assert payload["rx_iso"] == mesh._iso(1_758_624_186)
assert payload["latitude"] == pytest.approx(52.518912)
assert payload["longitude"] == pytest.approx(13.5512064)
assert payload["altitude"] == pytest.approx(-16)
assert payload["position_time"] == 1_758_624_189
assert payload["location_source"] == "LOC_INTERNAL"
assert payload["precision_bits"] == 17
assert payload["sats_in_view"] == 7
assert payload["pdop"] == pytest.approx(211.0)
assert payload["ground_speed"] == pytest.approx(2.0)
assert payload["ground_track"] == pytest.approx(0.0)
assert payload["snr"] == pytest.approx(-9.5)
assert payload["rssi"] == -104
assert payload["hop_limit"] is None
assert payload["bitfield"] == 1
assert (
payload["payload_b64"]
== "DQDATR8VAMATCBjw//////////8BJb150mgoAljTAXgCgAEAmAEHuAER"
)
assert payload["raw"]["time"] == 1_758_624_189
def test_store_packet_dict_handles_nodeinfo_packet(mesh_module, monkeypatch):
mesh = mesh_module
captured = []
monkeypatch.setattr(
mesh,
"_queue_post_json",
lambda path, payload, *, priority: captured.append((path, payload, priority)),
)
from meshtastic.protobuf import config_pb2, mesh_pb2
node_info = mesh_pb2.NodeInfo()
node_info.num = 321
user = node_info.user
user.id = "!abcd1234"
user.short_name = "LoRa"
user.long_name = "LoRa Node"
user.role = config_pb2.Config.DeviceConfig.Role.Value("CLIENT")
user.hw_model = mesh_pb2.HardwareModel.Value("TBEAM")
node_info.device_metrics.battery_level = 87
node_info.device_metrics.voltage = 3.91
node_info.device_metrics.channel_utilization = 5.5
node_info.device_metrics.air_util_tx = 0.12
node_info.device_metrics.uptime_seconds = 4321
node_info.position.latitude_i = int(52.5 * 1e7)
node_info.position.longitude_i = int(13.4 * 1e7)
node_info.position.altitude = 48
node_info.position.time = 1_700_000_050
node_info.position.location_source = mesh_pb2.Position.LocSource.Value(
"LOC_INTERNAL"
)
node_info.snr = 9.5
node_info.last_heard = 1_700_000_040
node_info.hops_away = 2
node_info.is_favorite = True
payload_b64 = base64.b64encode(node_info.SerializeToString()).decode()
packet = {
"id": 999,
"rxTime": 1_700_000_200,
"from": int("abcd1234", 16),
"rxSnr": -5.5,
"decoded": {
"portnum": "NODEINFO_APP",
"payload": {"__bytes_b64__": payload_b64},
},
}
mesh.store_packet_dict(packet)
assert captured, "Expected nodeinfo packet to trigger POST"
path, payload, priority = captured[0]
assert path == "/api/nodes"
assert priority == mesh._NODE_POST_PRIORITY
assert "!abcd1234" in payload
node_entry = payload["!abcd1234"]
assert node_entry["num"] == 321
assert node_entry["lastHeard"] == 1_700_000_200
assert node_entry["snr"] == pytest.approx(9.5)
assert node_entry["hopsAway"] == 2
assert node_entry["isFavorite"] is True
assert node_entry["user"]["shortName"] == "LoRa"
assert node_entry["deviceMetrics"]["batteryLevel"] == pytest.approx(87)
assert node_entry["deviceMetrics"]["voltage"] == pytest.approx(3.91)
assert node_entry["deviceMetrics"]["uptimeSeconds"] == 4321
assert node_entry["position"]["latitude"] == pytest.approx(52.5)
assert node_entry["position"]["longitude"] == pytest.approx(13.4)
assert node_entry["position"]["time"] == 1_700_000_050
def test_store_packet_dict_handles_user_only_nodeinfo(mesh_module, monkeypatch):
    """A NODEINFO payload carrying only a ``User`` proto still posts a node.

    The serialized payload has no NodeInfo envelope fields, so the entry must
    be assembled from the decoded ``user`` section alone — and must not invent
    device metrics that were never reported.
    """
    mesh = mesh_module
    posted = []
    # Capture queued POSTs instead of exercising any network machinery.
    monkeypatch.setattr(
        mesh,
        "_queue_post_json",
        lambda path, payload, *, priority: posted.append((path, payload, priority)),
    )
    from meshtastic.protobuf import mesh_pb2

    # Serialize a bare User message (no surrounding NodeInfo fields).
    proto_user = mesh_pb2.User()
    proto_user.id = "!11223344"
    proto_user.short_name = "Test"
    proto_user.long_name = "Test Node"
    encoded = base64.b64encode(proto_user.SerializeToString()).decode()

    mesh.store_packet_dict(
        {
            "id": 42,
            "rxTime": 1_234,
            "from": int("11223344", 16),
            "decoded": {
                "portnum": "NODEINFO_APP",
                "payload": {"__bytes_b64__": encoded},
                "user": {
                    "id": "!11223344",
                    "shortName": "Test",
                    "longName": "Test Node",
                    "hwModel": "HELTEC_V3",
                },
            },
        }
    )

    assert posted
    _, payload, _ = posted[0]
    entry = payload["!11223344"]
    assert entry["lastHeard"] == 1_234
    assert entry["user"]["longName"] == "Test Node"
    assert "deviceMetrics" not in entry
def test_store_packet_dict_nodeinfo_merges_proto_user(mesh_module, monkeypatch):
    """A raw ``User`` protobuf in ``decoded['user']`` is merged into the entry.

    The serialized NodeInfo payload only carries ``snr``; sender identity comes
    from the packet-level ``fromId`` plus the protobuf User object, whose
    fields must surface as plain camelCase keys in the posted node entry.
    """
    mesh = mesh_module
    captured = []
    # Intercept queued POSTs so no network/queue machinery is exercised.
    monkeypatch.setattr(
        mesh,
        "_queue_post_json",
        lambda path, payload, *, priority: captured.append((path, payload, priority)),
    )
    from meshtastic.protobuf import mesh_pb2

    # User proto supplied out-of-band, not inside the serialized payload.
    user_msg = mesh_pb2.User()
    user_msg.id = "!44556677"
    user_msg.short_name = "Proto"
    user_msg.long_name = "Proto User"
    node_info = mesh_pb2.NodeInfo()
    node_info.snr = 2.5
    payload_b64 = base64.b64encode(node_info.SerializeToString()).decode()
    packet = {
        "id": 73,
        "rxTime": 5_000,
        "fromId": "!44556677",
        "decoded": {
            "portnum": "NODEINFO_APP",
            "payload": {"__bytes_b64__": payload_b64},
            "user": user_msg,  # protobuf message, not a dict
        },
    }
    mesh.store_packet_dict(packet)
    assert captured
    _, payload, _ = captured[0]
    node_entry = payload["!44556677"]
    # rxTime supplies lastHeard; the proto itself sets no last_heard.
    assert node_entry["lastHeard"] == 5_000
    assert node_entry["user"]["shortName"] == "Proto"
    assert node_entry["user"]["longName"] == "Proto User"
def test_store_packet_dict_nodeinfo_sanitizes_nested_proto(mesh_module, monkeypatch):
    """Protobuf objects nested inside the decoded user dict are sanitized.

    ``decoded['user']['raw']`` holds a live ``User`` message; the posted node
    entry must contain only plain dicts so the payload stays JSON-serializable.
    """
    mesh = mesh_module
    captured = []
    # Capture queued POSTs instead of performing real work.
    monkeypatch.setattr(
        mesh,
        "_queue_post_json",
        lambda path, payload, *, priority: captured.append((path, payload, priority)),
    )
    from meshtastic.protobuf import mesh_pb2

    user_msg = mesh_pb2.User()
    user_msg.id = "!55667788"
    user_msg.short_name = "Nested"
    node_info = mesh_pb2.NodeInfo()
    node_info.hops_away = 1
    payload_b64 = base64.b64encode(node_info.SerializeToString()).decode()
    packet = {
        "id": 74,
        "rxTime": 6_000,
        "fromId": "!55667788",
        "decoded": {
            "portnum": "NODEINFO_APP",
            "payload": {"__bytes_b64__": payload_b64},
            "user": {
                "id": "!55667788",
                "shortName": "Nested",
                "raw": user_msg,  # proto buried one level deep
            },
        },
    }
    mesh.store_packet_dict(packet)
    assert captured
    _, payload, _ = captured[0]
    node_entry = payload["!55667788"]
    assert node_entry["user"]["shortName"] == "Nested"
    # The nested proto must have been converted to a plain dict.
    assert isinstance(node_entry["user"]["raw"], dict)
    assert node_entry["user"]["raw"]["id"] == "!55667788"
def test_store_packet_dict_nodeinfo_uses_from_id_when_user_missing(
    mesh_module, monkeypatch
):
    """With no user info at all, the node key derives from the numeric sender.

    ``portnum`` is supplied as a raw integer rather than a name, and the
    numeric ``from`` (0x01020304) must be formatted into the canonical
    ``!01020304`` key. The fresher ``rxTime`` (200) wins over the proto's
    older ``last_heard`` (100).
    """
    mesh = mesh_module
    captured = []
    # Capture queued POSTs instead of performing real work.
    monkeypatch.setattr(
        mesh,
        "_queue_post_json",
        lambda path, payload, *, priority: captured.append((path, payload, priority)),
    )
    from meshtastic.protobuf import mesh_pb2

    node_info = mesh_pb2.NodeInfo()
    node_info.snr = 1.5
    node_info.last_heard = 100
    payload_b64 = base64.b64encode(node_info.SerializeToString()).decode()
    packet = {
        "id": 7,
        "rxTime": 200,
        "from": 0x01020304,
        # portnum as raw enum value — presumably mapped to NODEINFO_APP by
        # the module; TODO confirm against the daemon's portnum handling.
        "decoded": {"portnum": 5, "payload": {"__bytes_b64__": payload_b64}},
    }
    mesh.store_packet_dict(packet)
    assert captured
    _, payload, _ = captured[0]
    assert "!01020304" in payload
    node_entry = payload["!01020304"]
    assert node_entry["num"] == 0x01020304
    assert node_entry["lastHeard"] == 200
    assert node_entry["snr"] == pytest.approx(1.5)
def test_store_packet_dict_ignores_non_text(mesh_module, monkeypatch):
mesh = mesh_module
captured = []
@@ -192,7 +618,7 @@ def test_store_packet_dict_ignores_non_text(mesh_module, monkeypatch):
"toId": "!def",
"decoded": {
"payload": {"text": "ignored"},
"portnum": "POSITION_APP",
"portnum": "ENVIRONMENTAL_MEASUREMENT",
},
}
@@ -352,6 +778,116 @@ def test_pkt_to_dict_handles_dict_and_proto(mesh_module, monkeypatch):
assert isinstance(fallback["_unparsed"], str)
def test_main_retries_interface_creation(mesh_module, monkeypatch):
    """``main`` keeps retrying serial interface creation until it succeeds.

    ``_create_serial_interface`` fails twice before returning a working stub;
    the fake ``threading.Event`` reports "set" after three ``wait`` calls so
    the main loop terminates. Reconnect delays are zeroed to keep the test
    instantaneous.
    """
    mesh = mesh_module
    attempts = []

    class DummyEvent:
        # Stand-in for threading.Event that auto-sets after three waits,
        # bounding how long main()'s loop runs.
        def __init__(self):
            self.wait_calls = 0

        def is_set(self):
            return self.wait_calls >= 3

        def set(self):
            self.wait_calls = 3

        def wait(self, timeout):
            self.wait_calls += 1
            return self.is_set()

    class DummyInterface:
        # Minimal object exposing the attributes main() touches.
        def __init__(self):
            self.closed = False
            self.nodes = {}

        def close(self):
            self.closed = True

    iface = DummyInterface()

    def fake_create(port):
        # Fail the first two attempts to exercise the retry loop.
        attempts.append(port)
        if len(attempts) < 3:
            raise RuntimeError("boom")
        return iface

    monkeypatch.setattr(mesh, "_create_serial_interface", fake_create)
    monkeypatch.setattr(mesh.threading, "Event", DummyEvent)
    # Neutralize signal registration and timing so main() runs synchronously.
    monkeypatch.setattr(mesh.signal, "signal", lambda *_, **__: None)
    monkeypatch.setattr(mesh, "SNAPSHOT_SECS", 0)
    monkeypatch.setattr(mesh, "_RECONNECT_INITIAL_DELAY_SECS", 0)
    monkeypatch.setattr(mesh, "_RECONNECT_MAX_DELAY_SECS", 0)
    mesh.main()
    assert len(attempts) == 3
    # main() must close the interface on shutdown.
    assert iface.closed is True
def test_main_recreates_interface_after_snapshot_error(mesh_module, monkeypatch):
    """A snapshot failure tears down the interface and creates a fresh one.

    The first interface raises on its first ``nodes`` access; ``main`` must
    close it, build a replacement, and successfully snapshot ``!node`` from
    the second interface exactly once.
    """
    mesh = mesh_module

    class DummyEvent:
        # Event stub that ends the main loop after two waits.
        def __init__(self):
            self.wait_calls = 0

        def is_set(self):
            return self.wait_calls >= 2

        def set(self):
            self.wait_calls = 2

        def wait(self, timeout):
            self.wait_calls += 1
            return self.is_set()

    interfaces = []

    def fake_create(port):
        # Only the very first interface is flaky; replacements are healthy.
        fail_first = not interfaces

        class FlakyInterface:
            def __init__(self, should_fail):
                self.closed = False
                self._should_fail = should_fail
                self._calls = 0

            @property
            def nodes(self):
                # Raise only on the first access of the first interface to
                # simulate a transient device error during snapshotting.
                self._calls += 1
                if self._should_fail and self._calls == 1:
                    raise RuntimeError("temporary failure")
                return {"!node": {"id": 1}}

            def close(self):
                self.closed = True

        interface = FlakyInterface(fail_first)
        interfaces.append(interface)
        return interface

    upsert_calls = []

    def record_upsert(node_id, node):
        upsert_calls.append(node_id)

    monkeypatch.setattr(mesh, "_create_serial_interface", fake_create)
    monkeypatch.setattr(mesh, "upsert_node", record_upsert)
    monkeypatch.setattr(mesh.threading, "Event", DummyEvent)
    # Neutralize signal registration and timing so main() runs synchronously.
    monkeypatch.setattr(mesh.signal, "signal", lambda *_, **__: None)
    monkeypatch.setattr(mesh, "SNAPSHOT_SECS", 0)
    monkeypatch.setattr(mesh, "_RECONNECT_INITIAL_DELAY_SECS", 0)
    monkeypatch.setattr(mesh, "_RECONNECT_MAX_DELAY_SECS", 0)
    mesh.main()
    assert len(interfaces) >= 2
    # The failed interface must have been closed before reconnecting.
    assert interfaces[0].closed is True
    assert upsert_calls == ["!node"]
def test_store_packet_dict_uses_top_level_channel(mesh_module, monkeypatch):
mesh = mesh_module
captured = []
@@ -378,6 +914,7 @@ def test_store_packet_dict_uses_top_level_channel(mesh_module, monkeypatch):
assert payload["channel"] == 5
assert payload["portnum"] == "1"
assert payload["text"] == "hi"
assert payload["encrypted"] is None
assert payload["snr"] is None and payload["rssi"] is None
assert priority == mesh._MESSAGE_POST_PRIORITY
@@ -408,10 +945,41 @@ def test_store_packet_dict_handles_invalid_channel(mesh_module, monkeypatch):
path, payload, priority = captured[0]
assert path == "/api/messages"
assert payload["channel"] == 0
assert payload["encrypted"] is None
assert priority == mesh._MESSAGE_POST_PRIORITY
def test_post_queue_prioritises_nodes(mesh_module, monkeypatch):
def test_store_packet_dict_includes_encrypted_payload(mesh_module, monkeypatch):
    """Packets with ciphertext but no decoded text are posted as messages.

    The encrypted blob is forwarded verbatim, ``text`` stays ``None``, and the
    numeric sender id is preserved untouched.
    """
    mesh = mesh_module
    posted = []
    # Record queued POSTs instead of touching the real queue.
    monkeypatch.setattr(
        mesh,
        "_queue_post_json",
        lambda path, payload, *, priority: posted.append((path, payload, priority)),
    )

    mesh.store_packet_dict(
        {
            "id": 555,
            "rxTime": 111,
            "from": 2988082812,
            "to": "!receiver",
            "channel": 8,
            "encrypted": "abc123==",
        }
    )

    assert posted
    path, payload, priority = posted[0]
    assert path == "/api/messages"
    assert payload["encrypted"] == "abc123=="
    assert payload["text"] is None
    assert payload["from_id"] == 2988082812
    assert payload["to_id"] == "!receiver"
    assert priority == mesh._MESSAGE_POST_PRIORITY
def test_post_queue_prioritises_messages(mesh_module, monkeypatch):
mesh = mesh_module
mesh._clear_post_queue()
calls = []
@@ -428,7 +996,7 @@ def test_post_queue_prioritises_nodes(mesh_module, monkeypatch):
mesh._drain_post_queue()
assert [path for path, _ in calls] == ["/api/nodes", "/api/messages"]
assert [path for path, _ in calls] == ["/api/messages", "/api/nodes"]
def test_store_packet_dict_requires_id(mesh_module, monkeypatch):

79
web/Dockerfile Normal file
View File

@@ -0,0 +1,79 @@
# Two-stage build: compile native gems on a builder image, then copy only the
# installed bundle into a slim runtime image running as a non-root user.

# Main application builder stage
FROM ruby:3.3-alpine AS builder

# Ensure native extensions are built against musl libc rather than
# using glibc precompiled binaries (which fail on Alpine).
ENV BUNDLE_FORCE_RUBY_PLATFORM=true

# Install build dependencies and SQLite3
RUN apk add --no-cache \
    build-base \
    sqlite-dev \
    linux-headers \
    pkgconfig

# Set working directory
WORKDIR /app

# Copy Gemfile and install dependencies
# (the Gemfile.lock* glob tolerates a missing lockfile on first build)
COPY web/Gemfile web/Gemfile.lock* ./

# Install gems with SQLite3 support
RUN bundle config set --local force_ruby_platform true && \
    bundle config set --local without 'development test' && \
    bundle install --jobs=4 --retry=3

# Production stage
FROM ruby:3.3-alpine AS production

# Install runtime dependencies
# NOTE(review): curl is presumably here for an external healthcheck — confirm
# before removing; no HEALTHCHECK is declared in this Dockerfile.
RUN apk add --no-cache \
    sqlite \
    tzdata \
    curl

# Create non-root user
RUN addgroup -g 1000 -S potatomesh && \
    adduser -u 1000 -S potatomesh -G potatomesh

# Set working directory
WORKDIR /app

# Copy installed gems from builder stage
COPY --from=builder /usr/local/bundle /usr/local/bundle

# Copy application code (exclude Dockerfile from web directory)
# NOTE(review): trailing-slash sources copy directory *contents*, so the files
# of web/public/ and web/spec/ land flat in /app rather than under
# /app/public and /app/spec — confirm the app expects this (web/views/ below
# is copied with its directory preserved).
COPY --chown=potatomesh:potatomesh web/app.rb web/app.sh web/Gemfile web/Gemfile.lock* web/public/ web/spec/ ./
COPY --chown=potatomesh:potatomesh web/views/ ./views/

# Copy SQL schema files from data directory
COPY --chown=potatomesh:potatomesh data/*.sql /data/

# Create data directory for SQLite database
RUN mkdir -p /app/data && \
    chown -R potatomesh:potatomesh /app/data

# Switch to non-root user
USER potatomesh

# Expose port
EXPOSE 41447

# Default environment variables (can be overridden by host)
ENV APP_ENV=production \
    MESH_DB=/app/data/mesh.db \
    DB_BUSY_TIMEOUT_MS=5000 \
    DB_BUSY_MAX_RETRIES=5 \
    DB_BUSY_RETRY_DELAY=0.05 \
    MAX_JSON_BODY_BYTES=1048576 \
    SITE_NAME="Berlin Mesh Network" \
    DEFAULT_CHANNEL="#MediumFast" \
    DEFAULT_FREQUENCY="868MHz" \
    MAP_CENTER_LAT=52.502889 \
    MAP_CENTER_LON=13.404194 \
    MAX_NODE_DISTANCE_KM=50 \
    MATRIX_ROOM="" \
    DEBUG=0

# Start the application
CMD ["ruby", "app.rb", "-p", "41447", "-o", "0.0.0.0"]

View File

@@ -24,6 +24,8 @@ require "sqlite3"
require "fileutils"
require "logger"
require "rack/utils"
require "open3"
require "time"
DB_PATH = ENV.fetch("MESH_DB", File.join(__dir__, "../data/mesh.db"))
DB_BUSY_TIMEOUT_MS = ENV.fetch("DB_BUSY_TIMEOUT_MS", "5000").to_i
@@ -38,19 +40,188 @@ MAX_JSON_BODY_BYTES = begin
rescue ArgumentError
DEFAULT_MAX_JSON_BODY_BYTES
end
VERSION_FALLBACK = "v0.3.0"
# Read a string setting from the environment.
#
# @param key [String] environment variable name.
# @param default [String] value used when the variable is unset or blank.
# @return [String] the trimmed value, or +default+ when unset/whitespace-only.
def fetch_config_string(key, default)
  cleaned = ENV[key]&.strip
  cleaned.nil? || cleaned.empty? ? default : cleaned
end
# Resolve the application version string for display.
#
# When the source tree is a git checkout, runs `git describe --tags --long`:
# an exact tag yields the bare tag, otherwise "<tag>+<commits>-<hash>".
# Falls back to VERSION_FALLBACK when there is no .git directory, the
# describe call fails or returns nothing, or any error is raised.
#
# @return [String] version label, e.g. "v0.3.0" or "v0.3.0+4-1a2b3c4".
def determine_app_version
  repo_root = File.expand_path("..", __dir__)
  git_dir = File.join(repo_root, ".git")
  return VERSION_FALLBACK unless File.directory?(git_dir)
  stdout, status = Open3.capture2("git", "-C", repo_root, "describe", "--tags", "--long", "--abbrev=7")
  return VERSION_FALLBACK unless status.success?
  raw = stdout.strip
  return VERSION_FALLBACK if raw.empty?
  # --long output has the shape "<tag>-<commits-since-tag>-g<short-hash>".
  match = /\A(?<tag>.+)-(?<count>\d+)-g(?<hash>[0-9a-f]+)\z/.match(raw)
  return raw unless match
  tag = match[:tag]
  count = match[:count].to_i
  hash = match[:hash]
  # Exactly on a tag: report the bare tag without commit suffix.
  return tag if count.zero?
  "#{tag}+#{count}-#{hash}"
rescue StandardError
  VERSION_FALLBACK
end
APP_VERSION = determine_app_version
set :public_folder, File.join(__dir__, "public")
set :views, File.join(__dir__, "views")
SITE_NAME = ENV.fetch("SITE_NAME", "Meshtastic Berlin")
DEFAULT_CHANNEL = ENV.fetch("DEFAULT_CHANNEL", "#MediumFast")
DEFAULT_FREQUENCY = ENV.fetch("DEFAULT_FREQUENCY", "868MHz")
# GET /favicon.ico
#
# Serves the bundled favicon, falling back to the SVG logo when no .ico file
# exists in the public folder. Responses are publicly cacheable for
# WEEK_SECONDS (constant defined elsewhere in this file).
get "/favicon.ico" do
  cache_control :public, max_age: WEEK_SECONDS
  ico_path = File.join(settings.public_folder, "favicon.ico")
  if File.file?(ico_path)
    send_file ico_path, type: "image/x-icon"
  else
    send_file File.join(settings.public_folder, "potatomesh-logo.svg"), type: "image/svg+xml"
  end
end
SITE_NAME = fetch_config_string("SITE_NAME", "Meshtastic Berlin")
DEFAULT_CHANNEL = fetch_config_string("DEFAULT_CHANNEL", "#MediumFast")
DEFAULT_FREQUENCY = fetch_config_string("DEFAULT_FREQUENCY", "868MHz")
MAP_CENTER_LAT = ENV.fetch("MAP_CENTER_LAT", "52.502889").to_f
MAP_CENTER_LON = ENV.fetch("MAP_CENTER_LON", "13.404194").to_f
MAX_NODE_DISTANCE_KM = ENV.fetch("MAX_NODE_DISTANCE_KM", "137").to_f
MATRIX_ROOM = ENV.fetch("MATRIX_ROOM", "#meshtastic-berlin:matrix.org")
DEBUG = ENV["DEBUG"] == "1"
# Stringify any value and strip surrounding whitespace (nil becomes "").
#
# @param value [Object] arbitrary input.
# @return [String] trimmed string representation.
def sanitized_string(value)
  value.nil? ? "" : value.to_s.strip
end
# Trimmed site name from configuration.
def sanitized_site_name
  SITE_NAME.to_s.strip
end

# Trimmed default channel label from configuration.
def sanitized_default_channel
  DEFAULT_CHANNEL.to_s.strip
end

# Trimmed default frequency label from configuration.
def sanitized_default_frequency
  DEFAULT_FREQUENCY.to_s.strip
end

# Trimmed Matrix room, or nil when the setting is blank.
def sanitized_matrix_room
  room = MATRIX_ROOM.to_s.strip
  room.empty? ? nil : room
end
# Convert a value to a trimmed string, collapsing blanks to nil.
#
# @param value [Object] arbitrary input.
# @return [String, nil] trimmed string, or nil when the input is nil or
#   whitespace-only.
def string_or_nil(value)
  return nil if value.nil?

  text = value.to_s.strip
  text.empty? ? nil : text
end
# Best-effort conversion of arbitrary input to an Integer.
#
# Integers pass through; finite Floats and other Numerics truncate via to_i.
# Strings accept hex ("0x.."), decimal, or float notation after trimming.
#
# @param value [Object] arbitrary input.
# @return [Integer, nil] parsed integer, or nil for blank strings,
#   non-finite floats, and unparsable/unsupported values.
def coerce_integer(value)
  return value if value.is_a?(Integer)
  return value.finite? ? value.to_i : nil if value.is_a?(Float)
  return value.to_i if value.is_a?(Numeric)
  return nil unless value.is_a?(String)

  text = value.strip
  return nil if text.empty?
  return text.to_i(16) if text.match?(/\A0[xX][0-9A-Fa-f]+\z/)
  return text.to_i(10) if text.match?(/\A-?\d+\z/)

  # Last resort: parse as a float and truncate (e.g. "3.7" -> 3).
  parsed = Float(text)
  parsed.finite? ? parsed.to_i : nil
rescue ArgumentError
  nil
end
# Best-effort conversion of arbitrary input to a finite Float.
#
# Finite Floats pass through; other Numerics convert via to_f. Strings are
# trimmed and parsed with Float().
#
# @param value [Object] arbitrary input.
# @return [Float, nil] parsed float, or nil for blank strings, non-finite
#   values, and unparsable/unsupported input.
def coerce_float(value)
  return value.finite? ? value : nil if value.is_a?(Float)
  return value.to_f if value.is_a?(Numeric)
  return nil unless value.is_a?(String)

  text = value.strip
  return nil if text.empty?

  parsed = Float(text)
  parsed.finite? ? parsed : nil
rescue ArgumentError
  nil
end
# Validated node-distance cap from configuration.
#
# @return [Numeric, nil] the configured maximum distance in kilometres, or
#   nil when the constant is undefined, non-numeric, or not positive.
def sanitized_max_distance_km
  return nil unless defined?(MAX_NODE_DISTANCE_KM)

  limit = MAX_NODE_DISTANCE_KM
  limit.is_a?(Numeric) && limit.positive? ? limit : nil
end
# Render a distance with one decimal place, dropping a trailing ".0".
#
# @param distance [Numeric] distance in kilometres.
# @return [String] e.g. 137.0 -> "137", 2.5 -> "2.5".
def formatted_distance_km(distance)
  ("%.1f" % distance).sub(/\.0\z/, "")
end
# Compose the page meta-description text from site configuration.
#
# Builds one summary sentence naming the site and, depending on which of
# channel/frequency are configured, their labels; then appends fixed copy
# plus optional sentences for the distance cap and Matrix room.
#
# @return [String] space-joined sentences for the description meta tag.
def meta_description
  site = sanitized_site_name
  channel = sanitized_default_channel
  frequency = sanitized_default_frequency
  matrix = sanitized_matrix_room
  summary = "Live Meshtastic mesh map for #{site}"
  # Punctuate according to which of channel/frequency are present.
  if channel.empty? && frequency.empty?
    summary += "."
  elsif channel.empty?
    summary += " tuned to #{frequency}."
  elsif frequency.empty?
    summary += " on #{channel}."
  else
    summary += " on #{channel} (#{frequency})."
  end
  sentences = [summary, "Track nodes, messages, and coverage in real time."]
  # Mention the node-distance cap only when one is configured and positive.
  if (distance = sanitized_max_distance_km)
    sentences << "Shows nodes within roughly #{formatted_distance_km(distance)} km of the map center."
  end
  sentences << "Join the community in #{matrix} on Matrix." if matrix
  sentences.join(" ")
end
# Assemble the metadata hash consumed by the page templates.
#
# @return [Hash] :title and :name (both the sanitized site name) plus the
#   composed :description.
def meta_configuration
  site = sanitized_site_name
  { title: site, name: site, description: meta_description }
end
class << Sinatra::Application
def apply_logger_level!
logger = settings.logger
@@ -103,8 +274,9 @@ end
def db_schema_present?
return false unless File.exist?(DB_PATH)
db = open_database(readonly: true)
tables = db.execute("SELECT name FROM sqlite_master WHERE type='table' AND name IN ('nodes','messages')").flatten
tables.include?("nodes") && tables.include?("messages")
required = %w[nodes messages positions]
tables = db.execute("SELECT name FROM sqlite_master WHERE type='table' AND name IN ('nodes','messages','positions')").flatten
(required - tables).empty?
rescue SQLite3::Exception
false
ensure
@@ -117,7 +289,7 @@ end
def init_db
FileUtils.mkdir_p(File.dirname(DB_PATH))
db = open_database
%w[nodes messages].each do |schema|
%w[nodes messages positions].each do |schema|
sql_file = File.expand_path("../data/#{schema}.sql", __dir__)
db.execute_batch(File.read(sql_file))
end
@@ -182,16 +354,17 @@ def query_messages(limit)
SELECT m.*, n.*, m.snr AS msg_snr
FROM messages m
LEFT JOIN nodes n ON (
m.from_id = n.node_id OR (
CAST(m.from_id AS TEXT) <> '' AND
CAST(m.from_id AS TEXT) GLOB '[0-9]*' AND
CAST(m.from_id AS INTEGER) = n.num
m.from_id IS NOT NULL AND TRIM(m.from_id) <> '' AND (
m.from_id = n.node_id OR (
m.from_id GLOB '[0-9]*' AND CAST(m.from_id AS INTEGER) = n.num
)
)
)
WHERE COALESCE(TRIM(m.encrypted), '') = ''
ORDER BY m.rx_time DESC
LIMIT ?
SQL
msg_fields = %w[id rx_time rx_iso from_id to_id channel portnum text msg_snr rssi hop_limit]
msg_fields = %w[id rx_time rx_iso from_id to_id channel portnum text encrypted msg_snr rssi hop_limit]
rows.each do |r|
if DEBUG && (r["from_id"].nil? || r["from_id"].to_s.empty?)
raw = db.execute("SELECT * FROM messages WHERE id = ?", [r["id"]]).first
@@ -204,7 +377,8 @@ def query_messages(limit)
node[k] = r.delete(k)
end
r["snr"] = r.delete("msg_snr")
if r["from_id"] && (node["node_id"].nil? || node["node_id"].to_s.empty?)
references = [r["from_id"]].compact
if references.any? && (node["node_id"].nil? || node["node_id"].to_s.empty?)
lookup_keys = []
canonical = normalize_node_id(db, r["from_id"])
lookup_keys << canonical if canonical
@@ -227,6 +401,16 @@ def query_messages(limit)
end
node["role"] = "CLIENT" if node.key?("role") && (node["role"].nil? || node["role"].to_s.empty?)
r["node"] = node
canonical_from_id = string_or_nil(node["node_id"]) || string_or_nil(normalize_node_id(db, r["from_id"]))
if canonical_from_id
raw_from_id = string_or_nil(r["from_id"])
if raw_from_id.nil? || raw_from_id.match?(/\A[0-9]+\z/)
r["from_id"] = canonical_from_id
elsif raw_from_id.start_with?("!") && raw_from_id.casecmp(canonical_from_id) != 0
r["from_id"] = canonical_from_id
end
end
if DEBUG && (r["from_id"].nil? || r["from_id"].to_s.empty?)
Kernel.warn "[debug] row after processing: #{r.inspect}"
end
@@ -236,6 +420,40 @@ ensure
db&.close
end
# Retrieve recorded position packets ordered by receive time.
#
# @param limit [Integer] maximum number of rows returned.
# @return [Array<Hash>] collection of position rows formatted for the API,
#   with position_time normalized to an Integer and a derived
#   position_time_iso UTC timestamp added when present.
def query_positions(limit)
  db = open_database(readonly: true)
  db.results_as_hash = true
  rows = db.execute <<~SQL, [limit]
    SELECT id, node_id, node_num, rx_time, rx_iso, position_time,
           to_id, latitude, longitude, altitude, location_source,
           precision_bits, sats_in_view, pdop, ground_speed,
           ground_track, snr, rssi, hop_limit, bitfield,
           payload_b64
    FROM positions
    ORDER BY rx_time DESC
    LIMIT ?
  SQL
  rows.each do |r|
    pt = r["position_time"]
    if pt
      # Prefer a strict base-10 parse; Integer() raises on non-string input
      # when a base is given (and on malformed strings), in which case fall
      # back to the lenient coercion helper.
      begin
        r["position_time"] = Integer(pt, 10)
      rescue ArgumentError, TypeError
        r["position_time"] = coerce_integer(pt)
      end
    end
    pt_val = r["position_time"]
    # Expose a human-readable UTC timestamp alongside the epoch value.
    r["position_time_iso"] = Time.at(pt_val).utc.iso8601 if pt_val
  end
  rows
ensure
  db&.close
end
# GET /api/messages
#
# Returns a JSON array of stored text messages including node metadata.
@@ -245,6 +463,15 @@ get "/api/messages" do
query_messages(limit).to_json
end
# GET /api/positions
#
# Returns a JSON array of recorded position packets.
#
# Query params:
#   limit - maximum rows to return (default 200, clamped to 1..1000).
get "/api/positions" do
  content_type :json
  requested = params["limit"]&.to_i || 200
  # Clamp instead of trusting the client: SQLite treats a negative LIMIT as
  # "no limit", so raw input such as limit=-1 would bypass the 1000-row cap.
  limit = requested.clamp(1, 1000)
  query_positions(limit).to_json
end
# Determine the numeric node reference for a canonical node identifier.
#
# The Meshtastic protobuf encodes the node ID as a hexadecimal string prefixed
@@ -287,6 +514,117 @@ rescue ArgumentError
nil
end
# Determine canonical node identifiers and derived metadata for a reference.
#
# @param node_ref [Object] raw node identifier or numeric reference.
# @param fallback_num [Object] optional numeric reference used when the
#   identifier does not encode the value directly.
# @return [Array(String, Integer, String), nil] tuple containing the canonical
#   node ID, numeric node reference, and uppercase short identifier suffix when
#   the reference can be parsed. Returns nil when the reference cannot be
#   converted into a canonical ID.
def canonical_node_parts(node_ref, fallback_num = nil)
  fallback = coerce_integer(fallback_num)
  hex = nil
  num = nil
  # Classify the reference: it may arrive as a number, a "!hex" ID, a "0x"
  # literal, a decimal string, or bare hex.
  case node_ref
  when Integer
    num = node_ref
  when Numeric
    num = node_ref.to_i
  when String
    trimmed = node_ref.strip
    return nil if trimmed.empty?
    if trimmed.start_with?("!")
      # Canonical Meshtastic form: "!<hex>".
      hex = trimmed.delete_prefix("!")
    elsif trimmed.match?(/\A0[xX][0-9A-Fa-f]+\z/)
      hex = trimmed[2..].to_s
    elsif trimmed.match?(/\A-?\d+\z/)
      num = trimmed.to_i
    elsif trimmed.match?(/\A[0-9A-Fa-f]+\z/)
      # Bare hex with no prefix, e.g. "abcd1234".
      hex = trimmed
    else
      return nil
    end
  when nil
    num = fallback if fallback
  else
    return nil
  end
  num ||= fallback if fallback
  if hex
    begin
      num ||= Integer(hex, 16)
    rescue ArgumentError
      return nil
    end
  elsif num
    # Negative numeric references cannot map to a node ID.
    return nil if num.negative?
    hex = format("%08x", num & 0xFFFFFFFF)
  else
    return nil
  end
  return nil if hex.nil? || hex.empty?
  # Re-parse the hex strictly so malformed fragments are rejected.
  begin
    parsed = Integer(hex, 16)
  rescue ArgumentError
    return nil
  end
  # Node numbers are 32-bit; mask before formatting the canonical ID.
  parsed &= 0xFFFFFFFF
  canonical_hex = format("%08x", parsed)
  # Short ID convention: last four hex digits, uppercased.
  short_id = canonical_hex[-4, 4].upcase
  ["!#{canonical_hex}", parsed, short_id]
end
# Ensure a placeholder node entry exists for the provided identifier.
#
# Messages and telemetry can reference nodes before the daemon has received a
# full node snapshot. When this happens we create a minimal hidden entry so the
# sender can be resolved in the UI until richer metadata becomes available.
#
# @param db [SQLite3::Database] open database handle.
# @param node_ref [Object] raw identifier extracted from the payload.
# @param fallback_num [Object] optional numeric reference used when the
#   identifier is missing.
# @param heard_time [Object] optional timestamp stored as both first_heard and
#   last_heard of the placeholder.
# @return [Boolean, nil] true when a placeholder row was inserted, false when
#   a concurrent writer inserted it first, nil when the reference was unusable
#   or the node already existed.
def ensure_unknown_node(db, node_ref, fallback_num = nil, heard_time: nil)
  parts = canonical_node_parts(node_ref, fallback_num)
  return unless parts
  node_id, node_num, short_id = parts
  # Cheap existence probe avoids a write (and busy retry) in the common case.
  existing = db.get_first_value(
    "SELECT 1 FROM nodes WHERE node_id = ? LIMIT 1",
    [node_id],
  )
  return if existing
  long_name = "Meshtastic #{short_id}"
  heard_time = coerce_integer(heard_time)
  inserted = false
  with_busy_retry do
    # INSERT OR IGNORE tolerates the race where the node appeared between
    # the probe above and this write.
    db.execute(
      <<~SQL,
        INSERT OR IGNORE INTO nodes(node_id,num,short_name,long_name,role,last_heard,first_heard)
        VALUES (?,?,?,?,?,?,?)
      SQL
      [node_id, node_num, short_id, long_name, "CLIENT_HIDDEN", heard_time, heard_time],
    )
    inserted = db.changes.positive?
  end
  inserted
end
# Insert or update a node row with the most recent metrics.
#
# @param db [SQLite3::Database] open database handle.
@@ -297,12 +635,13 @@ def upsert_node(db, node_id, n)
met = n["deviceMetrics"] || {}
pos = n["position"] || {}
role = user["role"] || "CLIENT"
lh = n["lastHeard"]
pt = pos["time"]
lh = coerce_integer(n["lastHeard"])
pt = coerce_integer(pos["time"])
now = Time.now.to_i
pt = nil if pt && pt > now
lh = now if lh && lh > now
lh = pt if pt && (!lh || lh < pt)
lh ||= now
bool = ->(v) {
case v
when true then 1
@@ -348,6 +687,7 @@ def upsert_node(db, node_id, n)
num=excluded.num, short_name=excluded.short_name, long_name=excluded.long_name, macaddr=excluded.macaddr,
hw_model=excluded.hw_model, role=excluded.role, public_key=excluded.public_key, is_unmessagable=excluded.is_unmessagable,
is_favorite=excluded.is_favorite, hops_away=excluded.hops_away, snr=excluded.snr, last_heard=excluded.last_heard,
first_heard=COALESCE(nodes.first_heard, excluded.first_heard, excluded.last_heard),
battery_level=excluded.battery_level, voltage=excluded.voltage, channel_utilization=excluded.channel_utilization,
air_util_tx=excluded.air_util_tx, uptime_seconds=excluded.uptime_seconds, position_time=excluded.position_time,
location_source=excluded.location_source, latitude=excluded.latitude, longitude=excluded.longitude,
@@ -411,6 +751,272 @@ def prefer_canonical_sender?(message)
message.is_a?(Hash) && message.key?("packet_id") && !message.key?("id")
end
# Update or create a node entry using information from a position payload.
#
# @param db [SQLite3::Database] open database handle.
# @param node_id [String, nil] canonical node identifier when available.
# @param node_num [Integer, nil] numeric node reference if known.
# @param rx_time [Integer] time the packet was received by the gateway.
# @param position_time [Integer, nil] timestamp reported by the device.
# @param location_source [String, nil] location source flag from the packet.
# @param latitude [Float, nil] reported latitude.
# @param longitude [Float, nil] reported longitude.
# @param altitude [Float, nil] reported altitude.
# @param snr [Float, nil] link SNR for the packet.
def update_node_from_position(db, node_id, node_num, rx_time, position_time, location_source, latitude, longitude, altitude, snr)
  num = coerce_integer(node_num)
  id = string_or_nil(node_id)
  # Normalize "!HEX" style IDs to lowercase canonical form.
  if id&.start_with?("!")
    id = "!#{id.delete_prefix("!").downcase}"
  end
  # Fall back to deriving the ID from the numeric reference.
  id ||= format("!%08x", num & 0xFFFFFFFF) if num
  return unless id
  now = Time.now.to_i
  # Clamp timestamps coming from the future (bad device clocks).
  rx = coerce_integer(rx_time) || now
  rx = now if rx && rx > now
  pos_time = coerce_integer(position_time)
  pos_time = nil if pos_time && pos_time > now
  # last_heard is the freshest of receive time and reported position time.
  last_heard = [rx, pos_time].compact.max || rx
  last_heard = now if last_heard && last_heard > now
  loc = string_or_nil(location_source)
  lat = coerce_float(latitude)
  lon = coerce_float(longitude)
  alt = coerce_float(altitude)
  snr_val = coerce_float(snr)
  row = [
    id,
    num,
    last_heard,
    last_heard,
    pos_time,
    loc,
    lat,
    lon,
    alt,
    snr_val,
  ]
  with_busy_retry do
    # Upsert semantics: last_heard only moves forward; position fields are
    # replaced only when this packet's position_time is at least as new as
    # the stored one AND the incoming value is non-NULL, so stale or partial
    # packets never erase better data.
    db.execute <<~SQL, row
      INSERT INTO nodes(node_id,num,last_heard,first_heard,position_time,location_source,latitude,longitude,altitude,snr)
      VALUES (?,?,?,?,?,?,?,?,?,?)
      ON CONFLICT(node_id) DO UPDATE SET
        num=COALESCE(excluded.num,nodes.num),
        snr=COALESCE(excluded.snr,nodes.snr),
        last_heard=MAX(COALESCE(nodes.last_heard,0),COALESCE(excluded.last_heard,0)),
        first_heard=COALESCE(nodes.first_heard, excluded.first_heard, excluded.last_heard),
        position_time=CASE
          WHEN COALESCE(excluded.position_time,0) >= COALESCE(nodes.position_time,0)
            THEN excluded.position_time
          ELSE nodes.position_time
        END,
        location_source=CASE
          WHEN COALESCE(excluded.position_time,0) >= COALESCE(nodes.position_time,0)
            AND excluded.location_source IS NOT NULL
            THEN excluded.location_source
          ELSE nodes.location_source
        END,
        latitude=CASE
          WHEN COALESCE(excluded.position_time,0) >= COALESCE(nodes.position_time,0)
            AND excluded.latitude IS NOT NULL
            THEN excluded.latitude
          ELSE nodes.latitude
        END,
        longitude=CASE
          WHEN COALESCE(excluded.position_time,0) >= COALESCE(nodes.position_time,0)
            AND excluded.longitude IS NOT NULL
            THEN excluded.longitude
          ELSE nodes.longitude
        END,
        altitude=CASE
          WHEN COALESCE(excluded.position_time,0) >= COALESCE(nodes.position_time,0)
            AND excluded.altitude IS NOT NULL
            THEN excluded.altitude
          ELSE nodes.altitude
        END
    SQL
  end
end
# Insert a position packet into the history table and refresh node metadata.
#
# @param db [SQLite3::Database] open database handle.
# @param payload [Hash] position payload provided by the data daemon.
def insert_position(db, payload)
  pos_id = coerce_integer(payload["id"] || payload["packet_id"])
  return unless pos_id
  now = Time.now.to_i
  # Clamp future receive times (bad clocks) and derive the ISO form when
  # the daemon did not provide one.
  rx_time = coerce_integer(payload["rx_time"])
  rx_time = now if rx_time.nil? || rx_time > now
  rx_iso = string_or_nil(payload["rx_iso"])
  rx_iso ||= Time.at(rx_time).utc.iso8601
  # --- Sender identity: try the textual ID fields first, then fall back to
  # the numeric reference, normalizing "!HEX" IDs to lowercase.
  raw_node_id = payload["node_id"] || payload["from_id"] || payload["from"]
  node_id = string_or_nil(raw_node_id)
  node_id = "!#{node_id.delete_prefix("!").downcase}" if node_id&.start_with?("!")
  raw_node_num = coerce_integer(payload["node_num"]) || coerce_integer(payload["num"])
  node_id ||= format("!%08x", raw_node_num & 0xFFFFFFFF) if node_id.nil? && raw_node_num
  # resolve_node_num inspects a "num" key, so feed it a copy that carries
  # the numeric reference without mutating the caller's payload.
  payload_for_num = payload.is_a?(Hash) ? payload.dup : {}
  payload_for_num["num"] ||= raw_node_num if raw_node_num
  node_num = resolve_node_num(node_id, payload_for_num)
  node_num ||= raw_node_num
  canonical = normalize_node_id(db, node_id || node_num)
  node_id = canonical if canonical
  # Make sure the sender is representable in the UI even before a full node
  # snapshot arrives.
  ensure_unknown_node(db, node_id || node_num, node_num, heard_time: rx_time)
  to_id = string_or_nil(payload["to_id"] || payload["to"])
  position_section = payload["position"].is_a?(Hash) ? payload["position"] : {}
  # --- Coordinates: accept top-level floats, nested position floats, or the
  # raw protobuf integer fields (degrees * 1e7).
  lat = coerce_float(payload["latitude"]) || coerce_float(position_section["latitude"])
  lon = coerce_float(payload["longitude"]) || coerce_float(position_section["longitude"])
  alt = coerce_float(payload["altitude"]) || coerce_float(position_section["altitude"])
  lat ||= begin
    lat_i = coerce_integer(position_section["latitudeI"] || position_section["latitude_i"] || position_section.dig("raw", "latitude_i"))
    lat_i ? lat_i / 1e7 : nil
  end
  lon ||= begin
    lon_i = coerce_integer(position_section["longitudeI"] || position_section["longitude_i"] || position_section.dig("raw", "longitude_i"))
    lon_i ? lon_i / 1e7 : nil
  end
  alt ||= coerce_float(position_section.dig("raw", "altitude"))
  # --- Optional GPS metadata: each field may appear at the top level, in the
  # nested position section (snake_case or camelCase), or under "raw".
  position_time = coerce_integer(
    payload["position_time"] ||
    position_section["time"] ||
    position_section.dig("raw", "time"),
  )
  location_source = string_or_nil(
    payload["location_source"] ||
    payload["locationSource"] ||
    position_section["location_source"] ||
    position_section["locationSource"] ||
    position_section.dig("raw", "location_source"),
  )
  precision_bits = coerce_integer(
    payload["precision_bits"] ||
    payload["precisionBits"] ||
    position_section["precision_bits"] ||
    position_section["precisionBits"] ||
    position_section.dig("raw", "precision_bits"),
  )
  sats_in_view = coerce_integer(
    payload["sats_in_view"] ||
    payload["satsInView"] ||
    position_section["sats_in_view"] ||
    position_section["satsInView"] ||
    position_section.dig("raw", "sats_in_view"),
  )
  pdop = coerce_float(
    payload["pdop"] ||
    payload["PDOP"] ||
    position_section["pdop"] ||
    position_section["PDOP"] ||
    position_section.dig("raw", "PDOP") ||
    position_section.dig("raw", "pdop"),
  )
  ground_speed = coerce_float(
    payload["ground_speed"] ||
    payload["groundSpeed"] ||
    position_section["ground_speed"] ||
    position_section["groundSpeed"] ||
    position_section.dig("raw", "ground_speed"),
  )
  ground_track = coerce_float(
    payload["ground_track"] ||
    payload["groundTrack"] ||
    position_section["ground_track"] ||
    position_section["groundTrack"] ||
    position_section.dig("raw", "ground_track"),
  )
  snr = coerce_float(payload["snr"] || payload["rx_snr"] || payload["rxSnr"])
  rssi = coerce_integer(payload["rssi"] || payload["rx_rssi"] || payload["rxRssi"])
  hop_limit = coerce_integer(payload["hop_limit"] || payload["hopLimit"])
  bitfield = coerce_integer(payload["bitfield"])
  payload_b64 = string_or_nil(payload["payload_b64"] || payload["payload"])
  payload_b64 ||= string_or_nil(position_section.dig("payload", "__bytes_b64__"))
  row = [
    pos_id,
    node_id,
    node_num,
    rx_time,
    rx_iso,
    position_time,
    to_id,
    lat,
    lon,
    alt,
    location_source,
    precision_bits,
    sats_in_view,
    pdop,
    ground_speed,
    ground_track,
    snr,
    rssi,
    hop_limit,
    bitfield,
    payload_b64,
  ]
  with_busy_retry do
    # On replay of the same packet id, refresh rx_time/rx_iso but only fill
    # in the remaining fields when the new packet actually provides them.
    db.execute <<~SQL, row
      INSERT INTO positions(id,node_id,node_num,rx_time,rx_iso,position_time,to_id,latitude,longitude,altitude,location_source,
                            precision_bits,sats_in_view,pdop,ground_speed,ground_track,snr,rssi,hop_limit,bitfield,payload_b64)
      VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
      ON CONFLICT(id) DO UPDATE SET
        node_id=COALESCE(excluded.node_id,positions.node_id),
        node_num=COALESCE(excluded.node_num,positions.node_num),
        rx_time=excluded.rx_time,
        rx_iso=excluded.rx_iso,
        position_time=COALESCE(excluded.position_time,positions.position_time),
        to_id=COALESCE(excluded.to_id,positions.to_id),
        latitude=COALESCE(excluded.latitude,positions.latitude),
        longitude=COALESCE(excluded.longitude,positions.longitude),
        altitude=COALESCE(excluded.altitude,positions.altitude),
        location_source=COALESCE(excluded.location_source,positions.location_source),
        precision_bits=COALESCE(excluded.precision_bits,positions.precision_bits),
        sats_in_view=COALESCE(excluded.sats_in_view,positions.sats_in_view),
        pdop=COALESCE(excluded.pdop,positions.pdop),
        ground_speed=COALESCE(excluded.ground_speed,positions.ground_speed),
        ground_track=COALESCE(excluded.ground_track,positions.ground_track),
        snr=COALESCE(excluded.snr,positions.snr),
        rssi=COALESCE(excluded.rssi,positions.rssi),
        hop_limit=COALESCE(excluded.hop_limit,positions.hop_limit),
        bitfield=COALESCE(excluded.bitfield,positions.bitfield),
        payload_b64=COALESCE(excluded.payload_b64,positions.payload_b64)
    SQL
  end
  # Propagate the freshest position data onto the node row itself.
  update_node_from_position(
    db,
    node_id,
    node_num,
    rx_time,
    position_time,
    location_source,
    lat,
    lon,
    alt,
    snr,
  )
end
# Insert a text message if it does not already exist.
#
# @param db [SQLite3::Database] open database handle.
@@ -418,54 +1024,112 @@ end
def insert_message(db, m)
msg_id = m["id"] || m["packet_id"]
return unless msg_id
rx_time = m["rx_time"]&.to_i || Time.now.to_i
rx_iso = m["rx_iso"] || Time.at(rx_time).utc.iso8601
raw_from_id = m["from_id"]
if raw_from_id.nil? || raw_from_id.to_s.strip.empty?
alt_from = m["from"]
raw_from_id = alt_from unless alt_from.nil? || alt_from.to_s.strip.empty?
end
trimmed_from_id = raw_from_id.nil? ? nil : raw_from_id.to_s.strip
trimmed_from_id = nil if trimmed_from_id&.empty?
canonical_from_id = normalize_node_id(db, raw_from_id)
use_canonical = canonical_from_id && (trimmed_from_id.nil? || prefer_canonical_sender?(m))
from_id = if use_canonical
canonical_from_id.to_s.strip
else
trimmed_from_id
trimmed_from_id = string_or_nil(raw_from_id)
canonical_from_id = string_or_nil(normalize_node_id(db, raw_from_id))
from_id = trimmed_from_id
if canonical_from_id
if from_id.nil?
from_id = canonical_from_id
elsif prefer_canonical_sender?(m)
from_id = canonical_from_id
elsif from_id.start_with?("!") && from_id.casecmp(canonical_from_id) != 0
from_id = canonical_from_id
end
from_id = nil if from_id&.empty?
end
raw_to_id = m["to_id"]
raw_to_id = m["to"] if raw_to_id.nil? || raw_to_id.to_s.strip.empty?
trimmed_to_id = string_or_nil(raw_to_id)
canonical_to_id = string_or_nil(normalize_node_id(db, raw_to_id))
to_id = trimmed_to_id
if canonical_to_id
if to_id.nil?
to_id = canonical_to_id
elsif to_id.start_with?("!") && to_id.casecmp(canonical_to_id) != 0
to_id = canonical_to_id
end
end
encrypted = string_or_nil(m["encrypted"])
ensure_unknown_node(db, from_id || raw_from_id, m["from_num"], heard_time: rx_time)
row = [
msg_id,
rx_time,
rx_iso,
from_id,
m["to_id"],
to_id,
m["channel"],
m["portnum"],
m["text"],
encrypted,
m["snr"],
m["rssi"],
m["hop_limit"],
]
with_busy_retry do
existing = db.get_first_row("SELECT from_id FROM messages WHERE id = ?", [msg_id])
existing = db.get_first_row(
"SELECT from_id, to_id, encrypted FROM messages WHERE id = ?",
[msg_id],
)
if existing
updates = {}
if from_id
existing_from = existing.is_a?(Hash) ? existing["from_id"] : existing[0]
existing_from_str = existing_from&.to_s
should_update = existing_from_str.nil? || existing_from_str.strip.empty?
should_update ||= existing_from != from_id
db.execute("UPDATE messages SET from_id = ? WHERE id = ?", [from_id, msg_id]) if should_update
updates["from_id"] = from_id if should_update
end
if to_id
existing_to = existing.is_a?(Hash) ? existing["to_id"] : existing[1]
existing_to_str = existing_to&.to_s
should_update = existing_to_str.nil? || existing_to_str.strip.empty?
should_update ||= existing_to != to_id
updates["to_id"] = to_id if should_update
end
if encrypted
existing_encrypted = existing.is_a?(Hash) ? existing["encrypted"] : existing[2]
existing_encrypted_str = existing_encrypted&.to_s
should_update = existing_encrypted_str.nil? || existing_encrypted_str.strip.empty?
should_update ||= existing_encrypted != encrypted
updates["encrypted"] = encrypted if should_update
end
unless updates.empty?
assignments = updates.keys.map { |column| "#{column} = ?" }.join(", ")
db.execute("UPDATE messages SET #{assignments} WHERE id = ?", updates.values + [msg_id])
end
else
begin
db.execute <<~SQL, row
INSERT INTO messages(id,rx_time,rx_iso,from_id,to_id,channel,portnum,text,snr,rssi,hop_limit)
VALUES (?,?,?,?,?,?,?,?,?,?,?)
INSERT INTO messages(id,rx_time,rx_iso,from_id,to_id,channel,portnum,text,encrypted,snr,rssi,hop_limit)
VALUES (?,?,?,?,?,?,?,?,?,?,?,?)
SQL
rescue SQLite3::ConstraintException
db.execute("UPDATE messages SET from_id = ? WHERE id = ?", [from_id, msg_id]) if from_id
fallback_updates = {}
fallback_updates["from_id"] = from_id if from_id
fallback_updates["to_id"] = to_id if to_id
fallback_updates["encrypted"] = encrypted if encrypted
unless fallback_updates.empty?
assignments = fallback_updates.keys.map { |column| "#{column} = ?" }.join(", ")
db.execute("UPDATE messages SET #{assignments} WHERE id = ?", fallback_updates.values + [msg_id])
end
end
end
end
@@ -536,17 +1200,63 @@ ensure
db&.close
end
# POST /api/positions
#
# Ingests one position packet or a batch of them. The request body must be
# JSON: either a single object or an array of objects (capped at 1000
# entries). Every entry is persisted through insert_position, and the
# database handle is closed even when validation halts the request early.
post "/api/positions" do
  require_token!
  content_type :json
  begin
    parsed = JSON.parse(read_json_body)
  rescue JSON::ParserError
    halt 400, { error: "invalid JSON" }.to_json
  end
  # Normalise a single packet into a one-element batch.
  entries = parsed.is_a?(Array) ? parsed : [parsed]
  halt 400, { error: "too many positions" }.to_json if entries.size > 1000
  db = open_database
  entries.each { |entry| insert_position(db, entry) }
  { status: "ok" }.to_json
ensure
  db&.close
end
# GET /potatomesh-logo.svg
#
# Serves the site logo from the public folder with caching headers,
# returning 404 when the file is missing or unreadable.
get "/potatomesh-logo.svg" do
# Sinatra resolves the app root via settings.root (usually the directory
# containing app.rb); the logo is looked up inside the public folder.
path = File.expand_path("potatomesh-logo.svg", settings.public_folder)
# Debug output to the log (visible in `docker logs`).
# NOTE(review): the log string below contains a literal newline between
# "exist=..." and "file=..." — looks unintentional; confirm and join onto
# one line if so (left untouched here because it is runtime output).
settings.logger&.info("logo_path=#{path} exist=#{File.exist?(path)}
file=#{File.file?(path)}")
halt 404, "Not Found" unless File.exist?(path) && File.readable?(path)
content_type "image/svg+xml"
# Let clients revalidate against the file's mtime and cache for an hour.
last_modified File.mtime(path)
cache_control :public, max_age: 3600
send_file path
end
# GET /
#
# Renders the main site with configuration-driven defaults for the template.
get "/" do
meta = meta_configuration
# First-time visitors default to the dark theme for one week; SameSite=Lax
# keeps the cookie on ordinary top-level navigations.
response.set_cookie("theme", value: "dark", path: "/", max_age: 60 * 60 * 24 * 7, same_site: :lax) unless request.cookies["theme"]
erb :index, locals: {
# NOTE(review): several keys below appear twice (site_name,
# default_channel, default_frequency, matrix_room). Ruby keeps the last
# occurrence, so the earlier constant-based values are dead — this looks
# like merge/diff residue; confirm intent and delete the duplicates.
site_name: SITE_NAME,
default_channel: DEFAULT_CHANNEL,
default_frequency: DEFAULT_FREQUENCY,
site_name: meta[:name],
meta_title: meta[:title],
meta_name: meta[:name],
meta_description: meta[:description],
default_channel: sanitized_default_channel,
default_frequency: sanitized_default_frequency,
map_center_lat: MAP_CENTER_LAT,
map_center_lon: MAP_CENTER_LON,
max_node_distance_km: MAX_NODE_DISTANCE_KM,
matrix_room: MATRIX_ROOM,
matrix_room: sanitized_matrix_room,
version: APP_VERSION,
}
end

View File

@@ -17,4 +17,8 @@
set -euo pipefail
bundle install
exec ruby app.rb -p 41447 -o 127.0.0.1
PORT=${PORT:-41447}
BIND_ADDRESS=${BIND_ADDRESS:-0.0.0.0}
exec ruby app.rb -p "${PORT}" -o "${BIND_ADDRESS}"

BIN
web/public/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.2 KiB

View File

@@ -38,6 +38,7 @@ RSpec.describe "Potato Mesh Sinatra app" do
with_db do |db|
db.execute("DELETE FROM messages")
db.execute("DELETE FROM nodes")
db.execute("DELETE FROM positions")
end
end
@@ -184,6 +185,28 @@ RSpec.describe "Potato Mesh Sinatra app" do
get "/"
expect(last_response).to be_ok
end
it "includes the application version in the footer" do
get "/"
expect(last_response.body).to include("#{APP_VERSION}")
end
it "includes SEO metadata from configuration" do
stub_const("SITE_NAME", "Spec Mesh Title")
stub_const("DEFAULT_CHANNEL", "#SpecChannel")
stub_const("DEFAULT_FREQUENCY", "915MHz")
stub_const("MAX_NODE_DISTANCE_KM", 120.5)
stub_const("MATRIX_ROOM", " #spec-room:example.org ")
expected_description = "Live Meshtastic mesh map for Spec Mesh Title on #SpecChannel (915MHz). Track nodes, messages, and coverage in real time. Shows nodes within roughly 120.5 km of the map center. Join the community in #spec-room:example.org on Matrix."
get "/"
expect(last_response.body).to include(%(meta name="description" content="#{expected_description}" />))
expect(last_response.body).to include('<meta property="og:title" content="Spec Mesh Title" />')
expect(last_response.body).to include('<meta property="og:site_name" content="Spec Mesh Title" />')
expect(last_response.body).to include('<meta name="twitter:image" content="http://example.org/potatomesh-logo.svg" />')
end
end
describe "database initialization" do
@@ -298,6 +321,65 @@ RSpec.describe "Potato Mesh Sinatra app" do
expect(JSON.parse(last_response.body)).to eq("error" => "invalid JSON")
end
it "updates timestamps when the payload omits lastHeard" do
node_id = "!spectime01"
payload = {
node_id => {
"user" => { "shortName" => "Spec Time" },
},
}
post "/api/nodes", payload.to_json, auth_headers
expect(last_response).to be_ok
with_db(readonly: true) do |db|
db.results_as_hash = true
row = db.get_first_row(
"SELECT last_heard, first_heard FROM nodes WHERE node_id = ?",
[node_id],
)
expect(row["last_heard"]).to eq(reference_time.to_i)
expect(row["first_heard"]).to eq(reference_time.to_i)
end
end
it "preserves the original first_heard when updating nodes" do
node_id = "!spectime02"
initial_first = reference_time.to_i - 600
initial_last = reference_time.to_i - 300
with_db do |db|
db.execute(
"INSERT INTO nodes(node_id, last_heard, first_heard) VALUES (?,?,?)",
[node_id, initial_last, initial_first],
)
end
payload = {
node_id => {
"user" => { "shortName" => "Spec Update" },
"lastHeard" => reference_time.to_i,
},
}
post "/api/nodes", payload.to_json, auth_headers
expect(last_response).to be_ok
with_db(readonly: true) do |db|
db.results_as_hash = true
row = db.get_first_row(
"SELECT last_heard, first_heard FROM nodes WHERE node_id = ?",
[node_id],
)
expect(row["last_heard"]).to eq(reference_time.to_i)
expect(row["first_heard"]).to eq(initial_first)
end
end
it "returns 400 when more than 1000 nodes are provided" do
payload = (0..1000).each_with_object({}) do |i, acc|
acc["node-#{i}"] = {}
@@ -391,6 +473,62 @@ RSpec.describe "Potato Mesh Sinatra app" do
end
end
describe "#ensure_unknown_node" do
it "creates a hidden placeholder with timestamps for chat notifications" do
with_db do |db|
created = ensure_unknown_node(db, "!1234abcd", nil, heard_time: reference_time.to_i)
expect(created).to be_truthy
end
with_db(readonly: true) do |db|
db.results_as_hash = true
row = db.get_first_row(
<<~SQL,
SELECT short_name, long_name, role, last_heard, first_heard
FROM nodes
WHERE node_id = ?
SQL
["!1234abcd"],
)
expect(row["short_name"]).to eq("ABCD")
expect(row["long_name"]).to eq("Meshtastic ABCD")
expect(row["role"]).to eq("CLIENT_HIDDEN")
expect(row["last_heard"]).to eq(reference_time.to_i)
expect(row["first_heard"]).to eq(reference_time.to_i)
end
end
it "leaves timestamps nil when no receive time is provided" do
with_db do |db|
created = ensure_unknown_node(db, "!1111beef", nil)
expect(created).to be_truthy
end
with_db(readonly: true) do |db|
db.results_as_hash = true
row = db.get_first_row(
<<~SQL,
SELECT last_heard, first_heard
FROM nodes
WHERE node_id = ?
SQL
["!1111beef"],
)
expect(row["last_heard"]).to be_nil
expect(row["first_heard"]).to be_nil
end
end
it "returns false when the node already exists" do
with_db do |db|
expect(ensure_unknown_node(db, "!0000c0de", nil)).to be_truthy
expect(ensure_unknown_node(db, "!0000c0de", nil)).to be_falsey
end
end
end
describe "POST /api/messages" do
it "persists messages from fixture data" do
import_nodes_fixture
@@ -427,6 +565,41 @@ RSpec.describe "Potato Mesh Sinatra app" do
end
end
it "creates hidden nodes for unknown message senders" do
payload = {
"id" => 9_999,
"rx_time" => reference_time.to_i,
"rx_iso" => reference_time.iso8601,
"from_id" => "!feedf00d",
"to_id" => "^all",
"channel" => 0,
"portnum" => "TEXT_MESSAGE_APP",
"text" => "Spec placeholder message",
}
post "/api/messages", payload.to_json, auth_headers
expect(last_response).to be_ok
expect(JSON.parse(last_response.body)).to eq("status" => "ok")
with_db(readonly: true) do |db|
db.results_as_hash = true
row = db.get_first_row(
"SELECT node_id, num, short_name, long_name, role, last_heard, first_heard FROM nodes WHERE node_id = ?",
["!feedf00d"],
)
expect(row).not_to be_nil
expect(row["node_id"]).to eq("!feedf00d")
expect(row["num"]).to eq(0xfeedf00d)
expect(row["short_name"]).to eq("F00D")
expect(row["long_name"]).to eq("Meshtastic F00D")
expect(row["role"]).to eq("CLIENT_HIDDEN")
expect(row["last_heard"]).to eq(payload["rx_time"])
expect(row["first_heard"]).to eq(payload["rx_time"])
end
end
it "returns 400 when the payload is not valid JSON" do
post "/api/messages", "{", auth_headers
@@ -451,6 +624,225 @@ RSpec.describe "Potato Mesh Sinatra app" do
end
end
describe "POST /api/positions" do
it "stores position packets and updates node metadata" do
node_id = "!specpos01"
node_num = 0x1234_5678
initial_last_heard = reference_time.to_i - 600
node_payload = {
node_id => {
"num" => node_num,
"user" => { "shortName" => "SpecPos" },
"lastHeard" => initial_last_heard,
"position" => {
"time" => initial_last_heard - 60,
"latitude" => 52.0,
"longitude" => 13.0,
},
},
}
post "/api/nodes", node_payload.to_json, auth_headers
expect(last_response).to be_ok
rx_time = reference_time.to_i - 120
position_time = rx_time - 30
raw_payload = { "time" => position_time, "latitude_i" => (52.5 * 1e7).to_i }
position_payload = {
"id" => 9_001,
"node_id" => node_id,
"node_num" => node_num,
"rx_time" => rx_time,
"rx_iso" => Time.at(rx_time).utc.iso8601,
"to_id" => "^all",
"latitude" => 52.5,
"longitude" => 13.4,
"altitude" => 42.0,
"position_time" => position_time,
"location_source" => "LOC_INTERNAL",
"precision_bits" => 15,
"sats_in_view" => 6,
"pdop" => 2.5,
"ground_speed" => 3.2,
"ground_track" => 180.0,
"snr" => -8.5,
"rssi" => -90,
"hop_limit" => 3,
"bitfield" => 1,
"payload_b64" => "AQI=",
"raw" => raw_payload,
}
post "/api/positions", position_payload.to_json, auth_headers
expect(last_response).to be_ok
expect(JSON.parse(last_response.body)).to eq("status" => "ok")
with_db(readonly: true) do |db|
db.results_as_hash = true
row = db.get_first_row("SELECT * FROM positions WHERE id = ?", [9_001])
expect(row["node_id"]).to eq(node_id)
expect(row["node_num"]).to eq(node_num)
expect(row["rx_time"]).to eq(rx_time)
expect(row["rx_iso"]).to eq(Time.at(rx_time).utc.iso8601)
expect(row["position_time"]).to eq(position_time)
expect_same_value(row["latitude"], 52.5)
expect_same_value(row["longitude"], 13.4)
expect_same_value(row["altitude"], 42.0)
expect(row["location_source"]).to eq("LOC_INTERNAL")
expect(row["precision_bits"]).to eq(15)
expect(row["sats_in_view"]).to eq(6)
expect_same_value(row["pdop"], 2.5)
expect_same_value(row["ground_speed"], 3.2)
expect_same_value(row["ground_track"], 180.0)
expect_same_value(row["snr"], -8.5)
expect(row["rssi"]).to eq(-90)
expect(row["hop_limit"]).to eq(3)
expect(row["bitfield"]).to eq(1)
expect(row["payload_b64"]).to eq("AQI=")
end
with_db(readonly: true) do |db|
db.results_as_hash = true
node_row = db.get_first_row(
"SELECT last_heard, position_time, latitude, longitude, altitude, location_source, snr FROM nodes WHERE node_id = ?",
[node_id],
)
expect(node_row["last_heard"]).to eq(rx_time)
expect(node_row["position_time"]).to eq(position_time)
expect_same_value(node_row["latitude"], 52.5)
expect_same_value(node_row["longitude"], 13.4)
expect_same_value(node_row["altitude"], 42.0)
expect(node_row["location_source"]).to eq("LOC_INTERNAL")
expect_same_value(node_row["snr"], -8.5)
end
end
it "creates node records when none exist" do
node_id = "!specnew01"
node_num = 0xfeed_cafe
rx_time = reference_time.to_i - 60
position_time = rx_time - 10
payload = {
"id" => 9_002,
"node_id" => node_id,
"node_num" => node_num,
"rx_time" => rx_time,
"rx_iso" => Time.at(rx_time).utc.iso8601,
"latitude" => 52.1,
"longitude" => 13.1,
"altitude" => 33.0,
"position_time" => position_time,
"location_source" => "LOC_EXTERNAL",
}
post "/api/positions", payload.to_json, auth_headers
expect(last_response).to be_ok
with_db(readonly: true) do |db|
db.results_as_hash = true
node_row = db.get_first_row("SELECT * FROM nodes WHERE node_id = ?", [node_id])
expect(node_row).not_to be_nil
expect(node_row["num"]).to eq(node_num)
expect(node_row["last_heard"]).to eq(rx_time)
expect(node_row["first_heard"]).to eq(rx_time)
expect(node_row["position_time"]).to eq(position_time)
expect_same_value(node_row["latitude"], 52.1)
expect_same_value(node_row["longitude"], 13.1)
expect_same_value(node_row["altitude"], 33.0)
expect(node_row["location_source"]).to eq("LOC_EXTERNAL")
end
end
it "creates hidden nodes for unknown position senders" do
payload = {
"id" => 42,
"node_id" => "!0badc0de",
"rx_time" => reference_time.to_i,
"rx_iso" => reference_time.iso8601,
"latitude" => 52.1,
"longitude" => 13.1,
}
post "/api/positions", payload.to_json, auth_headers
expect(last_response).to be_ok
expect(JSON.parse(last_response.body)).to eq("status" => "ok")
with_db(readonly: true) do |db|
db.results_as_hash = true
row = db.get_first_row(
"SELECT node_id, num, short_name, long_name, role FROM nodes WHERE node_id = ?",
["!0badc0de"],
)
expect(row).not_to be_nil
expect(row["node_id"]).to eq("!0badc0de")
expect(row["num"]).to eq(0x0badc0de)
expect(row["short_name"]).to eq("C0DE")
expect(row["long_name"]).to eq("Meshtastic C0DE")
expect(row["role"]).to eq("CLIENT_HIDDEN")
end
end
it "fills first_heard when updating an existing node without one" do
node_id = "!specposfh"
rx_time = reference_time.to_i - 90
with_db do |db|
db.execute(
"INSERT INTO nodes(node_id, last_heard, first_heard) VALUES (?,?,?)",
[node_id, nil, nil],
)
end
payload = {
"id" => 51,
"node_id" => node_id,
"rx_time" => rx_time,
"latitude" => 51.5,
"longitude" => -0.12,
}
post "/api/positions", payload.to_json, auth_headers
expect(last_response).to be_ok
with_db(readonly: true) do |db|
db.results_as_hash = true
row = db.get_first_row(
"SELECT last_heard, first_heard FROM nodes WHERE node_id = ?",
[node_id],
)
expect(row["last_heard"]).to eq(rx_time)
expect(row["first_heard"]).to eq(rx_time)
end
end
it "returns 400 when the payload is not valid JSON" do
post "/api/positions", "{", auth_headers
expect(last_response.status).to eq(400)
expect(JSON.parse(last_response.body)).to eq("error" => "invalid JSON")
end
it "returns 400 when more than 1000 positions are provided" do
payload = Array.new(1001) { |i| { "id" => i + 1, "rx_time" => reference_time.to_i - i } }
post "/api/positions", payload.to_json, auth_headers
expect(last_response.status).to eq(400)
expect(JSON.parse(last_response.body)).to eq("error" => "too many positions")
with_db(readonly: true) do |db|
count = db.get_first_value("SELECT COUNT(*) FROM positions")
expect(count).to eq(0)
end
end
end
it "returns 400 when more than 1000 messages are provided" do
payload = Array.new(1001) { |i| { "packet_id" => i + 1 } }
@@ -502,7 +894,9 @@ RSpec.describe "Potato Mesh Sinatra app" do
with_db(readonly: true) do |db|
db.results_as_hash = true
rows = db.execute("SELECT id, from_id, rx_time, rx_iso, text FROM messages ORDER BY id")
rows = db.execute(
"SELECT id, from_id, to_id, rx_time, rx_iso, text, encrypted FROM messages ORDER BY id",
)
expect(rows.size).to eq(2)
@@ -510,18 +904,116 @@ RSpec.describe "Potato Mesh Sinatra app" do
expect(first["id"]).to eq(101)
expect(first["from_id"]).to eq(node_id)
expect(first).not_to have_key("from_node_id")
expect(first).not_to have_key("from_node_num")
expect(first["rx_time"]).to eq(reference_time.to_i)
expect(first["rx_iso"]).to eq(reference_time.utc.iso8601)
expect(first["text"]).to eq("normalized")
expect(first).not_to have_key("to_node_id")
expect(first).not_to have_key("to_node_num")
expect(first["encrypted"]).to be_nil
expect(second["id"]).to eq(102)
expect(second["from_id"]).to be_nil
expect(second).not_to have_key("from_node_id")
expect(second).not_to have_key("from_node_num")
expect(second["rx_time"]).to eq(reference_time.to_i)
expect(second["rx_iso"]).to eq(reference_time.utc.iso8601)
expect(second["text"]).to eq("blank")
expect(second).not_to have_key("to_node_id")
expect(second).not_to have_key("to_node_num")
expect(second["encrypted"]).to be_nil
end
end
it "stores encrypted messages and resolves node references" do
sender_id = "!feedc0de"
sender_num = 0xfeedc0de
receiver_id = "!c0ffee99"
receiver_num = 0xc0ffee99
sender_node = {
"node_id" => sender_id,
"short_name" => "EncS",
"long_name" => "Encrypted Sender",
"hw_model" => "TEST",
"role" => "CLIENT",
"snr" => 5.5,
"battery_level" => 80.0,
"voltage" => 3.9,
"last_heard" => reference_time.to_i - 30,
"position_time" => reference_time.to_i - 60,
"latitude" => 52.1,
"longitude" => 13.1,
"altitude" => 42.0,
}
sender_payload = build_node_payload(sender_node)
sender_payload["num"] = sender_num
receiver_node = {
"node_id" => receiver_id,
"short_name" => "EncR",
"long_name" => "Encrypted Receiver",
"hw_model" => "TEST",
"role" => "CLIENT",
"snr" => 4.25,
"battery_level" => 75.0,
"voltage" => 3.8,
"last_heard" => reference_time.to_i - 40,
"position_time" => reference_time.to_i - 70,
"latitude" => 52.2,
"longitude" => 13.2,
"altitude" => 35.0,
}
receiver_payload = build_node_payload(receiver_node)
receiver_payload["num"] = receiver_num
post "/api/nodes", { sender_id => sender_payload }.to_json, auth_headers
expect(last_response).to be_ok
post "/api/nodes", { receiver_id => receiver_payload }.to_json, auth_headers
expect(last_response).to be_ok
encrypted_b64 = Base64.strict_encode64("secret message")
payload = {
"packet_id" => 777_001,
"rx_time" => reference_time.to_i,
"rx_iso" => reference_time.utc.iso8601,
"from_id" => sender_num.to_s,
"to_id" => receiver_id,
"channel" => 8,
"portnum" => "TEXT_MESSAGE_APP",
"encrypted" => encrypted_b64,
"snr" => -12.5,
"rssi" => -109,
"hop_limit" => 3,
}
post "/api/messages", payload.to_json, auth_headers
expect(last_response).to be_ok
expect(JSON.parse(last_response.body)).to eq("status" => "ok")
with_db(readonly: true) do |db|
db.results_as_hash = true
row = db.get_first_row(
"SELECT from_id, to_id, text, encrypted FROM messages WHERE id = ?",
[777_001],
)
expect(row["from_id"]).to eq(sender_id)
expect(row["to_id"]).to eq(receiver_id)
expect(row["text"]).to be_nil
expect(row["encrypted"]).to eq(encrypted_b64)
end
get "/api/messages"
expect(last_response).to be_ok
messages = JSON.parse(last_response.body)
expect(messages).to be_an(Array)
expect(messages).to be_empty
end
it "stores messages containing SQL control characters without executing them" do
payload = {
"packet_id" => 404,
@@ -725,11 +1217,28 @@ RSpec.describe "Potato Mesh Sinatra app" do
expect(actual_row["rx_time"]).to eq(expected["rx_time"])
expect(actual_row["rx_iso"]).to eq(expected["rx_iso"])
expect(actual_row["from_id"]).to eq(expected["from_id"])
expect(actual_row["to_id"]).to eq(expected["to_id"])
expected_from_id = expected["from_id"]
if expected_from_id.is_a?(String) && expected_from_id.match?(/\A[0-9]+\z/)
expected_from_id = node_aliases[expected_from_id] || expected_from_id
elsif expected_from_id.nil?
expected_from_id = message.dig("node", "node_id")
end
expect(actual_row["from_id"]).to eq(expected_from_id)
expect(actual_row).not_to have_key("from_node_id")
expect(actual_row).not_to have_key("from_node_num")
expected_to_id = expected["to_id"]
if expected_to_id.is_a?(String) && expected_to_id.match?(/\A[0-9]+\z/)
expected_to_id = node_aliases[expected_to_id] || expected_to_id
end
expect(actual_row["to_id"]).to eq(expected_to_id)
expect(actual_row).not_to have_key("to_node_id")
expect(actual_row).not_to have_key("to_node_num")
expect(actual_row["channel"]).to eq(expected["channel"])
expect(actual_row["portnum"]).to eq(expected["portnum"])
expect(actual_row["text"]).to eq(expected["text"])
expect(actual_row["encrypted"]).to eq(expected["encrypted"])
expect_same_value(actual_row["snr"], expected["snr"])
expect(actual_row["rssi"]).to eq(expected["rssi"])
expect(actual_row["hop_limit"]).to eq(expected["hop_limit"])
@@ -801,4 +1310,41 @@ RSpec.describe "Potato Mesh Sinatra app" do
end
end
end
describe "GET /api/positions" do
it "returns stored positions ordered by receive time" do
node_id = "!specfetch"
rx_times = [reference_time.to_i - 50, reference_time.to_i - 10]
rx_times.each_with_index do |rx_time, idx|
payload = {
"id" => 20_000 + idx,
"node_id" => node_id,
"rx_time" => rx_time,
"rx_iso" => Time.at(rx_time).utc.iso8601,
"position_time" => rx_time - 5,
"latitude" => 52.0 + idx,
"longitude" => 13.0 + idx,
"payload_b64" => "AQI=",
}
post "/api/positions", payload.to_json, auth_headers
expect(last_response).to be_ok
end
get "/api/positions?limit=1"
expect(last_response).to be_ok
data = JSON.parse(last_response.body)
expect(data.length).to eq(1)
entry = data.first
expect(entry["id"]).to eq(20_001)
expect(entry["node_id"]).to eq(node_id)
expect(entry["rx_time"]).to eq(rx_times.last)
expect(entry["rx_iso"]).to eq(Time.at(rx_times.last).utc.iso8601)
expect(entry["position_time"]).to eq(rx_times.last - 5)
expect(entry["position_time_iso"]).to eq(Time.at(rx_times.last - 5).utc.iso8601)
expect(entry["latitude"]).to eq(53.0)
expect(entry["longitude"]).to eq(14.0)
expect(entry["payload_b64"]).to eq("AQI=")
end
end
end

View File

@@ -18,11 +18,142 @@
<html lang="en">
<head>
<style>
:root {
--bg: #f6f3ee;
--bg2: #ffffff;
--fg: #0c0f12;
--muted: #5c6773;
--card: rgba(0,0,0,0.03);
--line: rgba(12,15,18,0.08);
--accent: #2b6cb0;
--row-alt: rgba(0,0,0,0.02);
--table-head-bg: rgba(0,0,0,0.06);
--table-head-fg: var(--fg);
}
body.dark {
--bg: #0e1418;
--bg2: #0e141b;
--fg: #e6ebf0;
--muted: #9aa7b4;
--card: rgba(255,255,255,0.04);
--line: rgba(255,255,255,0.10);
--accent: #5fa8ff;
--row-alt: rgba(255,255,255,0.05);
--table-head-bg: rgba(255,255,255,0.06);
--table-head-fg: var(--fg);
}
html, body {
background-color: var(--bg);
color: var(--fg);
background-image: var(--bg-image, none);
background-size: cover;
background-attachment: fixed;
transition: background-color 160ms ease, color 160ms ease;
}
a { color: var(--accent); }
hr { border-color: var(--line); }
.card, .panel, .box {
background: var(--card);
backdrop-filter: blur(2px);
border: 1px solid var(--line);
border-radius: 10px;
}
table { border-collapse: collapse; width: 100%; border: 1px solid
var(--line); }
thead th {
background: var(--table-head-bg);
color: var(--table-head-fg);
text-align: left;
border-bottom: 1px solid var(--line);
padding: 8px;
}
tbody td { padding: 8px; border-bottom: 1px solid var(--line); }
tbody tr:nth-child(even) td { background: var(--row-alt); }
.leaflet-container { background: transparent !important; color:
var(--fg); }
</style>
<script>
(function () {
  var THEME_COOKIE_MAX_AGE = 60 * 60 * 24 * 7;

  // Read a cookie by name from document.cookie; returns the decoded value,
  // or null when the cookie is absent.
  function getCookie(name) {
    const escaped = name.replace(/([.$?*|{}()\[\]\\/+^])/g, '\\$1');
    const found = document.cookie.match(new RegExp('(?:^|; )' + escaped + '=([^;]*)'));
    return found ? decodeURIComponent(found[1]) : null;
  }

  // Write a cookie with sane defaults (path=/, one-week max-age,
  // SameSite=Lax). An attribute whose value is boolean `true` is emitted as
  // a bare flag with no "=value" part.
  function setCookie(name, value, opts) {
    const attrs = Object.assign(
      { path: '/', 'max-age': THEME_COOKIE_MAX_AGE, SameSite: 'Lax' },
      opts || {}
    );
    let cookie = encodeURIComponent(name) + '=' + encodeURIComponent(value);
    for (const key in attrs) {
      cookie += '; ' + key + (attrs[key] === true ? '' : '=' + attrs[key]);
    }
    document.cookie = cookie;
  }

  // Re-write the theme cookie so its expiry window keeps sliding forward.
  function persistTheme(value) {
    setCookie('theme', value, { 'max-age': THEME_COOKIE_MAX_AGE });
  }

  // Resolve the active theme, falling back to dark for any unknown value.
  var theme = getCookie('theme');
  if (theme !== 'dark' && theme !== 'light') {
    theme = 'dark';
  }
  persistTheme(theme);

  document.addEventListener('DOMContentLoaded', function () {
    // Apply the resolved theme to the body and sync the toggle button label.
    document.body.classList.toggle('dark', theme === 'dark');
    var toggleButton = document.getElementById('themeToggle');
    if (toggleButton) {
      toggleButton.textContent = document.body.classList.contains('dark') ? '☀️' : '🌙';
    }
    // Map tile filters depend on the theme; re-apply them if the map script
    // has already installed its hook.
    if (typeof window.applyFiltersToAllTiles === 'function') {
      window.applyFiltersToAllTiles();
    }
  });

  // Expose the helpers for other inline scripts on the page.
  window.__themeCookie = { getCookie, setCookie, persistTheme, maxAge: THEME_COOKIE_MAX_AGE };
})();
</script>
<meta name="color-scheme" content="dark light">
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width,initial-scale=1" />
<title><%= site_name %></title>
<% meta_title_html = Rack::Utils.escape_html(meta_title) %>
<% meta_name_html = Rack::Utils.escape_html(meta_name) %>
<% meta_description_html = Rack::Utils.escape_html(meta_description) %>
<% request_path = request.path.to_s.empty? ? "/" : request.path %>
<% canonical_url = "#{request.base_url}#{request_path}" %>
<% canonical_html = Rack::Utils.escape_html(canonical_url) %>
<% logo_url = "#{request.base_url}/potatomesh-logo.svg" %>
<% logo_url_html = Rack::Utils.escape_html(logo_url) %>
<% logo_alt_html = Rack::Utils.escape_html("#{meta_name} logo") %>
<title><%= meta_title_html %></title>
<meta name="application-name" content="<%= meta_name_html %>" />
<meta name="apple-mobile-web-app-title" content="<%= meta_name_html %>" />
<meta name="description" content="<%= meta_description_html %>" />
<link rel="canonical" href="<%= canonical_html %>" />
<meta property="og:title" content="<%= meta_title_html %>" />
<meta property="og:site_name" content="<%= meta_name_html %>" />
<meta property="og:description" content="<%= meta_description_html %>" />
<meta property="og:type" content="website" />
<meta property="og:url" content="<%= canonical_html %>" />
<meta property="og:image" content="<%= logo_url_html %>" />
<meta property="og:image:alt" content="<%= logo_alt_html %>" />
<meta name="twitter:card" content="summary" />
<meta name="twitter:title" content="<%= meta_title_html %>" />
<meta name="twitter:description" content="<%= meta_description_html %>" />
<meta name="twitter:image" content="<%= logo_url_html %>" />
<meta name="twitter:image:alt" content="<%= logo_alt_html %>" />
<link rel="icon" type="image/x-icon" href="/favicon.ico" />
<link rel="icon" type="image/svg+xml" href="/potatomesh-logo.svg" />
<% refresh_interval_seconds = 60 %>
<% tile_filter_light = "grayscale(1) saturate(0) brightness(0.92) contrast(1.05)" %>
<% tile_filter_dark = "grayscale(1) invert(1) brightness(0.9) contrast(1.08)" %>
<!-- Leaflet CSS/JS (CDN) -->
<link
@@ -38,8 +169,17 @@
></script>
<style>
:root { --pad: 16px; }
body { font-family: system-ui, Segoe UI, Roboto, Ubuntu, Arial, sans-serif; margin: var(--pad); padding-bottom: 32px; }
:root {
--pad: 16px;
--map-tile-filter-light: <%= tile_filter_light %>;
--map-tile-filter-dark: <%= tile_filter_dark %>;
}
body {
font-family: system-ui, Segoe UI, Roboto, Ubuntu, Arial, sans-serif;
margin: var(--pad);
padding-bottom: 32px;
--map-tiles-filter: var(--map-tile-filter-light);
}
h1 { margin: 0 0 8px }
.site-title { display: inline-flex; align-items: center; gap: 12px; }
.site-title img { width: 52px; height: 52px; display: block; border-radius: 12px; }
@@ -70,7 +210,7 @@
.auto-refresh-toggle { display: inline-flex; align-items: center; gap: 6px; }
.controls { display: flex; gap: 8px; align-items: center; }
.controls label { display: inline-flex; align-items: center; gap: 6px; }
button { padding: 6px 10px; border: 1px solid #ccc; background: #fff; border-radius: 6px; cursor: pointer; }
button { padding: 6px 10px; border: 1px solid #ccc; background: #fff; border-radius: 6px; cursor: pointer; color: var(--fg); }
button:hover { background: #f6f6f6; }
.sort-button { padding: 0; border: none; background: none; color: inherit; font: inherit; cursor: pointer; display: inline-flex; align-items: center; gap: 4px; }
.sort-button:hover { background: none; }
@@ -79,9 +219,39 @@
th[aria-sort] .sort-indicator { opacity: 1; }
label { font-size: 14px; color: #333; }
input[type="text"] { padding: 6px 10px; border: 1px solid #ccc; border-radius: 6px; }
.legend { background: #fff; padding: 6px 8px; border: 1px solid #ccc; border-radius: 4px; font-size: 12px; line-height: 18px; }
.legend span { display: inline-block; width: 12px; height: 12px; margin-right: 6px; vertical-align: middle; }
#map .leaflet-tile { filter: opacity(70%); }
.legend { position: relative; background: #fff; color: var(--fg); padding: 8px 10px 10px; border: 1px solid #ccc; border-radius: 8px; font-size: 12px; line-height: 18px; min-width: 160px; box-shadow: 0 4px 16px rgba(0, 0, 0, 0.12); }
.legend-header { display: flex; align-items: center; justify-content: flex-start; gap: 4px; margin-bottom: 6px; font-weight: 600; }
.legend-title { font-size: 13px; }
.legend-items { display: flex; flex-direction: column; gap: 2px; }
.legend-item {
display: flex;
align-items: center;
gap: 6px;
font: inherit;
color: inherit;
background: transparent;
border: 1px solid transparent;
border-radius: 4px;
padding: 3px 6px;
cursor: pointer;
width: 100%;
justify-content: flex-start;
text-align: left;
}
.legend-item:hover { background: rgba(0, 0, 0, 0.05); }
.legend-item:focus-visible { outline: 2px solid #4a90e2; outline-offset: 2px; }
.legend-item[aria-pressed="true"] { border-color: rgba(0, 0, 0, 0.2); background: rgba(0, 0, 0, 0.08); }
.legend-swatch { display: inline-block; width: 12px; height: 12px; border-radius: 2px; }
.legend-hidden { display: none !important; }
.legend-toggle { margin-top: 8px; }
.legend-toggle-button { font-size: 12px; color: var(--fg); }
#map .leaflet-tile-pane,
#map .leaflet-layer,
#map .leaflet-tile.map-tiles {
opacity: 0.75;
filter: var(--map-tiles-filter, var(--map-tile-filter-light));
-webkit-filter: var(--map-tiles-filter, var(--map-tile-filter-light));
}
.leaflet-popup-content-wrapper,
.leaflet-popup-tip {
background: #fff;
@@ -90,7 +260,7 @@
}
#nodes { font-size: 12px; }
footer { position: fixed; bottom: 0; left: var(--pad); width: calc(100% - 2 * var(--pad)); background: #fafafa; border-top: 1px solid #ddd; text-align: center; font-size: 12px; padding: 4px 0; }
.info-overlay { position: fixed; inset: 0; background: rgba(0, 0, 0, 0.45); display: flex; align-items: center; justify-content: center; padding: var(--pad); z-index: 1000; }
.info-overlay { position: fixed; inset: 0; background: rgba(0, 0, 0, 0.45); display: flex; align-items: center; justify-content: center; padding: var(--pad); z-index: 4000; }
.info-overlay[hidden] { display: none; }
.info-dialog { background: #fff; color: #111; max-width: 420px; width: min(100%, 420px); border-radius: 12px; box-shadow: 0 16px 40px rgba(0, 0, 0, 0.2); position: relative; padding: 20px 24px; outline: none; }
.info-dialog:focus { outline: 2px solid #4a90e2; outline-offset: 4px; }
@@ -115,7 +285,7 @@
}
}
@media (max-width: 768px) {
@media (max-width: 1024px) {
.row { flex-direction: column; align-items: stretch; gap: var(--pad); }
.site-title img { width: 44px; height: 44px; }
.map-row { flex-direction: column; }
@@ -145,10 +315,15 @@
#nodes td:nth-child(15) {
display: none;
}
.legend { max-width: min(240px, 80vw); }
}
/* Dark mode overrides */
body.dark { background: #111; color: #eee; }
body.dark {
background: #111;
color: #eee;
--map-tiles-filter: var(--map-tile-filter-dark);
}
body.dark .meta { color: #bbb; }
body.dark .refresh-info { color: #bbb; }
body.dark .pill { background: #444; }
@@ -161,7 +336,11 @@
body.dark .sort-button:hover { background: none; }
body.dark label { color: #ddd; }
body.dark input[type="text"] { background: #222; color: #eee; border-color: #444; }
body.dark .legend { background: #333; border-color: #444; color: #eee; }
body.dark .legend { background: #333; border-color: #444; color: #eee; box-shadow: 0 4px 16px rgba(0, 0, 0, 0.45); }
body.dark .legend-toggle-button { background: #333; border-color: #444; color: #eee; }
body.dark .legend-toggle-button:hover { background: #444; }
body.dark .legend-item:hover { background: rgba(255, 255, 255, 0.1); }
body.dark .legend-item[aria-pressed="true"] { border-color: rgba(255, 255, 255, 0.3); background: rgba(255, 255, 255, 0.16); }
body.dark .leaflet-popup-content-wrapper,
body.dark .leaflet-popup-tip {
background: #333;
@@ -182,6 +361,110 @@
body.dark .short-info-overlay { background: #1c1c1c; border-color: #444; color: #eee; box-shadow: 0 8px 24px rgba(0, 0, 0, 0.55); }
body.dark .short-info-overlay .short-info-close:hover { background: rgba(255, 255, 255, 0.1); }
</style>
<style id="map-tiles-light">
body:not(.dark) {
--map-tiles-filter: <%= tile_filter_light %>;
}
body:not(.dark) #map .leaflet-tile-pane,
body:not(.dark) #map .leaflet-layer,
body:not(.dark) #map .leaflet-tile.map-tiles {
filter: <%= tile_filter_light %>;
-webkit-filter: <%= tile_filter_light %>;
}
</style>
<style id="map-tiles-dark">
body.dark {
--map-tiles-filter: <%= tile_filter_dark %>;
}
body.dark #map .leaflet-tile-pane,
body.dark #map .leaflet-layer,
body.dark #map .leaflet-tile.map-tiles {
filter: <%= tile_filter_dark %>;
-webkit-filter: <%= tile_filter_dark %>;
}
</style>
<script>
(function(){
function xmur3(str){for(var i=0,h=1779033703^str.length;i<str.length;i++)h=Math.imul(h^str.charCodeAt(i),3432918353),h=h<<13|h>>>19;return function(){h=Math.imul(h^h>>>16,2246822507);h=Math.imul(h^h>>>13,3266489909);return (h^h>>>16)>>>0;};}
function mulberry32(a){return function(){var t=a+=0x6D2B79F5;t=Math.imul(t^t>>>15,t|1);t^=t+Math.imul(t^t>>>7,t|61);return((t^t>>>14)>>>0)/4294967296;}}
function genBackground(theme){
var seedInput = location.hostname + '::' + theme;
var seed = xmur3(seedInput)();
var rnd = mulberry32(seed);
var w = 1400, h = 900;
var c = document.createElement('canvas'); c.width=w; c.height=h;
var ctx = c.getContext('2d');
if(theme==='dark'){
var g = ctx.createLinearGradient(0,0,w,h);
g.addColorStop(0, '#0b1119');
g.addColorStop(1, '#121b27');
ctx.fillStyle = g; ctx.fillRect(0,0,w,h);
} else {
var g2 = ctx.createLinearGradient(0,0,w,h);
g2.addColorStop(0, '#efe8d9'); g2.addColorStop(1, '#dfe5ec');
ctx.fillStyle = g2; ctx.fillRect(0,0,w,h);
}
ctx.globalAlpha = (theme==='dark') ? 0.05 : 0.06;
for(var i=0;i<14000;i++){
var x = Math.floor(rnd()*w), y = Math.floor(rnd()*h);
var s = Math.floor(rnd()*2)+1;
ctx.fillStyle = (theme==='dark') ? '#ffffff' : '#000000';
ctx.fillRect(x,y,s,s);
}
var rad = ctx.createRadialGradient(w*0.5,h*0.5,Math.min(w,h)*0.2, w*0.5,h*0.5,Math.max(w,h)*0.7);
if(theme==='dark'){
rad.addColorStop(0,'rgba(0,0,0,0)');
rad.addColorStop(1,'rgba(0,0,0,0.20)');
} else {
rad.addColorStop(0,'rgba(255,255,255,0)');
rad.addColorStop(1,'rgba(255,255,255,0.22)');
}
ctx.globalAlpha = 1; ctx.fillStyle = rad; ctx.fillRect(0,0,w,h);
var url = c.toDataURL('image/png');
document.documentElement.style.setProperty('--bg-image', 'url('+url+')');
}
function currentTheme(){
return document.body.classList.contains('dark') ? 'dark' : 'light';
}
document.addEventListener('DOMContentLoaded', function(){
genBackground(currentTheme());
});
window.addEventListener('themechange', function(e){
var theme = e.detail && e.detail.theme || currentTheme();
genBackground(theme);
});
var obs = new MutationObserver(function(){ genBackground(currentTheme());});
obs.observe(document.documentElement, { attributes:true, attributeFilter:['class'] });
window.__regenBackground = genBackground;
})();
</script>
<style>
/* Make common wrappers transparent so the generated background is visible */
#app, main, .container, .content, .wrapper, .layout, .page, .root, body > div:first-child {
background: transparent !important;
}
/* Soften dark cards a bit to avoid heavy overlay */
body.dark .card, body.dark .panel, body.dark .box {
background: rgba(255,255,255,0.02);
border-color: rgba(255,255,255,0.07);
}
body.dark thead th {
background: rgba(255,255,255,0.04);
}
</style>
<style>
/* Dark theme: avoid any solid blocks that hide the background */
body.dark :is(#app, main, .container, .content, .wrapper, .page, .layout, .root, .section) {
background: rgba(255,255,255,0.04) !important;
}
/* Dark theme tables & boxes */
body.dark :is(.card, .panel, .box) {
background: var(--card) !important;
border-color: var(--line) !important;
}
/* Defensive: Leaflet map stays transparent */
body.dark .leaflet-container { background: transparent !important; }
</style>
</head>
<body>
<h1 class="site-title">
@@ -265,7 +548,11 @@
</div>
<footer>
PotatoMesh GitHub: <a href="https://github.com/l5yth/potato-mesh" target="_blank">l5yth/potato-mesh</a>
PotatoMesh
<% if version && !version.empty? %>
<span class="mono"><%= version %></span> —
<% end %>
GitHub: <a href="https://github.com/l5yth/potato-mesh" target="_blank">l5yth/potato-mesh</a>
<% if matrix_room && !matrix_room.empty? %>
— <%= site_name %> Matrix:
<a href="https://matrix.to/#/<%= matrix_room %>" target="_blank"><%= matrix_room %></a>
@@ -317,11 +604,10 @@
};
let allNodes = [];
let shortInfoAnchor = null;
const seenNodeIds = new Set();
const seenMessageIds = new Set();
let lastChatDate;
const NODE_LIMIT = 1000;
const CHAT_LIMIT = 1000;
const CHAT_RECENT_WINDOW_SECONDS = 7 * 24 * 60 * 60;
const REFRESH_MS = <%= refresh_interval_seconds * 1000 %>;
refreshInfo.textContent = `<%= default_channel %> (<%= default_frequency %>) — active nodes: …`;
@@ -445,52 +731,328 @@
const MAP_CENTER = L.latLng(<%= map_center_lat %>, <%= map_center_lon %>);
const MAX_NODE_DISTANCE_KM = <%= max_node_distance_km %>;
// Firmware 2.7.10 / Android 2.7.0 roles and colors (see issue #177)
const roleColors = Object.freeze({
CLIENT: '#A8D5BA',
CLIENT_HIDDEN: '#B8DCA9',
CLIENT_MUTE: '#D2E3A2',
TRACKER: '#E8E6A1',
SENSOR: '#F4E3A3',
LOST_AND_FOUND: '#F9D4A6',
CLIENT_HIDDEN: '#A9CBE8',
SENSOR: '#A8D5BA',
TRACKER: '#B9DFAC',
CLIENT_MUTE: '#CDE7A9',
CLIENT: '#E8E6A1',
CLIENT_BASE: '#F6D0A6',
REPEATER: '#F7B7A3',
ROUTER_LATE: '#F29AA3',
ROUTER: '#E88B94'
ROUTER: '#E88B94',
LOST_AND_FOUND: '#C3A8E8'
});
// Marker z-order by role: higher values are drawn later, i.e. on top.
// Routers/repeaters outrank plain clients so infrastructure stays visible.
const roleRenderOrder = Object.freeze({
CLIENT_HIDDEN: 1,
SENSOR: 2,
TRACKER: 3,
CLIENT_MUTE: 4,
CLIENT: 5,
CLIENT_BASE: 6,
REPEATER: 7,
ROUTER_LATE: 8,
ROUTER: 9,
LOST_AND_FOUND: 10
});
// Role keys currently selected in the legend; empty set means "show all".
const activeRoleFilters = new Set();
// role -> legend <button> element, used to sync aria-pressed state.
const legendRoleButtons = new Map();
// Coerce any role value to a non-empty string, defaulting to 'CLIENT'.
function normalizeRole(role) {
  if (role == null) return 'CLIENT';
  const trimmed = String(role).trim();
  if (trimmed.length === 0) return 'CLIENT';
  return trimmed;
}
// Map a raw role value onto a key of roleColors. An exact match wins,
// then the upper-cased spelling; otherwise the normalized input is
// returned unchanged so callers can still fall back themselves.
function getRoleKey(role) {
  const name = normalizeRole(role);
  if (roleColors[name]) return name;
  const upperName = name.toUpperCase();
  return roleColors[upperName] ? upperName : name;
}
// Resolve a role to its marker colour, falling back to the CLIENT colour
// and finally a neutral blue for roles outside the palette.
function getRoleColor(role) {
  const mapped = roleColors[getRoleKey(role)];
  if (mapped) return mapped;
  return roleColors.CLIENT || '#3388ff';
}
// Drawing priority for a role; unknown roles get 0 so known markers
// render on top of them.
function getRoleRenderPriority(role) {
  const order = roleRenderOrder[getRoleKey(role)];
  if (typeof order === 'number') return order;
  return 0;
}
// --- Map setup ---
const map = L.map('map', { worldCopyJump: true });
const lightTiles = L.tileLayer('https://tiles.stadiamaps.com/tiles/alidade_smooth/{z}/{x}/{y}.png', {
maxZoom: 18,
attribution: '&copy; OpenStreetMap contributors &amp; Stadia Maps'
const map = L.map('map', { worldCopyJump: true, attributionControl: false });
const TILE_LAYER_URL = 'https://{s}.tile.openstreetmap.fr/hot/{z}/{x}/{y}.png';
const TILE_FILTER_LIGHT = '<%= tile_filter_light %>';
const TILE_FILTER_DARK = '<%= tile_filter_dark %>';
// Pick the CSS filter string matching the active theme (body.dark toggles it).
function resolveTileFilter() {
  const isDark = document.body.classList.contains('dark');
  if (isDark) return TILE_FILTER_DARK;
  return TILE_FILTER_LIGHT;
}
// Tag one tile element with the map-tiles class and the themed CSS filter.
// When no filter is supplied, the current theme's filter is resolved.
function applyFilterToTileElement(tile, filterValue) {
  if (!tile) return;
  const classes = tile.classList;
  if (classes && !classes.contains('map-tiles')) {
    classes.add('map-tiles');
  }
  const cssFilter = filterValue || resolveTileFilter();
  const style = tile.style;
  if (style) {
    style.filter = cssFilter;
    style.webkitFilter = cssFilter;
  }
}
// Mirror the tile filter onto the layer container and the map's tile pane,
// so tiles that bypass per-element styling still pick up the theme.
function applyFilterToTileContainers(filterValue) {
  const cssFilter = filterValue || resolveTileFilter();
  const container = (tiles && typeof tiles.getContainer === 'function') ? tiles.getContainer() : null;
  const pane = (map && typeof map.getPane === 'function') ? map.getPane('tilePane') : null;
  for (const el of [container, pane]) {
    if (el && el.style) {
      el.style.filter = cssFilter;
      el.style.webkitFilter = cssFilter;
    }
  }
}
// Re-apply the current theme's filter to a single tile element.
function ensureTileHasCurrentFilter(tile) {
  if (!tile) return;
  applyFilterToTileElement(tile, resolveTileFilter());
}
// Push the themed filter everywhere in one sweep: the CSS custom property,
// every rendered tile, and the tile containers.
function applyFiltersToAllTiles() {
  const cssFilter = resolveTileFilter();
  document.body.style.setProperty('--map-tiles-filter', cssFilter);
  for (const tile of document.querySelectorAll('#map .leaflet-tile')) {
    applyFilterToTileElement(tile, cssFilter);
  }
  applyFilterToTileContainers(cssFilter);
}
const tiles = L.tileLayer(TILE_LAYER_URL, {
maxZoom: 19,
className: 'map-tiles',
crossOrigin: 'anonymous'
});
const darkTiles = L.tileLayer('https://tiles.stadiamaps.com/tiles/alidade_smooth_dark/{z}/{x}/{y}.png', {
maxZoom: 18,
attribution: '&copy; OpenStreetMap contributors &amp; Stadia Maps'
let tileDomObserver = null;
// Watch the tile layer's container (and the map's tile pane) for tiles
// inserted outside Leaflet's tile events, and filter them on arrival.
// Safe to call repeatedly: any previous observer is disconnected first.
function observeTileContainer() {
if (typeof MutationObserver !== 'function') return;
const container = tiles && typeof tiles.getContainer === 'function' ? tiles.getContainer() : null;
const tilePane = map && typeof map.getPane === 'function' ? map.getPane('tilePane') : null;
const targets = [];
if (container) targets.push(container);
// Avoid observing the same element twice when container === tilePane.
if (tilePane && !targets.includes(tilePane)) targets.push(tilePane);
if (!targets.length) return;
if (tileDomObserver) {
tileDomObserver.disconnect();
}
// Applies the filter to an added element if it is a tile, and to any
// tile descendants it carries (element nodes only, nodeType 1).
const handleNode = (node, filterValue) => {
if (!node || node.nodeType !== 1) return;
if (node.classList && node.classList.contains('leaflet-tile')) {
applyFilterToTileElement(node, filterValue);
}
if (typeof node.querySelectorAll === 'function') {
const nestedTiles = node.querySelectorAll('.leaflet-tile');
nestedTiles.forEach(tile => applyFilterToTileElement(tile, filterValue));
}
};
tileDomObserver = new MutationObserver(mutations => {
// Resolve the filter once per batch, not per node.
const filterValue = resolveTileFilter();
mutations.forEach(mutation => {
mutation.addedNodes.forEach(node => handleNode(node, filterValue));
});
applyFilterToTileContainers(filterValue);
});
targets.forEach(target => tileDomObserver.observe(target, { childList: true, subtree: true }));
}
// Keep tiles themed from the moment they start loading until the layer
// settles, then install the DOM observer as a safety net.
// NOTE: a leftover legacy line (`let tiles = lightTiles.addTo(map);`) was
// removed here — it redeclared the `tiles` const and referenced the removed
// `lightTiles` layer.
tiles.on('tileloadstart', event => {
  if (!event || !event.tile) return;
  ensureTileHasCurrentFilter(event.tile);
  applyFilterToTileContainers();
});
tiles.on('tileload', event => {
  if (!event || !event.tile) return;
  ensureTileHasCurrentFilter(event.tile);
  applyFilterToTileContainers();
});
tiles.on('load', () => {
  applyFiltersToAllTiles();
  observeTileContainer();
});
tiles.addTo(map);
observeTileContainer();
// Default view until first data arrives
map.setView(MAP_CENTER, 10);
applyFiltersToAllTiles();
map.on('moveend', applyFiltersToAllTiles);
map.on('zoomend', applyFiltersToAllTiles);
// Layer holding all node markers; cleared and refilled on each render.
const markersLayer = L.layerGroup().addTo(map);
// Legend UI state: panel element, its toggle button, and visibility flag.
let legendContainer = null;
let legendToggleButton = null;
let legendVisible = true;
// Sync the legend toggle button's text, ARIA attributes and filter marker
// with the current visibility and active-filter state.
function updateLegendToggleState() {
  const button = legendToggleButton;
  if (!button) return;
  const filtersActive = activeRoleFilters.size > 0;
  button.setAttribute('aria-pressed', legendVisible ? 'true' : 'false');
  // NOTE(review): the visible text always ends in "(filters)" while the
  // aria-label only mentions filters when some are active — confirm this
  // divergence is intentional.
  const ariaLabel = (legendVisible ? 'Hide map legend' : 'Show map legend')
    + (filtersActive ? ' (role filters active)' : '');
  button.setAttribute('aria-label', ariaLabel);
  button.textContent = (legendVisible ? 'Hide legend' : 'Show legend') + ' (filters)';
  if (filtersActive) {
    button.setAttribute('data-has-active-filters', 'true');
  } else {
    button.removeAttribute('data-has-active-filters');
  }
}
// Show or hide the legend panel and keep the toggle button in sync.
function setLegendVisibility(visible) {
  legendVisible = visible;
  const panel = legendContainer;
  if (panel) {
    panel.classList.toggle('legend-hidden', !visible);
    panel.setAttribute('aria-hidden', visible ? 'false' : 'true');
  }
  updateLegendToggleState();
}
// Reflect the active role filters on every legend button (aria-pressed)
// and on the legend container's data attribute, then refresh the toggle.
function updateLegendRoleFiltersUI() {
  const filtersActive = activeRoleFilters.size > 0;
  for (const [role, button] of legendRoleButtons) {
    if (!button) continue;
    button.setAttribute('aria-pressed', activeRoleFilters.has(role) ? 'true' : 'false');
  }
  if (legendContainer) {
    if (filtersActive) {
      legendContainer.setAttribute('data-has-active-filters', 'true');
    } else {
      legendContainer.removeAttribute('data-has-active-filters');
    }
  }
  updateLegendToggleState();
}
// Flip one role's membership in the active filter set, then repaint the
// legend state and re-run the node filter.
function toggleRoleFilter(role) {
  if (!role) return;
  const wasActive = activeRoleFilters.delete(role);
  if (!wasActive) activeRoleFilters.add(role);
  updateLegendRoleFiltersUI();
  applyFilter();
}
const legend = L.control({ position: 'bottomright' });
/**
 * Builds the legend panel: one button per role with a coloured swatch.
 * Plain click toggles that role filter; Cmd/Ctrl-click selects it
 * exclusively.
 *
 * Fix: removed the stray legacy `div.innerHTML += ...` line inside the
 * loop — it rendered a duplicate, non-interactive row per role and, by
 * re-serializing the container's HTML, detached the click listeners of
 * every previously created button.
 */
legend.onAdd = function () {
  const div = L.DomUtil.create('div', 'legend');
  div.id = 'mapLegend';
  div.setAttribute('role', 'region');
  div.setAttribute('aria-label', 'Map legend');
  legendContainer = div;
  const header = L.DomUtil.create('div', 'legend-header', div);
  const title = L.DomUtil.create('span', 'legend-title', header);
  title.textContent = 'Legend';
  const itemsContainer = L.DomUtil.create('div', 'legend-items', div);
  legendRoleButtons.clear();
  for (const [role, color] of Object.entries(roleColors)) {
    const item = L.DomUtil.create('button', 'legend-item', itemsContainer);
    item.type = 'button';
    item.setAttribute('aria-pressed', 'false');
    item.dataset.role = role;
    const swatch = L.DomUtil.create('span', 'legend-swatch', item);
    swatch.style.background = color;
    swatch.setAttribute('aria-hidden', 'true');
    const label = L.DomUtil.create('span', 'legend-label', item);
    label.textContent = role;
    item.addEventListener('click', event => {
      event.preventDefault();
      event.stopPropagation();
      // Cmd/Ctrl-click: show only this role.
      const exclusive = event.metaKey || event.ctrlKey;
      if (exclusive) {
        activeRoleFilters.clear();
        activeRoleFilters.add(role);
        updateLegendRoleFiltersUI();
        applyFilter();
      } else {
        toggleRoleFilter(role);
      }
    });
    legendRoleButtons.set(role, item);
  }
  updateLegendRoleFiltersUI();
  // Keep map pan/zoom gestures from leaking through the legend.
  L.DomEvent.disableClickPropagation(div);
  L.DomEvent.disableScrollPropagation(div);
  return div;
};
legend.addTo(map);
legendContainer = legend.getContainer();
themeToggle.addEventListener('click', () => {
const dark = document.body.classList.toggle('dark');
themeToggle.textContent = dark ? '☀️' : '🌙';
map.removeLayer(tiles);
tiles = dark ? darkTiles : lightTiles;
tiles.addTo(map);
const legendToggleControl = L.control({ position: 'bottomright' });
// Small Leaflet control hosting a single button that shows/hides the legend.
legendToggleControl.onAdd = function () {
  const wrapper = L.DomUtil.create('div', 'leaflet-control legend-toggle');
  const toggle = L.DomUtil.create('button', 'legend-toggle-button', wrapper);
  toggle.type = 'button';
  toggle.textContent = 'Hide legend (filters)';
  const initialAttrs = {
    'aria-pressed': 'true',
    'aria-label': 'Hide map legend',
    'aria-controls': 'mapLegend'
  };
  for (const [name, value] of Object.entries(initialAttrs)) {
    toggle.setAttribute(name, value);
  }
  toggle.addEventListener('click', event => {
    event.preventDefault();
    event.stopPropagation();
    setLegendVisibility(!legendVisible);
  });
  legendToggleButton = toggle;
  updateLegendToggleState();
  // Clicks/scrolls on the control must not reach the map underneath.
  L.DomEvent.disableClickPropagation(wrapper);
  L.DomEvent.disableScrollPropagation(wrapper);
  return wrapper;
};
legendToggleControl.addTo(map);
// Hide the legend by default on narrow viewports and track live changes,
// e.g. device rotation or window resizing across the 1024px breakpoint.
const legendMediaQuery = window.matchMedia('(max-width: 1024px)');
setLegendVisibility(!legendMediaQuery.matches);
legendMediaQuery.addEventListener('change', event => {
setLegendVisibility(!event.matches);
});
// Theme switch: flip the body class, persist the choice via the cookie
// helper when available, broadcast a themechange event, and re-filter tiles.
themeToggle.addEventListener('click', () => {
  const isDark = document.body.classList.toggle('dark');
  const themeValue = isDark ? 'dark' : 'light';
  themeToggle.textContent = isDark ? '☀️' : '🌙';
  const cookieApi = window.__themeCookie;
  if (cookieApi) {
    if (typeof cookieApi.persistTheme === 'function') {
      cookieApi.persistTheme(themeValue);
    } else if (typeof cookieApi.setCookie === 'function') {
      cookieApi.setCookie('theme', themeValue);
    }
  }
  window.dispatchEvent(new CustomEvent('themechange', { detail: { theme: themeValue } }));
  // NOTE(review): relies on applyFiltersToAllTiles being reachable via
  // window (true for a top-level classic-script declaration) — confirm.
  if (typeof window.applyFiltersToAllTiles === 'function') window.applyFiltersToAllTiles();
});
let lastFocusBeforeInfo = null;
function openInfoOverlay() {
@@ -590,14 +1152,14 @@
function renderShortHtml(short, role, longName, nodeData = null){
const safeTitle = longName ? escapeHtml(String(longName)) : '';
const titleAttr = safeTitle ? ` title="${safeTitle}"` : '';
const resolvedRole = role || (nodeData && nodeData.role) || 'CLIENT';
const roleValue = normalizeRole(role != null && role !== '' ? role : (nodeData && nodeData.role));
let infoAttr = '';
if (nodeData && typeof nodeData === 'object') {
const info = {
nodeId: nodeData.node_id ?? nodeData.nodeId ?? '',
shortName: short != null ? String(short) : (nodeData.short_name ?? ''),
longName: nodeData.long_name ?? longName ?? '',
role: resolvedRole,
role: roleValue,
hwModel: nodeData.hw_model ?? nodeData.hwModel ?? '',
battery: nodeData.battery_level ?? nodeData.battery ?? null,
voltage: nodeData.voltage ?? null,
@@ -611,7 +1173,7 @@
return `<span class="short-name" style="background:#ccc"${titleAttr}${infoAttr}>?&nbsp;&nbsp;&nbsp;</span>`;
}
const padded = escapeHtml(String(short).padStart(4, ' ')).replace(/ /g, '&nbsp;');
const color = roleColors[resolvedRole] || roleColors.CLIENT;
const color = getRoleColor(roleValue);
return `<span class="short-name" style="background:${color}"${titleAttr}${infoAttr}>${padded}</span>`;
}
@@ -682,16 +1244,8 @@
requestAnimationFrame(positionShortInfoOverlay);
}
function appendChatEntry(div) {
chatEl.appendChild(div);
while (chatEl.childElementCount > CHAT_LIMIT) {
chatEl.removeChild(chatEl.firstChild);
}
chatEl.scrollTop = chatEl.scrollHeight;
}
function maybeAddDateDivider(ts) {
if (!ts) return;
function maybeCreateDateDivider(ts) {
if (!ts) return null;
const d = new Date(ts * 1000);
const key = `${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())}`;
if (lastChatDate !== key) {
@@ -701,30 +1255,72 @@
const div = document.createElement('div');
div.className = 'chat-entry-date';
div.textContent = `-- ${formatDate(midnight)} --`;
appendChatEntry(div);
return div;
}
return null;
}
/**
 * Builds (but does not insert) a chat-log row announcing a newly heard node.
 *
 * Fix: removed residue of the deleted `addNewNodeChatEntry` wrapper — a
 * second function header and a call to the removed `appendChatEntry`
 * helper were interleaved with this definition.
 *
 * @param {Object} n node record (first_heard, short_name, long_name, role)
 * @returns {HTMLDivElement} the rendered chat entry element
 */
function createNodeChatEntry(n) {
  const div = document.createElement('div');
  const ts = formatTime(new Date(n.first_heard * 1000));
  div.className = 'chat-entry-node';
  const short = renderShortHtml(n.short_name, n.role, n.long_name, n);
  const longName = escapeHtml(n.long_name || '');
  div.innerHTML = `[${ts}] ${short} <em>New node: ${longName}</em>`;
  return div;
}
/**
 * Builds (but does not insert) a chat-log row for a received text message.
 *
 * Fix: removed residue of the deleted `addNewMessageChatEntry` wrapper —
 * a second function header and a call to the removed `appendChatEntry`
 * helper were interleaved with this definition.
 *
 * @param {Object} m message record (rx_time, text, optional node metadata)
 * @returns {HTMLDivElement} the rendered chat entry element
 */
function createMessageChatEntry(m) {
  const div = document.createElement('div');
  const ts = formatTime(new Date(m.rx_time * 1000));
  const short = renderShortHtml(m.node?.short_name, m.node?.role, m.node?.long_name, m.node);
  const text = escapeHtml(m.text || '');
  div.className = 'chat-entry-msg';
  div.innerHTML = `[${ts}] ${short} ${text}`;
  return div;
}
// Rebuilds the chat log from scratch: merges node announcements and
// decrypted messages, drops entries older than CHAT_RECENT_WINDOW_SECONDS,
// sorts chronologically (nodes before messages at equal timestamps),
// interleaves date dividers, and caps the log at CHAT_LIMIT rows.
function renderChatLog(nodes, messages) {
if (!chatEl) return;
const entries = [];
for (const n of nodes || []) {
entries.push({ type: 'node', ts: n.first_heard ?? 0, item: n });
}
for (const m of messages || []) {
// Encrypted messages carry no readable text; skip them entirely.
if (!m || m.encrypted) continue;
entries.push({ type: 'msg', ts: m.rx_time ?? 0, item: m });
}
const nowSeconds = Math.floor(Date.now() / 1000);
const cutoff = nowSeconds - CHAT_RECENT_WINDOW_SECONDS;
const recentEntries = entries.filter(entry => {
if (entry == null) return false;
const rawTs = entry.ts;
if (rawTs == null) return false;
const ts = typeof rawTs === 'number' ? rawTs : Number(rawTs);
if (!Number.isFinite(ts)) return false;
// Intentional side effect: normalizes entry.ts to a number so the
// sort comparator below can subtract directly.
entry.ts = ts;
return ts >= cutoff;
});
recentEntries.sort((a, b) => {
if (a.ts !== b.ts) return a.ts - b.ts;
// Tie-break: node announcements come before messages.
return a.type === 'node' && b.type === 'msg' ? -1 : a.type === 'msg' && b.type === 'node' ? 1 : 0;
});
const frag = document.createDocumentFragment();
// Reset the divider tracker so maybeCreateDateDivider emits a fresh
// divider for the first entry of this rebuild.
lastChatDate = null;
for (const entry of recentEntries) {
const divider = maybeCreateDateDivider(entry.ts);
if (divider) frag.appendChild(divider);
if (entry.type === 'node') {
frag.appendChild(createNodeChatEntry(entry.item));
} else {
frag.appendChild(createMessageChatEntry(entry.item));
}
}
chatEl.replaceChildren(frag);
// Trim oldest rows if the window still holds more than CHAT_LIMIT.
while (chatEl.childElementCount > CHAT_LIMIT) {
chatEl.removeChild(chatEl.firstChild);
}
// Keep the view pinned to the newest entry.
chatEl.scrollTop = chatEl.scrollHeight;
}
function pad(n) { return String(n).padStart(2, "0"); }
@@ -761,6 +1357,27 @@
return Number.isFinite(n) ? `${n.toFixed(d)}%` : "";
}
// Coerce a name field to a trimmed string; null/undefined become ''.
// (A whitespace-only input trims to '' naturally.)
function normalizeNodeNameValue(value) {
  if (value == null) return '';
  return String(value).trim();
}
// Give unnamed nodes a derived display name based on their node id:
// short name = last four characters of the id, long name = "Meshtastic <id>".
// Mutates the node in place; no-op when either name is already set.
function applyNodeNameFallback(node) {
  if (!node || typeof node !== 'object') return;
  const hasShort = normalizeNodeNameValue(node.short_name ?? node.shortName) !== '';
  const hasLong = normalizeNodeNameValue(node.long_name ?? node.longName) !== '';
  if (hasShort || hasLong) return;
  const id = normalizeNodeNameValue(node.node_id ?? node.nodeId);
  if (id === '') return;
  const shortName = id.slice(-4);
  const longName = `Meshtastic ${id}`;
  node.short_name = shortName;
  node.long_name = longName;
  // Mirror onto camelCase fields only when the node already carries them.
  if ('shortName' in node) node.shortName = shortName;
  if ('longName' in node) node.longName = longName;
}
function timeHum(unixSec) {
if (!unixSec) return "";
if (unixSec < 0) return "0s";
@@ -842,14 +1459,24 @@
function renderMap(nodes, nowSec) {
markersLayer.clearLayers();
const pts = [];
for (const n of nodes) {
const nodesByRenderOrder = nodes
.map((node, index) => ({ node, index }))
.sort((a, b) => {
const orderA = getRoleRenderPriority(a.node && a.node.role);
const orderB = getRoleRenderPriority(b.node && b.node.role);
if (orderA !== orderB) return orderA - orderB;
return a.index - b.index;
})
.map(entry => entry.node);
for (const n of nodesByRenderOrder) {
const latRaw = n.latitude, lonRaw = n.longitude;
if (latRaw == null || latRaw === '' || lonRaw == null || lonRaw === '') continue;
const lat = Number(latRaw), lon = Number(lonRaw);
if (!Number.isFinite(lat) || !Number.isFinite(lon)) continue;
if (n.distance_km != null && n.distance_km > MAX_NODE_DISTANCE_KM) continue;
const color = roleColors[n.role] || '#3388ff';
const color = getRoleColor(n.role);
const marker = L.circleMarker([lat, lon], {
radius: 9,
color: '#000',
@@ -878,14 +1505,23 @@
}
}
// True when the (already lower-cased) query appears in the node's id,
// short name, or long name; an empty query matches everything.
function matchesTextFilter(node, query) {
  if (!query) return true;
  const haystacks = [node?.node_id, node?.short_name, node?.long_name];
  for (const value of haystacks) {
    if (value == null || value === '') continue;
    if (String(value).toLowerCase().includes(query)) return true;
  }
  return false;
}
// True when no role filters are active, or when the node's canonical role
// key is among the selected ones.
function matchesRoleFilter(node) {
  if (activeRoleFilters.size === 0) return true;
  return activeRoleFilters.has(getRoleKey(node && node.role));
}
function applyFilter() {
const rawQuery = filterInput ? filterInput.value : '';
const q = rawQuery.trim().toLowerCase();
const filteredNodes = !q ? allNodes.slice() : allNodes.filter(n => {
return [n.node_id, n.short_name, n.long_name]
.filter(value => value != null && value !== '')
.some(value => String(value).toLowerCase().includes(q));
});
const filteredNodes = allNodes.filter(n => matchesTextFilter(n, q) && matchesRoleFilter(n));
const sortedNodes = sortNodes(filteredNodes);
const nowSec = Date.now()/1000;
renderTable(sortedNodes, nowSec);
@@ -903,36 +1539,13 @@
try {
statusEl.textContent = 'refreshing…';
const nodes = await fetchNodes();
nodes.forEach(applyNodeNameFallback);
computeDistances(nodes);
const newNodes = [];
for (const n of nodes) {
if (n.node_id && !seenNodeIds.has(n.node_id)) {
newNodes.push(n);
}
}
const messages = await fetchMessages();
const newMessages = [];
for (const m of messages) {
if (m.id && !seenMessageIds.has(m.id)) {
newMessages.push(m);
}
}
const entries = [];
for (const n of newNodes) entries.push({ type: 'node', ts: n.first_heard ?? 0, item: n });
for (const m of newMessages) entries.push({ type: 'msg', ts: m.rx_time ?? 0, item: m });
entries.sort((a, b) => {
if (a.ts !== b.ts) return a.ts - b.ts;
return a.type === 'node' && b.type === 'msg' ? -1 : a.type === 'msg' && b.type === 'node' ? 1 : 0;
messages.forEach(message => {
if (message && message.node) applyNodeNameFallback(message.node);
});
for (const e of entries) {
if (e.type === 'node') {
addNewNodeChatEntry(e.item);
if (e.item.node_id) seenNodeIds.add(e.item.node_id);
} else {
addNewMessageChatEntry(e.item);
if (e.item.id) seenMessageIds.add(e.item.id);
}
}
renderChatLog(nodes, messages);
allNodes = nodes;
applyFilter();
statusEl.textContent = 'updated ' + new Date().toLocaleTimeString();