mirror of
https://github.com/jorijn/meshcore-stats.git
synced 2026-03-28 17:42:55 +01:00
Compare commits
32 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c199ace4a2 | ||
|
|
f7923b9434 | ||
|
|
c978844271 | ||
|
|
64cc352b80 | ||
|
|
e37aef6c5e | ||
|
|
81b7c6897a | ||
|
|
a3015e2209 | ||
|
|
5545ce5b28 | ||
|
|
666ed4215f | ||
|
|
3d0d90304c | ||
|
|
6afc14e007 | ||
|
|
4c5a408604 | ||
|
|
3c5eace220 | ||
|
|
7eee23ec40 | ||
|
|
30de7c20f3 | ||
|
|
19fa04c202 | ||
|
|
6ac52629d3 | ||
|
|
5b43f9ed12 | ||
|
|
0fe6c66ed8 | ||
|
|
2730a9d906 | ||
|
|
5e5d63fca3 | ||
|
|
92b2286e18 | ||
|
|
6776c2c429 | ||
|
|
e3a1df4aa6 | ||
|
|
f7f3889e41 | ||
|
|
be86404d8b | ||
|
|
7ba5ed37d4 | ||
|
|
046d7ead70 | ||
|
|
ee959d95a1 | ||
|
|
7a181e4b1a | ||
|
|
2bf04ce3f7 | ||
|
|
f47916cf82 |
@@ -1,29 +1,20 @@
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"Bash(meshcore-cli:*)",
|
||||
"Bash(.direnv/python-3.14/bin/pip index:*)",
|
||||
"Bash(.direnv/python-3.14/bin/pip install:*)",
|
||||
"Bash(.direnv/python-3.12/bin/pip:*)",
|
||||
"Bash(rrdtool info:*)",
|
||||
"Bash(rrdtool fetch:*)",
|
||||
"Bash(cat:*)",
|
||||
"Bash(xargs cat:*)",
|
||||
"Bash(done)",
|
||||
"Bash(ls:*)",
|
||||
"Bash(source .envrc)",
|
||||
"Bash(.direnv/python-3.12/bin/python:*)",
|
||||
"Bash(xargs:*)",
|
||||
"Bash(git add:*)",
|
||||
"Bash(git commit:*)",
|
||||
"Bash(git push)",
|
||||
"Bash(find:*)",
|
||||
"Bash(tree:*)",
|
||||
"Skill(frontend-design:frontend-design)",
|
||||
"Skill(frontend-design:frontend-design:*)",
|
||||
"Bash(direnv exec:*)",
|
||||
"Skill(frontend-design)",
|
||||
"Skill(frontend-design:*)"
|
||||
"Skill(frontend-design:*)",
|
||||
"Bash(gh run view:*)",
|
||||
"Bash(gh run list:*)",
|
||||
"Bash(gh release view:*)",
|
||||
"Bash(gh release list:*)",
|
||||
"Bash(gh workflow list:*)"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
42
.dockerignore
Normal file
42
.dockerignore
Normal file
@@ -0,0 +1,42 @@
|
||||
# Git
|
||||
.git
|
||||
.gitignore
|
||||
|
||||
# Python
|
||||
__pycache__
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.so
|
||||
.Python
|
||||
.venv
|
||||
venv
|
||||
ENV
|
||||
env
|
||||
|
||||
# IDE
|
||||
.idea
|
||||
.vscode
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
|
||||
# Project directories (generated/runtime)
|
||||
data/
|
||||
out/
|
||||
|
||||
# Configuration (use environment variables in Docker)
|
||||
.envrc
|
||||
.direnv
|
||||
meshcore.conf
|
||||
|
||||
# Documentation
|
||||
docs/
|
||||
*.md
|
||||
!README.md
|
||||
|
||||
# Development files
|
||||
.claude/
|
||||
*.log
|
||||
|
||||
# macOS
|
||||
.DS_Store
|
||||
235
.github/workflows/docker-publish.yml
vendored
Normal file
235
.github/workflows/docker-publish.yml
vendored
Normal file
@@ -0,0 +1,235 @@
|
||||
# Build and publish Docker images to GitHub Container Registry
|
||||
#
|
||||
# Triggers:
|
||||
# - On release: Build with version tags (X.Y.Z, X.Y, latest)
|
||||
# - On schedule: Rebuild all tags with fresh base image (OS patches)
|
||||
# - Manual: For testing, optional push
|
||||
#
|
||||
# Security:
|
||||
# - All actions pinned by SHA
|
||||
# - Vulnerability scanning with Trivy
|
||||
# - SBOM and provenance attestation
|
||||
|
||||
name: Docker Build and Publish
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
schedule:
|
||||
# Daily at 4 AM UTC - rebuild with fresh base image
|
||||
- cron: "0 4 * * *"
|
||||
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
push:
|
||||
description: "Push image to registry"
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
attestations: write
|
||||
artifact-metadata: write
|
||||
|
||||
concurrency:
|
||||
group: docker-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
# For nightly builds, get the latest release version
|
||||
- name: Get latest release version
|
||||
id: get-version
|
||||
if: github.event_name == 'schedule'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Get latest release tag
|
||||
LATEST_TAG=$(gh release view --json tagName -q '.tagName' 2>/dev/null || echo "")
|
||||
if [ -z "$LATEST_TAG" ]; then
|
||||
echo "No releases found, skipping nightly build"
|
||||
echo "skip=true" >> $GITHUB_OUTPUT
|
||||
exit 0
|
||||
fi
|
||||
# Strip 'v' prefix if present
|
||||
VERSION=$(echo "$LATEST_TAG" | sed 's/^v//')
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
echo "skip=false" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Skip if no releases
|
||||
if: github.event_name == 'schedule' && steps.get-version.outputs.skip == 'true'
|
||||
run: |
|
||||
echo "No releases found, skipping nightly build"
|
||||
exit 0
|
||||
|
||||
- name: Set up QEMU
|
||||
if: "!(github.event_name == 'schedule' && steps.get-version.outputs.skip == 'true')"
|
||||
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
if: "!(github.event_name == 'schedule' && steps.get-version.outputs.skip == 'true')"
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
|
||||
- name: Log in to Container Registry
|
||||
if: "!(github.event_name == 'schedule' && steps.get-version.outputs.skip == 'true')"
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Generate tags based on event type
|
||||
- name: Extract metadata (release)
|
||||
id: meta-release
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
# X.Y.Z
|
||||
type=semver,pattern={{version}}
|
||||
# X.Y
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
# latest
|
||||
type=raw,value=latest
|
||||
|
||||
- name: Extract metadata (nightly)
|
||||
id: meta-nightly
|
||||
if: github.event_name == 'schedule' && steps.get-version.outputs.skip != 'true'
|
||||
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
# Rebuild version tags with OS patches
|
||||
type=raw,value=${{ steps.get-version.outputs.version }}
|
||||
# Nightly tags
|
||||
type=raw,value=nightly
|
||||
type=raw,value=nightly-{{date 'YYYYMMDD'}}
|
||||
# Also update latest with security patches
|
||||
type=raw,value=latest
|
||||
|
||||
- name: Extract metadata (manual)
|
||||
id: meta-manual
|
||||
if: github.event_name == 'workflow_dispatch'
|
||||
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=sha,prefix=sha-
|
||||
|
||||
# Build image (release - with cache)
|
||||
- name: Build and push (release)
|
||||
id: build-release
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.meta-release.outputs.tags }}
|
||||
labels: ${{ steps.meta-release.outputs.labels }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
provenance: true
|
||||
sbom: true
|
||||
|
||||
# Build image (nightly - no cache, fresh base)
|
||||
- name: Build and push (nightly)
|
||||
id: build-nightly
|
||||
if: github.event_name == 'schedule' && steps.get-version.outputs.skip != 'true'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.meta-nightly.outputs.tags }}
|
||||
labels: ${{ steps.meta-nightly.outputs.labels }}
|
||||
pull: true
|
||||
no-cache: true
|
||||
provenance: true
|
||||
sbom: true
|
||||
|
||||
# Build image (manual)
|
||||
- name: Build and push (manual)
|
||||
id: build-manual
|
||||
if: github.event_name == 'workflow_dispatch'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: ${{ inputs.push }}
|
||||
tags: ${{ steps.meta-manual.outputs.tags }}
|
||||
labels: ${{ steps.meta-manual.outputs.labels }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
provenance: true
|
||||
sbom: true
|
||||
|
||||
# Determine image tag for scanning and testing
|
||||
- name: Determine image tag
|
||||
id: image-tag
|
||||
if: "!(github.event_name == 'schedule' && steps.get-version.outputs.skip == 'true')"
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" = "release" ]; then
|
||||
# Strip 'v' prefix to match semver tag format from metadata-action
|
||||
echo "tag=$(echo '${{ github.event.release.tag_name }}' | sed 's/^v//')" >> $GITHUB_OUTPUT
|
||||
elif [ "${{ github.event_name }}" = "schedule" ]; then
|
||||
echo "tag=nightly" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "tag=sha-${{ github.sha }}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Vulnerability scanning
|
||||
- name: Run Trivy vulnerability scanner
|
||||
if: "!(github.event_name == 'schedule' && steps.get-version.outputs.skip == 'true')"
|
||||
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # v0.33.1
|
||||
with:
|
||||
image-ref: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image-tag.outputs.tag }}
|
||||
format: "sarif"
|
||||
output: "trivy-results.sarif"
|
||||
severity: "CRITICAL,HIGH"
|
||||
continue-on-error: true
|
||||
|
||||
- name: Upload Trivy scan results
|
||||
if: "!(github.event_name == 'schedule' && steps.get-version.outputs.skip == 'true')"
|
||||
uses: github/codeql-action/upload-sarif@6e4b8622b82fab3c6ad2a7814fad1effc7615bc8 # v3.28.4
|
||||
with:
|
||||
sarif_file: "trivy-results.sarif"
|
||||
continue-on-error: true
|
||||
|
||||
# Smoke test - verify image runs correctly
|
||||
# Skip for manual runs when push is disabled (image not available to pull)
|
||||
- name: Smoke test
|
||||
if: "!(github.event_name == 'schedule' && steps.get-version.outputs.skip == 'true') && !(github.event_name == 'workflow_dispatch' && inputs.push == false)"
|
||||
run: |
|
||||
IMAGE_TAG="${{ steps.image-tag.outputs.tag }}"
|
||||
|
||||
docker pull ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${IMAGE_TAG}
|
||||
|
||||
# Test that Python and key modules are available
|
||||
docker run --rm ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${IMAGE_TAG} \
|
||||
python -c "from meshmon.db import init_db; from meshmon.env import get_config; print('Smoke test passed')"
|
||||
|
||||
# Attestation (releases only)
|
||||
- name: Generate attestation
|
||||
if: github.event_name == 'release'
|
||||
uses: actions/attest-build-provenance@46a583fd92dfbf46b772907a9740f888f4324bb9 # v3.1.0
|
||||
with:
|
||||
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.build-release.outputs.digest }}
|
||||
push-to-registry: true
|
||||
13
.github/workflows/release-please.yml
vendored
13
.github/workflows/release-please.yml
vendored
@@ -5,6 +5,17 @@ on:
|
||||
branches:
|
||||
- main
|
||||
|
||||
# Note: We use a fine-grained PAT (RELEASE_PLEASE_TOKEN) instead of GITHUB_TOKEN
|
||||
# because GITHUB_TOKEN cannot trigger other workflows (like docker-publish.yml).
|
||||
# This is a GitHub security feature to prevent infinite workflow loops.
|
||||
#
|
||||
# The PAT requires these permissions (scoped to this repository only):
|
||||
# - Contents: Read and write (for creating releases and pushing tags)
|
||||
# - Pull requests: Read and write (for creating/updating release PRs)
|
||||
#
|
||||
# To rotate: Settings > Developer settings > Fine-grained tokens
|
||||
# Recommended rotation: Every 90 days
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
@@ -16,6 +27,6 @@ jobs:
|
||||
- name: Release Please
|
||||
uses: googleapis/release-please-action@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
token: ${{ secrets.RELEASE_PLEASE_TOKEN }}
|
||||
config-file: release-please-config.json
|
||||
manifest-file: .release-please-manifest.json
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -17,6 +17,9 @@ build/
|
||||
.env
|
||||
meshcore.conf
|
||||
|
||||
# Docker local overrides
|
||||
docker-compose.override.yml
|
||||
|
||||
# Data directories (keep structure, ignore content)
|
||||
data/snapshots/companion/**/*.json
|
||||
data/snapshots/repeater/**/*.json
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
{
|
||||
".": "0.2.0"
|
||||
".": "0.2.9"
|
||||
}
|
||||
|
||||
90
CHANGELOG.md
90
CHANGELOG.md
@@ -4,6 +4,96 @@ All notable changes to this project will be documented in this file.
|
||||
|
||||
This changelog is automatically generated by [release-please](https://github.com/googleapis/release-please) based on [Conventional Commits](https://www.conventionalcommits.org/).
|
||||
|
||||
## [0.2.9](https://github.com/jorijn/meshcore-stats/compare/v0.2.8...v0.2.9) (2026-01-06)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* tooltip positioning and locale-aware time formatting ([f7923b9](https://github.com/jorijn/meshcore-stats/commit/f7923b94346c3d492e7291ecca208ab704176308))
|
||||
|
||||
|
||||
### Continuous Integration
|
||||
|
||||
* add artifact-metadata permission for attestation storage records ([c978844](https://github.com/jorijn/meshcore-stats/commit/c978844271eafd35f4778d748d7c832309d1614f))
|
||||
|
||||
## [0.2.8](https://github.com/jorijn/meshcore-stats/compare/v0.2.7...v0.2.8) (2026-01-06)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* normalize reporting outputs and chart tooltips ([e37aef6](https://github.com/jorijn/meshcore-stats/commit/e37aef6c5e55d2077baf4ee35abdff0562983d69))
|
||||
|
||||
## [0.2.7](https://github.com/jorijn/meshcore-stats/compare/v0.2.6...v0.2.7) (2026-01-06)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add telemetry collection for companion and repeater nodes ([#24](https://github.com/jorijn/meshcore-stats/issues/24)) ([a3015e2](https://github.com/jorijn/meshcore-stats/commit/a3015e2209781bdd7c317fa992ced6afa19efe61))
|
||||
|
||||
## [0.2.6](https://github.com/jorijn/meshcore-stats/compare/v0.2.5...v0.2.6) (2026-01-05)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add tmpfs mount for fontconfig cache to fix read-only filesystem errors ([3d0d903](https://github.com/jorijn/meshcore-stats/commit/3d0d90304cec5ebcdb34935400de31afd62e258d))
|
||||
|
||||
## [0.2.5](https://github.com/jorijn/meshcore-stats/compare/v0.2.4...v0.2.5) (2026-01-05)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add automatic serial port locking to prevent concurrent access ([3c5eace](https://github.com/jorijn/meshcore-stats/commit/3c5eace2207279c55401dd8fa27294d5a94bb682))
|
||||
|
||||
|
||||
### Documentation
|
||||
|
||||
* fix formatting in architecture diagram ([7eee23e](https://github.com/jorijn/meshcore-stats/commit/7eee23ec40ff9441515b4ac18fbb7cd3f87fa4b5))
|
||||
|
||||
## [0.2.4](https://github.com/jorijn/meshcore-stats/compare/v0.2.3...v0.2.4) (2026-01-05)
|
||||
|
||||
|
||||
### Documentation
|
||||
|
||||
* rewrite README with Docker-first installation guide ([6ac5262](https://github.com/jorijn/meshcore-stats/commit/6ac52629d3025db69f9334d3185b97ce16cd3e4b))
|
||||
|
||||
## [0.2.3](https://github.com/jorijn/meshcore-stats/compare/v0.2.2...v0.2.3) (2026-01-05)
|
||||
|
||||
|
||||
### Continuous Integration
|
||||
|
||||
* use fine-grained PAT for release-please to trigger Docker builds ([2730a9d](https://github.com/jorijn/meshcore-stats/commit/2730a9d906eeb5761af29dd69e8d4ebbfca50491))
|
||||
|
||||
## [0.2.2](https://github.com/jorijn/meshcore-stats/compare/v0.2.1...v0.2.2) (2026-01-05)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **ci:** skip smoke test when manual push is disabled ([e3a1df4](https://github.com/jorijn/meshcore-stats/commit/e3a1df4aa64bf87c32848be0d5c5e5ce16968186))
|
||||
* move serial device config to override file ([6776c2c](https://github.com/jorijn/meshcore-stats/commit/6776c2c4293b71f4649a42dcf6c517f3b44469b5))
|
||||
|
||||
## [0.2.1](https://github.com/jorijn/meshcore-stats/compare/v0.2.0...v0.2.1) (2026-01-05)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add Docker containerization with GitHub Actions CI/CD ([7ba5ed3](https://github.com/jorijn/meshcore-stats/commit/7ba5ed37d40d7c5e0a7e206cedcf6f70096759e5))
|
||||
* add Docker containerization with GitHub Actions CI/CD ([7a181e4](https://github.com/jorijn/meshcore-stats/commit/7a181e4b1ac581b2b897cd70aa77c5c983c6e80a))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* improve Docker configuration and documentation ([ee959d9](https://github.com/jorijn/meshcore-stats/commit/ee959d95a18afeeab47d41cc85ac435ba2a87016))
|
||||
|
||||
|
||||
### Miscellaneous Chores
|
||||
|
||||
* clean up permissions in settings.local.json ([f47916c](https://github.com/jorijn/meshcore-stats/commit/f47916cf82118bb30d80d901773f0bfaf2de315a))
|
||||
|
||||
|
||||
### Code Refactoring
|
||||
|
||||
* rename docker-compose.development.yml to docker-compose.dev.yml ([046d7ea](https://github.com/jorijn/meshcore-stats/commit/046d7ead708cc2807f10bcd36e1e1cf7494a8f45))
|
||||
|
||||
## [0.2.0](https://github.com/jorijn/meshcore-stats/compare/v0.1.0...v0.2.0) (2026-01-04)
|
||||
|
||||
|
||||
|
||||
124
CLAUDE.md
124
CLAUDE.md
@@ -16,6 +16,8 @@ Always edit the source templates, then regenerate with `python scripts/render_si
|
||||
|
||||
## Running Commands
|
||||
|
||||
**IMPORTANT: Always activate the virtual environment before running any Python commands.**
|
||||
|
||||
```bash
|
||||
cd /path/to/meshcore-stats
|
||||
source .venv/bin/activate
|
||||
@@ -171,6 +173,106 @@ Phase 3: Render → Static HTML site (inline SVG)
|
||||
Phase 4: Render → Reports (monthly/yearly statistics)
|
||||
```
|
||||
|
||||
## Docker Architecture
|
||||
|
||||
The project provides Docker containerization for easy deployment. Two containers work together:
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────────────────┐
|
||||
│ Docker Compose │
|
||||
│ ┌─────────────────────┐ ┌─────────────────────────────────┐ │
|
||||
│ │ meshcore-stats │ │ nginx │ │
|
||||
│ │ ┌───────────────┐ │ │ │ │
|
||||
│ │ │ Ofelia │ │ │ Serves static site on :8080 │ │
|
||||
│ │ │ (scheduler) │ │ │ │ │
|
||||
│ │ └───────┬───────┘ │ └──────────────▲──────────────────┘ │
|
||||
│ │ │ │ │ │
|
||||
│ │ ┌──────▼──────┐ │ ┌─────────┴─────────┐ │
|
||||
│ │ │ Python │ │ │ output_data │ │
|
||||
│ │ │ Scripts │───┼────────►│ (named volume) │ │
|
||||
│ │ └─────────────┘ │ └───────────────────┘ │
|
||||
│ │ │ │ │
|
||||
│ └──────────┼──────────┘ │
|
||||
│ │ │
|
||||
│ ┌────────▼────────┐ │
|
||||
│ │ ./data/state │ │
|
||||
│ │ (bind mount) │ │
|
||||
│ └─────────────────┘ │
|
||||
└─────────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
### Container Files
|
||||
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `Dockerfile` | Multi-stage build: Python + Ofelia scheduler |
|
||||
| `docker-compose.yml` | Production deployment using published ghcr.io image |
|
||||
| `docker-compose.dev.yml` | Development override for local builds |
|
||||
| `docker-compose.override.yml` | Local overrides (gitignored) |
|
||||
| `docker/ofelia.ini` | Scheduler configuration (cron jobs) |
|
||||
| `docker/nginx.conf` | nginx configuration for static site serving |
|
||||
| `.dockerignore` | Files excluded from Docker build context |
|
||||
|
||||
### Docker Compose Files
|
||||
|
||||
**Production** (`docker-compose.yml`):
|
||||
- Uses published image from `ghcr.io/jorijn/meshcore-stats`
|
||||
- Image version managed by release-please via `x-release-please-version` placeholder
|
||||
- Suitable for end users
|
||||
|
||||
**Development** (`docker-compose.dev.yml`):
|
||||
- Override file that builds locally instead of pulling from registry
|
||||
- Mounts `src/` and `scripts/` for live code changes
|
||||
- Usage: `docker compose -f docker-compose.yml -f docker-compose.dev.yml up --build`
|
||||
|
||||
**Local overrides** (`docker-compose.override.yml`):
|
||||
- Gitignored file for local customizations (e.g., device paths, env_file)
|
||||
- Automatically merged when running `docker compose up`
|
||||
|
||||
### Ofelia Scheduler
|
||||
|
||||
[Ofelia](https://github.com/mcuadros/ofelia) is a lightweight job scheduler designed for Docker. It replaces cron for container environments.
|
||||
|
||||
Jobs configured in `docker/ofelia.ini`:
|
||||
- `collect-companion`: Every minute (with `no-overlap=true`)
|
||||
- `collect-repeater`: Every 15 minutes at :01, :16, :31, :46 (with `no-overlap=true`)
|
||||
- `render-charts`: Every 5 minutes
|
||||
- `render-site`: Every 5 minutes
|
||||
- `render-reports`: Daily at midnight
|
||||
- `db-maintenance`: Monthly at 3 AM on the 1st
|
||||
|
||||
### GitHub Actions Workflow
|
||||
|
||||
`.github/workflows/docker-publish.yml` builds and publishes Docker images:
|
||||
|
||||
| Trigger | Tags Created |
|
||||
|---------|--------------|
|
||||
| Release | `X.Y.Z`, `X.Y`, `latest` |
|
||||
| Nightly (4 AM UTC) | Rebuilds all version tags + `nightly`, `nightly-YYYYMMDD` |
|
||||
| Manual | `sha-xxxxxx` |
|
||||
|
||||
**Nightly rebuilds** ensure version tags always include the latest OS security patches. This is a common pattern used by official Docker images (nginx, postgres, node). Users needing reproducibility should pin by SHA digest or use dated nightly tags.
|
||||
|
||||
All GitHub Actions are pinned by full SHA for security. Dependabot can be configured to update these automatically.
|
||||
|
||||
### Version Placeholder
|
||||
|
||||
The version in `docker-compose.yml` uses release-please's placeholder syntax:
|
||||
```yaml
|
||||
image: ghcr.io/jorijn/meshcore-stats:0.3.0 # x-release-please-version
|
||||
```
|
||||
|
||||
This is automatically updated when a new release is created.
|
||||
|
||||
### Agent Review Guidelines
|
||||
|
||||
When reviewing Docker-related changes, always provide the **full plan or implementation** to review agents. Do not summarize or abbreviate - agents need complete context to provide accurate feedback.
|
||||
|
||||
Relevant agents for Docker reviews:
|
||||
- **k8s-security-reviewer**: Container security, RBAC, secrets handling
|
||||
- **cicd-pipeline-specialist**: GitHub Actions workflows, build pipelines
|
||||
- **python-code-reviewer**: Dockerfile Python-specific issues (venv PATH, runtime libs)
|
||||
|
||||
## Directory Structure
|
||||
|
||||
```
|
||||
@@ -254,11 +356,17 @@ All configuration via `meshcore.conf` or environment variables. The config file
|
||||
|
||||
### Timeouts & Retry
|
||||
- `REMOTE_TIMEOUT_S`: Minimum timeout for LoRa requests (default: 10)
|
||||
- `REMOTE_RETRY_ATTEMPTS`: Number of retry attempts (default: 5)
|
||||
- `REMOTE_RETRY_ATTEMPTS`: Number of retry attempts (default: 2)
|
||||
- `REMOTE_RETRY_BACKOFF_S`: Seconds between retries (default: 4)
|
||||
- `REMOTE_CB_FAILS`: Failures before circuit breaker opens (default: 6)
|
||||
- `REMOTE_CB_COOLDOWN_S`: Circuit breaker cooldown (default: 3600)
|
||||
|
||||
### Telemetry Collection
|
||||
- `TELEMETRY_ENABLED`: Enable environmental telemetry collection from repeater (0/1, default: 0)
|
||||
- `TELEMETRY_TIMEOUT_S`: Timeout for telemetry requests (default: 10)
|
||||
- `TELEMETRY_RETRY_ATTEMPTS`: Retry attempts for telemetry (default: 2)
|
||||
- `TELEMETRY_RETRY_BACKOFF_S`: Backoff between telemetry retries (default: 4)
|
||||
|
||||
### Intervals
|
||||
- `COMPANION_STEP`: Collection interval for companion (default: 60s)
|
||||
- `REPEATER_STEP`: Collection interval for repeater (default: 900s / 15min)
|
||||
@@ -310,6 +418,12 @@ Metrics are classified as either **gauge** or **counter** in `src/meshmon/metric
|
||||
|
||||
Counter metrics are converted to rates during chart rendering by calculating deltas between consecutive readings.
|
||||
|
||||
- **TELEMETRY**: Environmental sensor data (when `TELEMETRY_ENABLED=1`):
|
||||
- Stored with `telemetry.` prefix: `telemetry.temperature.0`, `telemetry.humidity.0`, `telemetry.barometer.0`
|
||||
- Channel number distinguishes multiple sensors of the same type
|
||||
- Compound values (e.g., GPS) stored as: `telemetry.gps.0.latitude`, `telemetry.gps.0.longitude`
|
||||
- Telemetry collection does NOT affect circuit breaker state
|
||||
|
||||
## Database Schema
|
||||
|
||||
Metrics are stored in a SQLite database at `data/state/metrics.db` with WAL mode enabled for concurrent access.
|
||||
@@ -594,16 +708,14 @@ meshcore-cli -s /dev/ttyACM0 reset_path "repeater name"
|
||||
|
||||
## Cron Setup (Example)
|
||||
|
||||
Use `flock` to prevent USB serial conflicts when companion and repeater collection overlap.
|
||||
|
||||
```cron
|
||||
MESHCORE=/path/to/meshcore-stats
|
||||
|
||||
# Companion: every minute
|
||||
* * * * * cd $MESHCORE && flock -w 60 /tmp/meshcore.lock .venv/bin/python scripts/collect_companion.py
|
||||
* * * * * cd $MESHCORE && .venv/bin/python scripts/collect_companion.py
|
||||
|
||||
# Repeater: every 15 minutes (offset by 1 min for staggering)
|
||||
1,16,31,46 * * * * cd $MESHCORE && flock -w 60 /tmp/meshcore.lock .venv/bin/python scripts/collect_repeater.py
|
||||
1,16,31,46 * * * * cd $MESHCORE && .venv/bin/python scripts/collect_repeater.py
|
||||
|
||||
# Charts: every 5 minutes (generates SVG charts from database)
|
||||
*/5 * * * * cd $MESHCORE && .venv/bin/python scripts/render_charts.py
|
||||
@@ -617,7 +729,7 @@ MESHCORE=/path/to/meshcore-stats
|
||||
|
||||
**Notes:**
|
||||
- `cd $MESHCORE` is required because paths in the config are relative to the project root
|
||||
- `flock -w 60` waits up to 60 seconds for the lock, preventing USB serial conflicts
|
||||
- Serial port locking is handled automatically via `fcntl.flock()` in Python (no external `flock` needed)
|
||||
|
||||
## Adding New Metrics
|
||||
|
||||
|
||||
112
Dockerfile
Normal file
112
Dockerfile
Normal file
@@ -0,0 +1,112 @@
|
||||
# =============================================================================
|
||||
# Stage 1: Build dependencies
|
||||
# =============================================================================
|
||||
FROM python:3.12-slim-bookworm AS builder
|
||||
|
||||
# Ofelia version and checksums (verified from GitHub releases)
|
||||
ARG OFELIA_VERSION=0.3.12
|
||||
ARG TARGETARCH
|
||||
ARG OFELIA_SHA256_AMD64=cf06d2199abafbd3aa5afe0f8266e478818faacd11555b99200707321035c931
|
||||
ARG OFELIA_SHA256_ARM64=57760ef7f17a2cd55b5b1e1946f79b91b24bde40d47e81a0d75fd1470d883c1a
|
||||
|
||||
# Install build dependencies for Python packages
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
gcc \
|
||||
libfreetype6-dev \
|
||||
libpng-dev \
|
||||
curl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Download and verify Ofelia binary in builder stage (keeps curl out of runtime)
|
||||
RUN set -ex; \
|
||||
if [ "$TARGETARCH" = "amd64" ]; then \
|
||||
OFELIA_SHA256="$OFELIA_SHA256_AMD64"; \
|
||||
elif [ "$TARGETARCH" = "arm64" ]; then \
|
||||
OFELIA_SHA256="$OFELIA_SHA256_ARM64"; \
|
||||
else \
|
||||
echo "Unsupported architecture: $TARGETARCH" && exit 1; \
|
||||
fi; \
|
||||
curl -fsSL "https://github.com/mcuadros/ofelia/releases/download/v${OFELIA_VERSION}/ofelia_${OFELIA_VERSION}_linux_${TARGETARCH}.tar.gz" -o /tmp/ofelia.tar.gz \
|
||||
&& echo "${OFELIA_SHA256} /tmp/ofelia.tar.gz" | sha256sum -c - \
|
||||
&& tar -xzf /tmp/ofelia.tar.gz -C /usr/local/bin ofelia \
|
||||
&& rm /tmp/ofelia.tar.gz \
|
||||
&& chmod +x /usr/local/bin/ofelia
|
||||
|
||||
# Create virtual environment
|
||||
RUN python -m venv /opt/venv
|
||||
ENV PATH="/opt/venv/bin:$PATH"
|
||||
|
||||
# Install Python dependencies
|
||||
COPY requirements.txt .
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# =============================================================================
|
||||
# Stage 2: Runtime
|
||||
# =============================================================================
|
||||
FROM python:3.12-slim-bookworm
|
||||
|
||||
# OCI Labels
|
||||
LABEL org.opencontainers.image.source="https://github.com/jorijn/meshcore-stats"
|
||||
LABEL org.opencontainers.image.description="MeshCore Stats - LoRa mesh network monitoring"
|
||||
LABEL org.opencontainers.image.licenses="MIT"
|
||||
|
||||
# Install runtime dependencies
|
||||
# - tini: init system for proper signal handling
|
||||
# - libfreetype6, libpng16-16: matplotlib runtime libraries
|
||||
# - fontconfig, fonts-dejavu-core: fonts for chart text rendering
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
tini \
|
||||
libfreetype6 \
|
||||
libpng16-16 \
|
||||
fontconfig \
|
||||
fonts-dejavu-core \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
# Build font cache for matplotlib
|
||||
&& fc-cache -f \
|
||||
# Remove setuid/setgid binaries for security
|
||||
&& find / -perm /6000 -type f -exec chmod a-s {} \; 2>/dev/null || true
|
||||
|
||||
# Create non-root user with dialout group for serial access
|
||||
RUN groupadd -g 1000 meshmon \
|
||||
&& useradd -u 1000 -g meshmon -G dialout -s /sbin/nologin meshmon \
|
||||
&& mkdir -p /data/state /out /tmp/matplotlib \
|
||||
&& chown -R meshmon:meshmon /data /out /tmp/matplotlib
|
||||
|
||||
# Copy Ofelia binary from builder (keeps curl out of runtime image)
|
||||
COPY --from=builder /usr/local/bin/ofelia /usr/local/bin/ofelia
|
||||
|
||||
# Copy virtual environment from builder
|
||||
COPY --from=builder /opt/venv /opt/venv
|
||||
|
||||
# Copy application code
|
||||
COPY --chown=meshmon:meshmon src/ /app/src/
|
||||
COPY --chown=meshmon:meshmon scripts/ /app/scripts/
|
||||
COPY --chown=meshmon:meshmon docker/ofelia.ini /app/ofelia.ini
|
||||
|
||||
# Environment configuration
|
||||
# - PATH: Include venv so Ofelia can run Python
|
||||
# - PYTHONPATH: Allow imports from src/meshmon
|
||||
# - PYTHONUNBUFFERED: Ensure logs are output immediately
|
||||
# - PYTHONDONTWRITEBYTECODE: Don't create .pyc files
|
||||
# - MPLCONFIGDIR: Matplotlib font cache directory
|
||||
ENV PATH="/opt/venv/bin:$PATH" \
|
||||
PYTHONPATH=/app/src \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
PYTHONDONTWRITEBYTECODE=1 \
|
||||
MPLCONFIGDIR=/tmp/matplotlib
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Run as non-root user
|
||||
USER meshmon
|
||||
|
||||
# Use tini as init system for proper signal handling
|
||||
ENTRYPOINT ["/usr/bin/tini", "--"]
|
||||
|
||||
# Run Ofelia scheduler
|
||||
CMD ["ofelia", "daemon", "--config=/app/ofelia.ini"]
|
||||
|
||||
# Health check - verify database is accessible
|
||||
HEALTHCHECK --interval=5m --timeout=30s --start-period=60s --retries=3 \
|
||||
CMD python -c "import sqlite3; sqlite3.connect('/data/state/metrics.db').execute('SELECT 1')" || exit 1
|
||||
577
README.md
577
README.md
@@ -1,6 +1,6 @@
|
||||
# MeshCore Stats
|
||||
|
||||
A Python-based monitoring system for a MeshCore repeater node and its companion. Collects metrics from both devices, stores them in a SQLite database, and generates a static website with interactive SVG charts and statistics.
|
||||
A monitoring system for MeshCore LoRa mesh networks. Collects metrics from companion and repeater nodes, stores them in SQLite, and generates a static dashboard with interactive charts.
|
||||
|
||||
**Live demo:** [meshcore.jorijn.com](https://meshcore.jorijn.com)
|
||||
|
||||
@@ -9,346 +9,359 @@ A Python-based monitoring system for a MeshCore repeater node and its companion.
|
||||
<img src="docs/screenshot-2.png" width="49%" alt="MeshCore Stats Reports">
|
||||
</p>
|
||||
|
||||
## Features
|
||||
## Quick Start
|
||||
|
||||
- **Data Collection** - Collect metrics from companion (local) and repeater (remote) nodes
|
||||
- **Chart Rendering** - Generate interactive SVG charts from the database using matplotlib
|
||||
- **Static Site** - Generate a static HTML website with day/week/month/year views
|
||||
- **Reports** - Generate monthly and yearly statistics reports
|
||||
|
||||
## Requirements
|
||||
|
||||
### Python Dependencies
|
||||
|
||||
- Python 3.10+
|
||||
- meshcore >= 2.2.3
|
||||
- pyserial >= 3.5
|
||||
- jinja2 >= 3.1.0
|
||||
- matplotlib >= 3.8.0
|
||||
|
||||
### System Dependencies
|
||||
|
||||
- sqlite3 (for database maintenance script)
|
||||
|
||||
## Setup
|
||||
|
||||
### 1. Create Virtual Environment
|
||||
> **Linux only** - macOS and Windows users see [Platform Notes](#platform-notes) first.
|
||||
|
||||
```bash
|
||||
cd /path/to/meshcore-stats
|
||||
# Clone and configure
|
||||
git clone https://github.com/jorijn/meshcore-stats.git
|
||||
cd meshcore-stats
|
||||
cp meshcore.conf.example meshcore.conf
|
||||
# Edit meshcore.conf with your repeater name and password
|
||||
|
||||
# Create data directories (container runs as UID 1000)
|
||||
mkdir -p data/state out
|
||||
sudo chown -R 1000:1000 data out
|
||||
|
||||
# Add your serial device
|
||||
cat > docker-compose.override.yml << 'EOF'
|
||||
services:
|
||||
meshcore-stats:
|
||||
devices:
|
||||
- /dev/ttyACM0:/dev/ttyACM0
|
||||
EOF
|
||||
|
||||
# Start
|
||||
docker compose up -d
|
||||
|
||||
# Verify it's working. The various collection and render jobs will trigger after a few minutes.
|
||||
docker compose ps
|
||||
docker compose logs meshcore-stats | head -20
|
||||
|
||||
# View dashboard at http://localhost:8080
|
||||
```
|
||||
|
||||
## Features
|
||||
|
||||
- **Data Collection** - Metrics from local companion and remote repeater nodes
|
||||
- **Interactive Charts** - SVG charts with day/week/month/year views and tooltips
|
||||
- **Statistics Reports** - Monthly and yearly report generation
|
||||
- **Light/Dark Theme** - Automatic theme switching based on system preference
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Docker and Docker Compose V2
|
||||
- MeshCore companion node connected via USB serial
|
||||
- Remote repeater node reachable via LoRa from the companion
|
||||
|
||||
**Resource requirements:** ~100MB memory, ~100MB disk per year of data.
|
||||
|
||||
## Installation
|
||||
|
||||
### Docker (Recommended)
|
||||
|
||||
#### 1. Clone the Repository
|
||||
|
||||
```bash
|
||||
git clone https://github.com/jorijn/meshcore-stats.git
|
||||
cd meshcore-stats
|
||||
```
|
||||
|
||||
#### 2. Configure
|
||||
|
||||
Copy the example configuration and edit it:
|
||||
|
||||
```bash
|
||||
cp meshcore.conf.example meshcore.conf
|
||||
```
|
||||
|
||||
**Minimal required settings:**
|
||||
|
||||
```ini
|
||||
# Repeater identity (required)
|
||||
REPEATER_NAME=Your Repeater Name
|
||||
REPEATER_PASSWORD=your-admin-password
|
||||
|
||||
# Display names
|
||||
REPEATER_DISPLAY_NAME=My Repeater
|
||||
COMPANION_DISPLAY_NAME=My Companion
|
||||
```
|
||||
|
||||
See [meshcore.conf.example](meshcore.conf.example) for all available options.
|
||||
|
||||
#### 3. Create Data Directories
|
||||
|
||||
```bash
|
||||
mkdir -p data/state out
|
||||
sudo chown -R 1000:1000 data out
|
||||
```
|
||||
|
||||
The container runs as UID 1000, so directories must be writable by this user. If `sudo` is not available, you can relax the permissions using `chmod 777 data out`, but this is less secure.
|
||||
|
||||
#### 4. Configure Serial Device
|
||||
|
||||
Create `docker-compose.override.yml` to specify your serial device:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
meshcore-stats:
|
||||
devices:
|
||||
- /dev/ttyACM0:/dev/ttyACM0
|
||||
```
|
||||
|
||||
Ensure your user has serial port access:
|
||||
|
||||
```bash
|
||||
sudo usermod -aG dialout $USER
|
||||
# Log out and back in for changes to take effect
|
||||
```
|
||||
|
||||
#### 5. Start the Containers
|
||||
|
||||
```bash
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
After the various collection and render jobs have run, the dashboard will be available at **http://localhost:8080**.
|
||||
|
||||
#### Verify Installation
|
||||
|
||||
```bash
|
||||
# Check container status
|
||||
docker compose ps
|
||||
|
||||
# View logs
|
||||
docker compose logs -f meshcore-stats
|
||||
```
|
||||
|
||||
### Common Docker Commands
|
||||
|
||||
```bash
|
||||
# View real-time logs
|
||||
docker compose logs -f meshcore-stats
|
||||
|
||||
# Restart after configuration changes
|
||||
docker compose restart meshcore-stats
|
||||
|
||||
# Update to latest version (database migrations are automatic)
|
||||
docker compose pull && docker compose up -d
|
||||
|
||||
# Stop all containers
|
||||
docker compose down
|
||||
|
||||
# Backup database
|
||||
cp data/state/metrics.db data/state/metrics.db.backup
|
||||
```
|
||||
|
||||
> **Note**: `docker compose down` preserves your data. Use `docker compose down -v` only if you want to delete everything.
|
||||
|
||||
### Manual Installation (Alternative)
|
||||
|
||||
For environments where Docker is not available.
|
||||
|
||||
#### Requirements
|
||||
|
||||
- Python 3.10+
|
||||
- SQLite3
|
||||
|
||||
#### Setup
|
||||
|
||||
```bash
|
||||
cd meshcore-stats
|
||||
python3 -m venv .venv
|
||||
source .venv/bin/activate
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
|
||||
### 2. Configure
|
||||
|
||||
Copy the example configuration file and customize it:
|
||||
|
||||
```bash
|
||||
cp meshcore.conf.example meshcore.conf
|
||||
# Edit meshcore.conf with your settings
|
||||
```
|
||||
|
||||
The configuration file is automatically loaded by the scripts. Key settings to configure:
|
||||
#### Cron Setup
|
||||
|
||||
- **Connection**: `MESH_SERIAL_PORT`, `MESH_TRANSPORT`
|
||||
- **Repeater Identity**: `REPEATER_NAME`, `REPEATER_PASSWORD`
|
||||
- **Display Names**: `REPEATER_DISPLAY_NAME`, `COMPANION_DISPLAY_NAME`
|
||||
- **Location**: `REPORT_LOCATION_NAME`, `REPORT_LAT`, `REPORT_LON`, `REPORT_ELEV`
|
||||
- **Hardware Info**: `REPEATER_HARDWARE`, `COMPANION_HARDWARE`
|
||||
- **Radio Config**: `RADIO_FREQUENCY`, `RADIO_BANDWIDTH`, etc. (includes presets for different regions)
|
||||
|
||||
See `meshcore.conf.example` for all available options with documentation.
|
||||
|
||||
## Usage
|
||||
|
||||
### Manual Execution
|
||||
|
||||
```bash
|
||||
cd /path/to/meshcore-stats
|
||||
source .venv/bin/activate
|
||||
|
||||
# Collect companion data
|
||||
python scripts/collect_companion.py
|
||||
|
||||
# Collect repeater data
|
||||
python scripts/collect_repeater.py
|
||||
|
||||
# Generate static site (includes chart rendering)
|
||||
python scripts/render_site.py
|
||||
|
||||
# Generate reports
|
||||
python scripts/render_reports.py
|
||||
```
|
||||
|
||||
The configuration is automatically loaded from `meshcore.conf`.
|
||||
|
||||
### Cron Setup
|
||||
|
||||
Add these entries to your crontab (`crontab -e`):
|
||||
Add to your crontab (`crontab -e`):
|
||||
|
||||
```cron
|
||||
# MeshCore Stats - adjust path as needed
|
||||
MESHCORE=/home/user/meshcore-stats
|
||||
MESHCORE=/path/to/meshcore-stats
|
||||
|
||||
# Every minute: collect companion data
|
||||
* * * * * cd $MESHCORE && flock -w 60 /tmp/meshcore.lock .venv/bin/python scripts/collect_companion.py
|
||||
# Companion: every minute
|
||||
* * * * * cd $MESHCORE && .venv/bin/python scripts/collect_companion.py
|
||||
|
||||
# Every 15 minutes: collect repeater data
|
||||
1,16,31,46 * * * * cd $MESHCORE && flock -w 60 /tmp/meshcore.lock .venv/bin/python scripts/collect_repeater.py
|
||||
# Repeater: every 15 minutes
|
||||
1,16,31,46 * * * * cd $MESHCORE && .venv/bin/python scripts/collect_repeater.py
|
||||
|
||||
# Every 5 minutes: render site
|
||||
# Charts: every 5 minutes
|
||||
*/5 * * * * cd $MESHCORE && .venv/bin/python scripts/render_charts.py
|
||||
|
||||
# Site: every 5 minutes
|
||||
*/5 * * * * cd $MESHCORE && .venv/bin/python scripts/render_site.py
|
||||
|
||||
# Daily at midnight: generate reports
|
||||
# Reports: daily at midnight
|
||||
0 0 * * * cd $MESHCORE && .venv/bin/python scripts/render_reports.py
|
||||
|
||||
# Monthly at 3 AM on the 1st: database maintenance
|
||||
0 3 1 * * $MESHCORE/scripts/db_maintenance.sh
|
||||
```
|
||||
|
||||
**Notes:**
|
||||
- `cd $MESHCORE` is required because paths in the config are relative to the project root
|
||||
- `flock` prevents USB serial conflicts when companion and repeater collection overlap
|
||||
Serve the `out/` directory with any web server.
|
||||
|
||||
### Docker / Container Usage
|
||||
## Platform Notes
|
||||
|
||||
When running in Docker, you can skip the config file and pass environment variables directly:
|
||||
<details>
|
||||
<summary><strong>Linux</strong></summary>
|
||||
|
||||
Docker can access USB serial devices directly. Add your device to `docker-compose.override.yml`:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
meshcore-stats:
|
||||
devices:
|
||||
- /dev/ttyACM0:/dev/ttyACM0
|
||||
```
|
||||
|
||||
Common device paths:
|
||||
- `/dev/ttyACM0` - Arduino/native USB
|
||||
- `/dev/ttyUSB0` - USB-to-serial adapters
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>macOS</strong></summary>
|
||||
|
||||
Docker Desktop for macOS runs in a Linux VM and **cannot directly access USB serial devices**.
|
||||
|
||||
**Option 1: TCP Bridge (Recommended)**
|
||||
|
||||
Expose the serial port over TCP using socat:
|
||||
|
||||
```bash
|
||||
docker run -e MESH_SERIAL_PORT=/dev/ttyUSB0 -e REPEATER_NAME="My Repeater" ...
|
||||
# Install socat
|
||||
brew install socat
|
||||
|
||||
# Bridge serial to TCP (run in background)
|
||||
socat TCP-LISTEN:5000,fork,reuseaddr OPEN:/dev/cu.usbserial-0001,rawer,nonblock,ispeed=115200,ospeed=115200
|
||||
```
|
||||
|
||||
Environment variables always take precedence over `meshcore.conf`.
|
||||
Configure in `meshcore.conf`:
|
||||
|
||||
### Serving the Site
|
||||
|
||||
The static site is generated in the `out/` directory. You can serve it with any web server:
|
||||
|
||||
```bash
|
||||
# Simple Python server for testing
|
||||
cd out && python3 -m http.server 8080
|
||||
|
||||
# Or configure nginx/caddy to serve the out/ directory
|
||||
```ini
|
||||
MESH_TRANSPORT=tcp
|
||||
MESH_TCP_HOST=host.docker.internal
|
||||
MESH_TCP_PORT=5000
|
||||
```
|
||||
|
||||
## Project Structure
|
||||
**Option 2: Native Installation**
|
||||
|
||||
```
|
||||
meshcore-stats/
|
||||
├── requirements.txt
|
||||
├── README.md
|
||||
├── meshcore.conf.example # Example configuration
|
||||
├── meshcore.conf # Your configuration (create this)
|
||||
├── src/meshmon/
|
||||
│ ├── __init__.py
|
||||
│ ├── env.py # Environment variable parsing
|
||||
│ ├── log.py # Logging helper
|
||||
│ ├── meshcore_client.py # MeshCore connection and commands
|
||||
│ ├── db.py # SQLite database module
|
||||
│ ├── retry.py # Retry logic and circuit breaker
|
||||
│ ├── charts.py # Matplotlib SVG chart generation
|
||||
│ ├── html.py # HTML rendering
|
||||
│ ├── reports.py # Report generation
|
||||
│ ├── metrics.py # Metric type definitions
|
||||
│ ├── battery.py # Battery voltage to percentage conversion
|
||||
│ ├── migrations/ # SQL schema migrations
|
||||
│ │ ├── 001_initial_schema.sql
|
||||
│ │ └── 002_eav_schema.sql
|
||||
│ └── templates/ # Jinja2 HTML templates
|
||||
├── scripts/
|
||||
│ ├── collect_companion.py # Collect metrics from companion node
|
||||
│ ├── collect_repeater.py # Collect metrics from repeater node
|
||||
│ ├── render_charts.py # Generate SVG charts from database
|
||||
│ ├── render_site.py # Generate static HTML site
|
||||
│ ├── render_reports.py # Generate monthly/yearly reports
|
||||
│ └── db_maintenance.sh # Database VACUUM/ANALYZE
|
||||
├── data/
|
||||
│ └── state/
|
||||
│ ├── metrics.db # SQLite database (WAL mode)
|
||||
│ └── repeater_circuit.json
|
||||
└── out/ # Generated site
|
||||
├── .htaccess # Apache config (DirectoryIndex, caching)
|
||||
├── styles.css # Stylesheet
|
||||
├── chart-tooltip.js # Chart tooltip enhancement
|
||||
├── day.html # Repeater pages (entry point)
|
||||
├── week.html
|
||||
├── month.html
|
||||
├── year.html
|
||||
├── companion/
|
||||
│ ├── day.html
|
||||
│ ├── week.html
|
||||
│ ├── month.html
|
||||
│ └── year.html
|
||||
└── reports/
|
||||
├── index.html
|
||||
├── repeater/ # YYYY/MM reports
|
||||
└── companion/
|
||||
```
|
||||
Use the manual installation method with cron instead of Docker.
|
||||
|
||||
## Chart Features
|
||||
</details>
|
||||
|
||||
Charts are rendered as inline SVG using matplotlib with the following features:
|
||||
<details>
|
||||
<summary><strong>Windows (WSL2)</strong></summary>
|
||||
|
||||
- **Theme Support**: Automatic light/dark mode via CSS `prefers-color-scheme`
|
||||
- **Interactive Tooltips**: Hover to see exact values and timestamps
|
||||
- **Data Point Indicator**: Visual marker shows position on the chart line
|
||||
- **Mobile Support**: Touch-friendly tooltips
|
||||
- **Statistics**: Min/Avg/Max values displayed below each chart
|
||||
- **Period Views**: Day, week, month, and year time ranges
|
||||
WSL2 and Docker Desktop for Windows cannot directly access COM ports.
|
||||
|
||||
## Troubleshooting
|
||||
Use the TCP bridge approach (similar to macOS) or native installation.
|
||||
|
||||
### Serial Device Not Found
|
||||
</details>
|
||||
|
||||
If you see "No serial ports found" or connection fails:
|
||||
|
||||
1. Check that your device is connected:
|
||||
```bash
|
||||
ls -la /dev/ttyUSB* /dev/ttyACM*
|
||||
```
|
||||
|
||||
2. Check permissions (add user to dialout group):
|
||||
```bash
|
||||
sudo usermod -a -G dialout $USER
|
||||
# Log out and back in for changes to take effect
|
||||
```
|
||||
|
||||
3. Try specifying the port explicitly:
|
||||
```bash
|
||||
export MESH_SERIAL_PORT=/dev/ttyACM0
|
||||
```
|
||||
|
||||
4. Check dmesg for device detection:
|
||||
```bash
|
||||
dmesg | tail -20
|
||||
```
|
||||
|
||||
### Repeater Not Found
|
||||
|
||||
If the script cannot find the repeater contact:
|
||||
|
||||
1. The script will print all discovered contacts - check for the correct name
|
||||
2. Verify REPEATER_NAME matches exactly (case-sensitive)
|
||||
3. Try using REPEATER_KEY_PREFIX instead with the first 6-12 hex chars of the public key
|
||||
|
||||
### Circuit Breaker
|
||||
|
||||
If repeater collection shows "cooldown active":
|
||||
|
||||
1. This is normal after multiple failed remote requests
|
||||
2. Wait for the cooldown period (default 1 hour) or reset manually:
|
||||
```bash
|
||||
rm data/state/repeater_circuit.json
|
||||
```
|
||||
|
||||
## Environment Variables Reference
|
||||
## Configuration Reference
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| **Connection** | | |
|
||||
| `MESH_TRANSPORT` | serial | Connection type: serial, tcp, ble |
|
||||
| `MESH_SERIAL_PORT` | (auto) | Serial port path |
|
||||
| `MESH_SERIAL_BAUD` | 115200 | Baud rate |
|
||||
| `MESH_TCP_HOST` | localhost | TCP host |
|
||||
| `MESH_TCP_PORT` | 5000 | TCP port |
|
||||
| `MESH_BLE_ADDR` | - | BLE device address |
|
||||
| `MESH_BLE_PIN` | - | BLE PIN |
|
||||
| `MESH_DEBUG` | 0 | Enable debug output |
|
||||
| **Repeater Identity** | | |
|
||||
| `REPEATER_NAME` | - | Repeater advertised name |
|
||||
| `REPEATER_KEY_PREFIX` | - | Repeater public key prefix |
|
||||
| `REPEATER_PASSWORD` | - | Repeater login password |
|
||||
| `REPEATER_FETCH_ACL` | 0 | Also fetch ACL from repeater |
|
||||
| **Display Names** | | |
|
||||
| `REPEATER_DISPLAY_NAME` | Repeater Node | Display name for repeater in UI |
|
||||
| `COMPANION_DISPLAY_NAME` | Companion Node | Display name for companion in UI |
|
||||
| `REPEATER_NAME` | *required* | Advertised name to find in contacts |
|
||||
| `REPEATER_PASSWORD` | *required* | Admin password for repeater |
|
||||
| `REPEATER_KEY_PREFIX` | - | Alternative to `REPEATER_NAME`: hex prefix of public key |
|
||||
| **Connection** | | |
|
||||
| `MESH_TRANSPORT` | serial | Transport type: `serial`, `tcp`, or `ble` |
|
||||
| `MESH_SERIAL_PORT` | auto | Serial port path |
|
||||
| `MESH_TCP_HOST` | localhost | TCP host (for TCP transport) |
|
||||
| `MESH_TCP_PORT` | 5000 | TCP port (for TCP transport) |
|
||||
| **Display** | | |
|
||||
| `REPEATER_DISPLAY_NAME` | Repeater Node | Name shown in UI |
|
||||
| `COMPANION_DISPLAY_NAME` | Companion Node | Name shown in UI |
|
||||
| `REPEATER_HARDWARE` | LoRa Repeater | Hardware model for sidebar |
|
||||
| `COMPANION_HARDWARE` | LoRa Node | Hardware model for sidebar |
|
||||
| **Location** | | |
|
||||
| `REPORT_LOCATION_NAME` | Your Location | Full location name for reports |
|
||||
| `REPORT_LOCATION_SHORT` | Your Location | Short location for sidebar/meta |
|
||||
| `REPORT_LAT` | 0.0 | Latitude in decimal degrees |
|
||||
| `REPORT_LON` | 0.0 | Longitude in decimal degrees |
|
||||
| `REPORT_LOCATION_NAME` | Your Location | Full location for reports |
|
||||
| `REPORT_LAT` | 0.0 | Latitude |
|
||||
| `REPORT_LON` | 0.0 | Longitude |
|
||||
| `REPORT_ELEV` | 0.0 | Elevation |
|
||||
| `REPORT_ELEV_UNIT` | m | Elevation unit: "m" or "ft" |
|
||||
| **Hardware Info** | | |
|
||||
| `REPEATER_HARDWARE` | LoRa Repeater | Repeater hardware model for sidebar |
|
||||
| `COMPANION_HARDWARE` | LoRa Node | Companion hardware model for sidebar |
|
||||
| **Radio Config** | | |
|
||||
| `RADIO_FREQUENCY` | 869.618 MHz | Radio frequency for display |
|
||||
| `RADIO_BANDWIDTH` | 62.5 kHz | Radio bandwidth for display |
|
||||
| `RADIO_SPREAD_FACTOR` | SF8 | Spread factor for display |
|
||||
| `RADIO_CODING_RATE` | CR8 | Coding rate for display |
|
||||
| **Intervals** | | |
|
||||
| `COMPANION_STEP` | 60 | Companion data collection interval (seconds) |
|
||||
| `REPEATER_STEP` | 900 | Repeater data collection interval (seconds) |
|
||||
| `REMOTE_TIMEOUT_S` | 10 | Remote request timeout |
|
||||
| `REMOTE_RETRY_ATTEMPTS` | 2 | Max retry attempts |
|
||||
| `REMOTE_RETRY_BACKOFF_S` | 4 | Retry backoff delay |
|
||||
| `REMOTE_CB_FAILS` | 6 | Failures before circuit opens |
|
||||
| `REMOTE_CB_COOLDOWN_S` | 3600 | Circuit breaker cooldown |
|
||||
| **Paths** | | |
|
||||
| `STATE_DIR` | ./data/state | State file path |
|
||||
| `OUT_DIR` | ./out | Output site path |
|
||||
| **Radio** (display only) | | |
|
||||
| `RADIO_FREQUENCY` | 869.618 MHz | Frequency shown in sidebar |
|
||||
| `RADIO_BANDWIDTH` | 62.5 kHz | Bandwidth |
|
||||
| `RADIO_SPREAD_FACTOR` | SF8 | Spread factor |
|
||||
|
||||
## Metrics Reference
|
||||
See [meshcore.conf.example](meshcore.conf.example) for all options with regional radio presets.
|
||||
|
||||
The system uses an EAV (Entity-Attribute-Value) schema where firmware field names are stored directly in the database. This allows new metrics to be captured automatically without schema changes.
|
||||
## Troubleshooting
|
||||
|
||||
### Repeater Metrics
|
||||
| Symptom | Cause | Solution |
|
||||
|---------|-------|----------|
|
||||
| "Permission denied" on serial port | User not in dialout group | `sudo usermod -aG dialout $USER` then re-login |
|
||||
| Repeater shows "offline" status | No data or circuit breaker tripped | Check logs; delete `data/state/repeater_circuit.json` to reset |
|
||||
| Empty charts | Not enough data collected | Wait for 2+ collection cycles |
|
||||
| Container exits immediately | Missing or invalid configuration | Verify `meshcore.conf` exists and has required values |
|
||||
| "No serial ports found" | Device not connected/detected | Check `ls /dev/tty*` and device permissions |
|
||||
| Device path changed after reboot | USB enumeration order changed | Update path in `docker-compose.override.yml` or use udev rules |
|
||||
| "database is locked" errors | Maintenance script running | Wait for completion; check if VACUUM is running |
|
||||
|
||||
| Metric | Type | Display Unit | Description |
|
||||
|--------|------|--------------|-------------|
|
||||
| `bat` | Gauge | Voltage (V) | Battery voltage (stored in mV, displayed as V) |
|
||||
| `bat_pct` | Gauge | Battery (%) | Battery percentage (computed from voltage) |
|
||||
| `last_rssi` | Gauge | RSSI (dBm) | Signal strength of last packet |
|
||||
| `last_snr` | Gauge | SNR (dB) | Signal-to-noise ratio |
|
||||
| `noise_floor` | Gauge | dBm | Background RF noise |
|
||||
| `uptime` | Gauge | Days | Time since reboot (seconds ÷ 86400) |
|
||||
| `tx_queue_len` | Gauge | Queue depth | TX queue length |
|
||||
| `nb_recv` | Counter | Packets/min | Total packets received |
|
||||
| `nb_sent` | Counter | Packets/min | Total packets transmitted |
|
||||
| `airtime` | Counter | Seconds/min | TX airtime rate |
|
||||
| `rx_airtime` | Counter | Seconds/min | RX airtime rate |
|
||||
| `flood_dups` | Counter | Packets/min | Flood duplicate packets |
|
||||
| `direct_dups` | Counter | Packets/min | Direct duplicate packets |
|
||||
| `sent_flood` | Counter | Packets/min | Flood packets transmitted |
|
||||
| `recv_flood` | Counter | Packets/min | Flood packets received |
|
||||
| `sent_direct` | Counter | Packets/min | Direct packets transmitted |
|
||||
| `recv_direct` | Counter | Packets/min | Direct packets received |
|
||||
### Debug Logging
|
||||
|
||||
### Companion Metrics
|
||||
```bash
|
||||
# Enable debug mode in meshcore.conf
|
||||
MESH_DEBUG=1
|
||||
|
||||
| Metric | Type | Display Unit | Description |
|
||||
|--------|------|--------------|-------------|
|
||||
| `battery_mv` | Gauge | Voltage (V) | Battery voltage (stored in mV, displayed as V) |
|
||||
| `bat_pct` | Gauge | Battery (%) | Battery percentage (computed from voltage) |
|
||||
| `contacts` | Gauge | Count | Known mesh nodes |
|
||||
| `uptime_secs` | Gauge | Days | Time since reboot (seconds ÷ 86400) |
|
||||
| `recv` | Counter | Packets/min | Total packets received |
|
||||
| `sent` | Counter | Packets/min | Total packets transmitted |
|
||||
# View detailed logs
|
||||
docker compose logs -f meshcore-stats
|
||||
```
|
||||
|
||||
### Metric Types
|
||||
### Circuit Breaker
|
||||
|
||||
- **Gauge**: Instantaneous values stored as-is (battery voltage, RSSI, queue depth)
|
||||
- **Counter**: Cumulative values where the rate of change is calculated (packets, airtime). Charts display per-minute rates.
|
||||
The repeater collector uses a circuit breaker to avoid spamming LoRa when the repeater is unreachable. After multiple failures, it enters a cooldown period (default: 1 hour).
|
||||
|
||||
## Database
|
||||
To reset manually:
|
||||
|
||||
Metrics are stored in a SQLite database at `data/state/metrics.db` with WAL mode enabled for concurrent read/write access.
|
||||
```bash
|
||||
rm data/state/repeater_circuit.json
|
||||
docker compose restart meshcore-stats
|
||||
```
|
||||
|
||||
### Schema Migrations
|
||||
## Architecture
|
||||
|
||||
Database migrations are stored as SQL files in `src/meshmon/migrations/` and are applied automatically when the database is initialized. Migration files follow the naming convention `NNN_description.sql` (e.g., `001_initial_schema.sql`).
|
||||
```
|
||||
┌─────────────────┐ LoRa ┌─────────────────┐
|
||||
│ Companion │◄─────────────►│ Repeater │
|
||||
│ (USB Serial) │ │ (Remote) │
|
||||
└────────┬────────┘ └─────────────────┘
|
||||
│
|
||||
│ Serial/TCP
|
||||
▼
|
||||
┌─────────────────┐
|
||||
│ Docker Host │
|
||||
│ ┌───────────┐ │
|
||||
│ │ meshcore- │ │ ┌─────────┐
|
||||
│ │ stats │──┼────►│ nginx │──► :8080
|
||||
│ └───────────┘ │ └─────────┘
|
||||
│ │ │
|
||||
│ ▼ │
|
||||
│ SQLite + SVG │
|
||||
└─────────────────┘
|
||||
```
|
||||
|
||||
## Public Instances
|
||||
The system runs two containers:
|
||||
- **meshcore-stats**: Collects data on schedule (Ofelia) and generates charts
|
||||
- **nginx**: Serves the static dashboard
|
||||
|
||||
A list of publicly accessible MeshCore Stats installations. Want to add yours? [Open a pull request](https://github.com/jorijn/meshcore-stats/pulls)!
|
||||
## Documentation
|
||||
|
||||
| URL | Hardware | Location |
|
||||
|-----|----------|----------|
|
||||
| [meshcore.jorijn.com](https://meshcore.jorijn.com) | SenseCAP Solar Node P1 Pro + 6.5dBi Mikrotik antenna | Oosterhout, The Netherlands |
|
||||
- [docs/firmware-responses.md](docs/firmware-responses.md) - MeshCore firmware response formats
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
|
||||
## Public Instances
|
||||
|
||||
Public MeshCore Stats installations. Want to add yours? [Open a pull request](https://github.com/jorijn/meshcore-stats/pulls)!
|
||||
|
||||
| URL | Hardware | Location |
|
||||
|-----|----------|----------|
|
||||
| [meshcore.jorijn.com](https://meshcore.jorijn.com) | SenseCAP Solar Node P1 Pro + 6.5dBi Mikrotik antenna | Oosterhout, The Netherlands |
|
||||
|
||||
29
docker-compose.dev.yml
Normal file
29
docker-compose.dev.yml
Normal file
@@ -0,0 +1,29 @@
|
||||
# MeshCore Stats - Development Override
|
||||
#
|
||||
# Use this file for local development with live code changes.
|
||||
#
|
||||
# Usage:
|
||||
# docker compose -f docker-compose.yml -f docker-compose.dev.yml up --build
|
||||
#
|
||||
# This override:
|
||||
# - Builds the image locally instead of pulling from ghcr.io
|
||||
# - Mounts src/ and scripts/ for live code changes (no rebuild needed)
|
||||
|
||||
services:
|
||||
meshcore-stats:
|
||||
# Build locally instead of using published image
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
|
||||
# Remove the image reference (use built image)
|
||||
image: meshcore-stats:development
|
||||
|
||||
# Mount source code for live development
|
||||
# Changes to Python files take effect immediately (no rebuild needed)
|
||||
volumes:
|
||||
- ./data/state:/data/state
|
||||
- ./out:/out
|
||||
# Development mounts (read-only to prevent accidental writes)
|
||||
- ./src:/app/src:ro
|
||||
- ./scripts:/app/scripts:ro
|
||||
135
docker-compose.yml
Normal file
135
docker-compose.yml
Normal file
@@ -0,0 +1,135 @@
|
||||
# MeshCore Stats - Docker Compose Configuration
|
||||
#
|
||||
# Production deployment using published container image.
|
||||
# For local development, use: docker compose -f docker-compose.yml -f docker-compose.dev.yml up --build
|
||||
#
|
||||
# Prerequisites:
|
||||
# 1. Copy meshcore.conf.example to meshcore.conf and configure
|
||||
# 2. For serial transport: Create docker-compose.override.yml with your device (see README)
|
||||
# 3. Ensure your user has access to the serial device (dialout group)
|
||||
# 4. Create data directories with correct ownership:
|
||||
# mkdir -p ./data/state ./out && sudo chown -R 1000:1000 ./data ./out
|
||||
|
||||
services:
|
||||
# ==========================================================================
|
||||
# MeshCore Stats - Data collection and rendering
|
||||
# ==========================================================================
|
||||
meshcore-stats:
|
||||
image: ghcr.io/jorijn/meshcore-stats:0.2.9 # x-release-please-version
|
||||
container_name: meshcore-stats
|
||||
restart: unless-stopped
|
||||
|
||||
# Load configuration from meshcore.conf
|
||||
env_file:
|
||||
- meshcore.conf
|
||||
|
||||
# NOTE: Serial device must be added via docker-compose.override.yml
|
||||
# See README.md for examples. TCP transport users don't need devices.
|
||||
|
||||
volumes:
|
||||
# Persistent storage for SQLite database and circuit breaker state
|
||||
- ./data/state:/data/state
|
||||
# Generated static site (served by nginx)
|
||||
- ./out:/out
|
||||
|
||||
# Run as meshmon user (UID 1000)
|
||||
user: "1000:1000"
|
||||
|
||||
# Add dialout group for serial port access
|
||||
group_add:
|
||||
- dialout
|
||||
|
||||
# Security hardening
|
||||
security_opt:
|
||||
- no-new-privileges:true
|
||||
cap_drop:
|
||||
- ALL
|
||||
read_only: true
|
||||
tmpfs:
|
||||
- /tmp:noexec,nosuid,size=64m
|
||||
- /var/cache/fontconfig:noexec,nosuid,size=4m
|
||||
|
||||
# Resource limits
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
cpus: "1.0"
|
||||
memory: 512M
|
||||
reservations:
|
||||
cpus: "0.1"
|
||||
memory: 128M
|
||||
|
||||
# Logging limits to prevent disk exhaustion
|
||||
logging:
|
||||
driver: json-file
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
|
||||
# Health check
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import sqlite3; sqlite3.connect('/data/state/metrics.db').execute('SELECT 1')"]
|
||||
interval: 5m
|
||||
timeout: 30s
|
||||
start_period: 60s
|
||||
retries: 3
|
||||
|
||||
# ==========================================================================
|
||||
# nginx - Static site server
|
||||
# ==========================================================================
|
||||
nginx:
|
||||
image: nginx:1.27-alpine
|
||||
container_name: meshcore-stats-nginx
|
||||
restart: unless-stopped
|
||||
|
||||
# Run as nginx user (UID 101 in Alpine nginx image)
|
||||
user: "101:101"
|
||||
|
||||
ports:
|
||||
- "8080:8080"
|
||||
|
||||
volumes:
|
||||
# Mount generated static site from meshcore-stats container
|
||||
- ./out:/usr/share/nginx/html:ro
|
||||
# Custom nginx configuration
|
||||
- ./docker/nginx.conf:/etc/nginx/conf.d/default.conf:ro
|
||||
|
||||
# Security hardening
|
||||
security_opt:
|
||||
- no-new-privileges:true
|
||||
cap_drop:
|
||||
- ALL
|
||||
# NET_BIND_SERVICE not needed for port 8080 (unprivileged)
|
||||
read_only: true
|
||||
tmpfs:
|
||||
- /var/cache/nginx:noexec,nosuid,size=16m,uid=101,gid=101
|
||||
- /var/run:noexec,nosuid,size=1m,uid=101,gid=101
|
||||
|
||||
# Resource limits
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
cpus: "0.5"
|
||||
memory: 64M
|
||||
reservations:
|
||||
cpus: "0.05"
|
||||
memory: 16M
|
||||
|
||||
# Logging limits
|
||||
logging:
|
||||
driver: json-file
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
|
||||
# Health check
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "--quiet", "--tries=1", "--spider", "http://localhost:8080/health"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
start_period: 5s
|
||||
retries: 3
|
||||
|
||||
depends_on:
|
||||
meshcore-stats:
|
||||
condition: service_healthy
|
||||
67
docker/nginx.conf
Normal file
67
docker/nginx.conf
Normal file
@@ -0,0 +1,67 @@
|
||||
# nginx configuration for MeshCore Stats static site
|
||||
# This file is used by the nginx container in docker-compose.yml
|
||||
|
||||
server {
|
||||
listen 8080;
|
||||
server_name _;
|
||||
|
||||
root /usr/share/nginx/html;
|
||||
index day.html index.html;
|
||||
|
||||
# UTF-8 charset for all text files
|
||||
charset utf-8;
|
||||
charset_types text/plain text/css text/javascript application/json image/svg+xml;
|
||||
|
||||
# Gzip compression
|
||||
gzip on;
|
||||
gzip_vary on;
|
||||
gzip_min_length 1024;
|
||||
gzip_types text/plain text/css text/javascript application/json image/svg+xml;
|
||||
|
||||
# Security headers
|
||||
add_header X-Frame-Options "SAMEORIGIN" always;
|
||||
add_header X-Content-Type-Options "nosniff" always;
|
||||
add_header Referrer-Policy "strict-origin-when-cross-origin" always;
|
||||
|
||||
# HTML, JSON, TXT files - no cache (frequently updated)
|
||||
location ~* \.(html|json|txt)$ {
|
||||
add_header Cache-Control "no-cache, no-store, must-revalidate" always;
|
||||
add_header Pragma "no-cache" always;
|
||||
add_header Expires "0" always;
|
||||
# Re-add security headers (add_header in location blocks replaces parent)
|
||||
add_header X-Frame-Options "SAMEORIGIN" always;
|
||||
add_header X-Content-Type-Options "nosniff" always;
|
||||
add_header Referrer-Policy "strict-origin-when-cross-origin" always;
|
||||
}
|
||||
|
||||
# PNG files - no cache (charts are regenerated frequently)
|
||||
location ~* \.png$ {
|
||||
add_header Cache-Control "no-cache, no-store, must-revalidate" always;
|
||||
add_header Pragma "no-cache" always;
|
||||
add_header Expires "0" always;
|
||||
add_header X-Frame-Options "SAMEORIGIN" always;
|
||||
add_header X-Content-Type-Options "nosniff" always;
|
||||
add_header Referrer-Policy "strict-origin-when-cross-origin" always;
|
||||
}
|
||||
|
||||
# CSS, JS, SVG files - short cache (5 minutes)
|
||||
location ~* \.(css|js|svg)$ {
|
||||
expires 5m;
|
||||
add_header Cache-Control "public, max-age=300" always;
|
||||
add_header X-Frame-Options "SAMEORIGIN" always;
|
||||
add_header X-Content-Type-Options "nosniff" always;
|
||||
add_header Referrer-Policy "strict-origin-when-cross-origin" always;
|
||||
}
|
||||
|
||||
# Default location
|
||||
location / {
|
||||
try_files $uri $uri/ =404;
|
||||
}
|
||||
|
||||
# Health check endpoint
|
||||
location /health {
|
||||
access_log off;
|
||||
return 200 "OK\n";
|
||||
add_header Content-Type text/plain;
|
||||
}
|
||||
}
|
||||
51
docker/ofelia.ini
Normal file
51
docker/ofelia.ini
Normal file
@@ -0,0 +1,51 @@
|
||||
# Ofelia Job Scheduler Configuration
|
||||
# https://github.com/mcuadros/ofelia
|
||||
#
|
||||
# This file defines the cron-like schedule for all MeshCore Stats tasks.
|
||||
# Jobs run inside the same container (job-local).
|
||||
|
||||
# =============================================================================
|
||||
# Data Collection Jobs
|
||||
# =============================================================================
|
||||
|
||||
[job-local "collect-companion"]
|
||||
# Collect metrics from companion node (USB serial)
|
||||
schedule = @every 1m
|
||||
command = python /app/scripts/collect_companion.py
|
||||
no-overlap = true
|
||||
|
||||
[job-local "collect-repeater"]
|
||||
# Collect metrics from repeater node (via LoRa)
|
||||
# Offset by 1 second to avoid USB serial conflicts with companion collection
|
||||
schedule = 1 1,16,31,46 * * * *
|
||||
command = python /app/scripts/collect_repeater.py
|
||||
no-overlap = true
|
||||
|
||||
# =============================================================================
|
||||
# Rendering Jobs
|
||||
# =============================================================================
|
||||
|
||||
[job-local "render-charts"]
|
||||
# Generate SVG charts from database
|
||||
schedule = @every 5m
|
||||
command = python /app/scripts/render_charts.py
|
||||
|
||||
[job-local "render-site"]
|
||||
# Generate static HTML site
|
||||
schedule = @every 5m
|
||||
command = python /app/scripts/render_site.py
|
||||
|
||||
[job-local "render-reports"]
|
||||
# Generate monthly/yearly statistics reports
|
||||
schedule = @daily
|
||||
command = python /app/scripts/render_reports.py
|
||||
|
||||
# =============================================================================
|
||||
# Maintenance Jobs
|
||||
# =============================================================================
|
||||
|
||||
[job-local "db-maintenance"]
|
||||
# Database VACUUM and ANALYZE for optimal performance
|
||||
# Runs at 3 AM on the 1st of each month
|
||||
schedule = 0 3 1 * *
|
||||
command = python -c "import sqlite3; db=sqlite3.connect('/data/state/metrics.db'); db.execute('VACUUM'); db.execute('ANALYZE'); db.close(); print('Database maintenance complete')"
|
||||
@@ -102,6 +102,84 @@ Returns a single dict with all status fields.
|
||||
|
||||
---
|
||||
|
||||
## Telemetry Data
|
||||
|
||||
Environmental telemetry is requested via `req_telemetry_sync(contact)` and returns
|
||||
Cayenne LPP formatted sensor data. This requires `TELEMETRY_ENABLED=1` and a sensor
|
||||
board attached to the repeater.
|
||||
|
||||
### Payload Format
|
||||
|
||||
Both `req_telemetry_sync()` and `get_self_telemetry()` return a dict containing the
|
||||
LPP data list and a public key prefix:
|
||||
|
||||
```python
|
||||
{
|
||||
'pubkey_pre': 'a5c14f5244d6',
|
||||
'lpp': [
|
||||
{'channel': 0, 'type': 'temperature', 'value': 23.5},
|
||||
{'channel': 0, 'type': 'humidity', 'value': 45.2},
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
The `extract_lpp_from_payload()` helper in `src/meshmon/telemetry.py` handles
|
||||
extracting the `lpp` list from this wrapper format.
|
||||
|
||||
### `req_telemetry_sync(contact)`
|
||||
|
||||
Returns sensor readings from a remote node in Cayenne LPP format:
|
||||
|
||||
```python
|
||||
[
|
||||
{'channel': 0, 'type': 'temperature', 'value': 23.5},
|
||||
{'channel': 0, 'type': 'humidity', 'value': 45.2},
|
||||
{'channel': 0, 'type': 'barometer', 'value': 1013.25},
|
||||
{'channel': 1, 'type': 'gps', 'value': {'latitude': 51.5, 'longitude': -0.1, 'altitude': 10}},
|
||||
]
|
||||
```
|
||||
|
||||
**Common sensor types:**
|
||||
|
||||
| Type | Unit | Description |
|
||||
|------|------|-------------|
|
||||
| `temperature` | Celsius | Temperature reading |
|
||||
| `humidity` | % | Relative humidity |
|
||||
| `barometer` | hPa/mbar | Barometric pressure |
|
||||
| `voltage` | V | Voltage reading |
|
||||
| `gps` | compound | GPS with `latitude`, `longitude`, `altitude` |
|
||||
|
||||
**Stored as:**
|
||||
- `telemetry.temperature.0` - Temperature on channel 0
|
||||
- `telemetry.humidity.0` - Humidity on channel 0
|
||||
- `telemetry.gps.1.latitude` - GPS latitude on channel 1
|
||||
|
||||
**Notes:**
|
||||
- Requires environmental sensor board (BME280, BME680, etc.) on repeater
|
||||
- Channel number distinguishes multiple sensors of the same type
|
||||
- Not all repeaters have environmental sensors attached
|
||||
- Telemetry collection does not affect circuit breaker state
|
||||
- Telemetry failures are logged as warnings and do not block status collection
|
||||
|
||||
### `get_self_telemetry()`
|
||||
|
||||
Returns self telemetry from the companion node's attached sensors.
|
||||
Same Cayenne LPP format as `req_telemetry_sync()`.
|
||||
|
||||
```python
|
||||
[
|
||||
{'channel': 0, 'type': 'temperature', 'value': 23.5},
|
||||
{'channel': 0, 'type': 'humidity', 'value': 45.2},
|
||||
]
|
||||
```
|
||||
|
||||
**Notes:**
|
||||
- Requires environmental sensor board attached to companion
|
||||
- Returns empty list if no sensors attached
|
||||
- Uses same format as repeater telemetry
|
||||
|
||||
---
|
||||
|
||||
## Derived Metrics
|
||||
|
||||
These are computed at query time, not stored:
|
||||
|
||||
@@ -2,55 +2,66 @@
|
||||
# Copy this file to meshcore.conf and customize for your setup:
|
||||
# cp meshcore.conf.example meshcore.conf
|
||||
#
|
||||
# This file is automatically loaded by the scripts. No need to source it manually.
|
||||
# Environment variables always take precedence over this file (useful for Docker).
|
||||
# Format: KEY=value (no 'export' keyword, no spaces around '=')
|
||||
# This format is compatible with both Docker env_file and shell 'source' command.
|
||||
# Comments start with # and blank lines are ignored.
|
||||
|
||||
# =============================================================================
|
||||
# Connection Settings
|
||||
# =============================================================================
|
||||
|
||||
export MESH_TRANSPORT=serial
|
||||
export MESH_SERIAL_PORT=/dev/ttyUSB0 # Adjust for your system (e.g., /dev/ttyACM0, /dev/cu.usbserial-*)
|
||||
export MESH_SERIAL_BAUD=115200
|
||||
export MESH_DEBUG=0 # Set to 1 for verbose meshcore debug output
|
||||
MESH_TRANSPORT=serial
|
||||
MESH_SERIAL_PORT=/dev/ttyUSB0
|
||||
# MESH_SERIAL_BAUD=115200
|
||||
# MESH_DEBUG=0
|
||||
|
||||
# TCP transport (for macOS Docker or remote serial servers)
|
||||
# MESH_TRANSPORT=tcp
|
||||
# MESH_TCP_HOST=host.docker.internal
|
||||
# MESH_TCP_PORT=5000
|
||||
|
||||
# BLE transport (Bluetooth Low Energy)
|
||||
# MESH_TRANSPORT=ble
|
||||
# MESH_BLE_ADDR=AA:BB:CC:DD:EE:FF
|
||||
# MESH_BLE_PIN=123456
|
||||
|
||||
# =============================================================================
|
||||
# Remote Repeater Identity
|
||||
# =============================================================================
|
||||
# At least REPEATER_NAME or REPEATER_KEY_PREFIX is required to identify your repeater
|
||||
|
||||
export REPEATER_NAME="Your Repeater Name" # Advertised name shown in contacts
|
||||
# export REPEATER_KEY_PREFIX="a1b2c3" # Alternative: hex prefix of public key
|
||||
export REPEATER_PASSWORD="your-password" # Admin password for repeater login
|
||||
REPEATER_NAME=Your Repeater Name
|
||||
# REPEATER_KEY_PREFIX=a1b2c3
|
||||
REPEATER_PASSWORD=your-password
|
||||
|
||||
# =============================================================================
|
||||
# Display Names (shown in UI)
|
||||
# =============================================================================
|
||||
|
||||
export REPEATER_DISPLAY_NAME="My Repeater"
|
||||
export COMPANION_DISPLAY_NAME="My Companion"
|
||||
REPEATER_DISPLAY_NAME=My Repeater
|
||||
COMPANION_DISPLAY_NAME=My Companion
|
||||
|
||||
# Public key prefixes (shown below node name in sidebar, e.g., "!a1b2c3d4")
|
||||
# export REPEATER_PUBKEY_PREFIX="!a1b2c3d4"
|
||||
# export COMPANION_PUBKEY_PREFIX="!e5f6g7h8"
|
||||
# REPEATER_PUBKEY_PREFIX=!a1b2c3d4
|
||||
# COMPANION_PUBKEY_PREFIX=!e5f6g7h8
|
||||
|
||||
# =============================================================================
|
||||
# Location Metadata (for reports and sidebar display)
|
||||
# =============================================================================
|
||||
|
||||
export REPORT_LOCATION_NAME="City, Country" # Full location name for reports
|
||||
export REPORT_LOCATION_SHORT="City, XX" # Short version for sidebar/meta
|
||||
export REPORT_LAT=0.0 # Latitude in decimal degrees
|
||||
export REPORT_LON=0.0 # Longitude in decimal degrees
|
||||
export REPORT_ELEV=0 # Elevation
|
||||
export REPORT_ELEV_UNIT=m # "m" for meters, "ft" for feet
|
||||
REPORT_LOCATION_NAME=City, Country
|
||||
REPORT_LOCATION_SHORT=City, XX
|
||||
REPORT_LAT=0.0
|
||||
REPORT_LON=0.0
|
||||
REPORT_ELEV=0
|
||||
REPORT_ELEV_UNIT=m
|
||||
|
||||
# =============================================================================
|
||||
# Hardware Info (shown in sidebar)
|
||||
# =============================================================================
|
||||
|
||||
export REPEATER_HARDWARE="Your Repeater Model" # e.g., "SenseCAP P1-Pro", "LILYGO T-Beam"
|
||||
export COMPANION_HARDWARE="Your Companion Model" # e.g., "Elecrow ThinkNode-M1", "Heltec V3"
|
||||
REPEATER_HARDWARE=Your Repeater Model
|
||||
COMPANION_HARDWARE=Your Companion Model
|
||||
|
||||
# =============================================================================
|
||||
# Radio Configuration Presets
|
||||
@@ -59,58 +70,71 @@ export COMPANION_HARDWARE="Your Companion Model" # e.g., "Elecrow ThinkNode-M1",
|
||||
# or set custom values. These are for display purposes only.
|
||||
|
||||
# MeshCore EU/UK Narrow (default)
|
||||
export RADIO_FREQUENCY="869.618 MHz"
|
||||
export RADIO_BANDWIDTH="62.5 kHz"
|
||||
export RADIO_SPREAD_FACTOR="SF8"
|
||||
export RADIO_CODING_RATE="CR8"
|
||||
RADIO_FREQUENCY=869.618 MHz
|
||||
RADIO_BANDWIDTH=62.5 kHz
|
||||
RADIO_SPREAD_FACTOR=SF8
|
||||
RADIO_CODING_RATE=CR8
|
||||
|
||||
# # MeshCore EU/UK Wide
|
||||
# export RADIO_FREQUENCY="869.525 MHz"
|
||||
# export RADIO_BANDWIDTH="250 kHz"
|
||||
# export RADIO_SPREAD_FACTOR="SF10"
|
||||
# export RADIO_CODING_RATE="CR5"
|
||||
# MeshCore EU/UK Wide
|
||||
# RADIO_FREQUENCY=869.525 MHz
|
||||
# RADIO_BANDWIDTH=250 kHz
|
||||
# RADIO_SPREAD_FACTOR=SF10
|
||||
# RADIO_CODING_RATE=CR5
|
||||
|
||||
# # MeshCore US Standard
|
||||
# export RADIO_FREQUENCY="906.875 MHz"
|
||||
# export RADIO_BANDWIDTH="250 kHz"
|
||||
# export RADIO_SPREAD_FACTOR="SF10"
|
||||
# export RADIO_CODING_RATE="CR5"
|
||||
# MeshCore US Standard
|
||||
# RADIO_FREQUENCY=906.875 MHz
|
||||
# RADIO_BANDWIDTH=250 kHz
|
||||
# RADIO_SPREAD_FACTOR=SF10
|
||||
# RADIO_CODING_RATE=CR5
|
||||
|
||||
# # MeshCore US Fast
|
||||
# export RADIO_FREQUENCY="906.875 MHz"
|
||||
# export RADIO_BANDWIDTH="500 kHz"
|
||||
# export RADIO_SPREAD_FACTOR="SF7"
|
||||
# export RADIO_CODING_RATE="CR5"
|
||||
# MeshCore US Fast
|
||||
# RADIO_FREQUENCY=906.875 MHz
|
||||
# RADIO_BANDWIDTH=500 kHz
|
||||
# RADIO_SPREAD_FACTOR=SF7
|
||||
# RADIO_CODING_RATE=CR5
|
||||
|
||||
# # MeshCore ANZ (Australia/New Zealand)
|
||||
# export RADIO_FREQUENCY="917.0 MHz"
|
||||
# export RADIO_BANDWIDTH="250 kHz"
|
||||
# export RADIO_SPREAD_FACTOR="SF10"
|
||||
# export RADIO_CODING_RATE="CR5"
|
||||
# MeshCore ANZ (Australia/New Zealand)
|
||||
# RADIO_FREQUENCY=917.0 MHz
|
||||
# RADIO_BANDWIDTH=250 kHz
|
||||
# RADIO_SPREAD_FACTOR=SF10
|
||||
# RADIO_CODING_RATE=CR5
|
||||
|
||||
# =============================================================================
|
||||
# Intervals and Timeouts
|
||||
# =============================================================================
|
||||
|
||||
export COMPANION_STEP=60 # Collection interval for companion (seconds)
|
||||
export REPEATER_STEP=900 # Collection interval for repeater (seconds, 15min default)
|
||||
export REMOTE_TIMEOUT_S=10 # Minimum timeout for LoRa requests
|
||||
export REMOTE_RETRY_ATTEMPTS=2 # Number of retry attempts
|
||||
export REMOTE_RETRY_BACKOFF_S=4 # Seconds between retries
|
||||
# COMPANION_STEP=60
|
||||
# REPEATER_STEP=900
|
||||
# REMOTE_TIMEOUT_S=10
|
||||
# REMOTE_RETRY_ATTEMPTS=2
|
||||
# REMOTE_RETRY_BACKOFF_S=4
|
||||
|
||||
# Circuit breaker settings (prevents spamming LoRa when repeater is unreachable)
|
||||
export REMOTE_CB_FAILS=6 # Failures before circuit breaker opens
|
||||
export REMOTE_CB_COOLDOWN_S=3600 # Cooldown period in seconds (1 hour)
|
||||
# REMOTE_CB_FAILS=6
|
||||
# REMOTE_CB_COOLDOWN_S=3600
|
||||
|
||||
# =============================================================================
|
||||
# Paths
|
||||
# Telemetry Collection (Environmental Sensors)
|
||||
# =============================================================================
|
||||
# Enable telemetry collection from repeater's environmental sensors
|
||||
# (temperature, humidity, barometric pressure, etc.)
|
||||
# Requires sensor board attached to repeater (e.g., BME280, BME680)
|
||||
# Default: 0 (disabled)
|
||||
# TELEMETRY_ENABLED=1
|
||||
|
||||
export STATE_DIR=./data/state # SQLite database and circuit breaker state
|
||||
export OUT_DIR=./out # Generated static site output
|
||||
# Telemetry-specific timeout and retry settings
|
||||
# Defaults match status settings. Separate config allows tuning if telemetry
|
||||
# proves problematic (e.g., firmware doesn't support it, sensor board missing).
|
||||
# You can reduce these if telemetry collection is causing issues.
|
||||
# TELEMETRY_TIMEOUT_S=10
|
||||
# TELEMETRY_RETRY_ATTEMPTS=2
|
||||
# TELEMETRY_RETRY_BACKOFF_S=4
|
||||
|
||||
# =============================================================================
|
||||
# Optional
|
||||
# Paths (Native installation only)
|
||||
# =============================================================================
|
||||
# Docker: Leave these commented. The container uses /data/state and /out by default.
|
||||
# Native: Uncomment for local cron-based installation:
|
||||
# STATE_DIR=./data/state
|
||||
# OUT_DIR=./out
|
||||
|
||||
export REPEATER_FETCH_ACL=0 # Set to 1 to fetch ACL from repeater
|
||||
|
||||
@@ -9,6 +9,11 @@
|
||||
"type": "generic",
|
||||
"path": "src/meshmon/__init__.py",
|
||||
"glob": false
|
||||
},
|
||||
{
|
||||
"type": "generic",
|
||||
"path": "docker-compose.yml",
|
||||
"glob": false
|
||||
}
|
||||
],
|
||||
"changelog-sections": [
|
||||
|
||||
@@ -25,8 +25,9 @@ sys.path.insert(0, str(Path(__file__).parent.parent / "src"))
|
||||
|
||||
from meshmon.env import get_config
|
||||
from meshmon import log
|
||||
from meshmon.meshcore_client import connect_from_env, run_command
|
||||
from meshmon.meshcore_client import connect_with_lock, run_command
|
||||
from meshmon.db import init_db, insert_metrics
|
||||
from meshmon.telemetry import extract_lpp_from_payload, extract_telemetry_metrics
|
||||
|
||||
|
||||
async def collect_companion() -> int:
|
||||
@@ -39,138 +40,132 @@ async def collect_companion() -> int:
|
||||
cfg = get_config()
|
||||
ts = int(time.time())
|
||||
|
||||
log.debug("Connecting to companion node...")
|
||||
mc = await connect_from_env()
|
||||
|
||||
if mc is None:
|
||||
log.error("Failed to connect to companion node")
|
||||
return 1
|
||||
|
||||
# Metrics to insert (firmware field names)
|
||||
metrics: dict[str, float] = {}
|
||||
commands_succeeded = 0
|
||||
|
||||
# Commands are accessed via mc.commands
|
||||
cmd = mc.commands
|
||||
log.debug("Connecting to companion node...")
|
||||
async with connect_with_lock() as mc:
|
||||
if mc is None:
|
||||
log.error("Failed to connect to companion node")
|
||||
return 1
|
||||
|
||||
try:
|
||||
# send_appstart (already called during connect, but call again to get self_info)
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.send_appstart(), "send_appstart"
|
||||
)
|
||||
if ok:
|
||||
commands_succeeded += 1
|
||||
log.debug(f"appstart: {evt_type}")
|
||||
else:
|
||||
log.error(f"appstart failed: {err}")
|
||||
# Commands are accessed via mc.commands
|
||||
cmd = mc.commands
|
||||
|
||||
# send_device_query
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.send_device_query(), "send_device_query"
|
||||
)
|
||||
if ok:
|
||||
commands_succeeded += 1
|
||||
log.debug(f"device_query: {payload}")
|
||||
else:
|
||||
log.error(f"device_query failed: {err}")
|
||||
try:
|
||||
# send_appstart (already called during connect, but call again to get self_info)
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.send_appstart(), "send_appstart"
|
||||
)
|
||||
if ok:
|
||||
commands_succeeded += 1
|
||||
log.debug(f"appstart: {evt_type}")
|
||||
else:
|
||||
log.error(f"appstart failed: {err}")
|
||||
|
||||
# get_bat
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.get_bat(), "get_bat"
|
||||
)
|
||||
if ok:
|
||||
commands_succeeded += 1
|
||||
log.debug(f"get_bat: {payload}")
|
||||
else:
|
||||
log.error(f"get_bat failed: {err}")
|
||||
# send_device_query
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.send_device_query(), "send_device_query"
|
||||
)
|
||||
if ok:
|
||||
commands_succeeded += 1
|
||||
log.debug(f"device_query: {payload}")
|
||||
else:
|
||||
log.error(f"device_query failed: {err}")
|
||||
|
||||
# get_time
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.get_time(), "get_time"
|
||||
)
|
||||
if ok:
|
||||
commands_succeeded += 1
|
||||
log.debug(f"get_time: {payload}")
|
||||
else:
|
||||
log.error(f"get_time failed: {err}")
|
||||
# get_time
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.get_time(), "get_time"
|
||||
)
|
||||
if ok:
|
||||
commands_succeeded += 1
|
||||
log.debug(f"get_time: {payload}")
|
||||
else:
|
||||
log.error(f"get_time failed: {err}")
|
||||
|
||||
# get_self_telemetry
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.get_self_telemetry(), "get_self_telemetry"
|
||||
)
|
||||
if ok:
|
||||
commands_succeeded += 1
|
||||
log.debug(f"get_self_telemetry: {payload}")
|
||||
else:
|
||||
log.error(f"get_self_telemetry failed: {err}")
|
||||
# get_self_telemetry - collect environmental sensor data
|
||||
# Note: The call happens regardless of telemetry_enabled for device query completeness,
|
||||
# but we only extract and store metrics if the feature is enabled.
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.get_self_telemetry(), "get_self_telemetry"
|
||||
)
|
||||
if ok:
|
||||
commands_succeeded += 1
|
||||
log.debug(f"get_self_telemetry: {payload}")
|
||||
# Extract and store telemetry if enabled
|
||||
if cfg.telemetry_enabled:
|
||||
lpp_data = extract_lpp_from_payload(payload)
|
||||
if lpp_data is not None:
|
||||
telemetry_metrics = extract_telemetry_metrics(lpp_data)
|
||||
if telemetry_metrics:
|
||||
metrics.update(telemetry_metrics)
|
||||
log.debug(f"Extracted {len(telemetry_metrics)} telemetry metrics")
|
||||
else:
|
||||
# Debug level because not all devices have sensors attached - this is expected
|
||||
log.debug(f"get_self_telemetry failed: {err}")
|
||||
|
||||
# get_custom_vars
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.get_custom_vars(), "get_custom_vars"
|
||||
)
|
||||
if ok:
|
||||
commands_succeeded += 1
|
||||
log.debug(f"get_custom_vars: {payload}")
|
||||
else:
|
||||
log.debug(f"get_custom_vars failed: {err}")
|
||||
# get_custom_vars
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.get_custom_vars(), "get_custom_vars"
|
||||
)
|
||||
if ok:
|
||||
commands_succeeded += 1
|
||||
log.debug(f"get_custom_vars: {payload}")
|
||||
else:
|
||||
log.debug(f"get_custom_vars failed: {err}")
|
||||
|
||||
# get_contacts - count contacts
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.get_contacts(), "get_contacts"
|
||||
)
|
||||
if ok:
|
||||
commands_succeeded += 1
|
||||
contacts_count = len(payload) if payload else 0
|
||||
metrics["contacts"] = float(contacts_count)
|
||||
log.debug(f"get_contacts: found {contacts_count} contacts")
|
||||
else:
|
||||
log.error(f"get_contacts failed: {err}")
|
||||
# get_contacts - count contacts
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.get_contacts(), "get_contacts"
|
||||
)
|
||||
if ok:
|
||||
commands_succeeded += 1
|
||||
contacts_count = len(payload) if payload else 0
|
||||
metrics["contacts"] = float(contacts_count)
|
||||
log.debug(f"get_contacts: found {contacts_count} contacts")
|
||||
else:
|
||||
log.error(f"get_contacts failed: {err}")
|
||||
|
||||
# Get statistics - these contain the main metrics
|
||||
# Core stats (battery_mv, uptime_secs, errors, queue_len)
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.get_stats_core(), "get_stats_core"
|
||||
)
|
||||
if ok and payload and isinstance(payload, dict):
|
||||
commands_succeeded += 1
|
||||
# Insert all numeric fields from stats_core
|
||||
for key, value in payload.items():
|
||||
if isinstance(value, (int, float)):
|
||||
metrics[key] = float(value)
|
||||
log.debug(f"stats_core: {payload}")
|
||||
# Get statistics - these contain the main metrics
|
||||
# Core stats (battery_mv, uptime_secs, errors, queue_len)
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.get_stats_core(), "get_stats_core"
|
||||
)
|
||||
if ok and payload and isinstance(payload, dict):
|
||||
commands_succeeded += 1
|
||||
# Insert all numeric fields from stats_core
|
||||
for key, value in payload.items():
|
||||
if isinstance(value, (int, float)):
|
||||
metrics[key] = float(value)
|
||||
log.debug(f"stats_core: {payload}")
|
||||
|
||||
# Radio stats (noise_floor, last_rssi, last_snr, tx_air_secs, rx_air_secs)
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.get_stats_radio(), "get_stats_radio"
|
||||
)
|
||||
if ok and payload and isinstance(payload, dict):
|
||||
commands_succeeded += 1
|
||||
for key, value in payload.items():
|
||||
if isinstance(value, (int, float)):
|
||||
metrics[key] = float(value)
|
||||
log.debug(f"stats_radio: {payload}")
|
||||
# Radio stats (noise_floor, last_rssi, last_snr, tx_air_secs, rx_air_secs)
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.get_stats_radio(), "get_stats_radio"
|
||||
)
|
||||
if ok and payload and isinstance(payload, dict):
|
||||
commands_succeeded += 1
|
||||
for key, value in payload.items():
|
||||
if isinstance(value, (int, float)):
|
||||
metrics[key] = float(value)
|
||||
log.debug(f"stats_radio: {payload}")
|
||||
|
||||
# Packet stats (recv, sent, flood_tx, direct_tx, flood_rx, direct_rx)
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.get_stats_packets(), "get_stats_packets"
|
||||
)
|
||||
if ok and payload and isinstance(payload, dict):
|
||||
commands_succeeded += 1
|
||||
for key, value in payload.items():
|
||||
if isinstance(value, (int, float)):
|
||||
metrics[key] = float(value)
|
||||
log.debug(f"stats_packets: {payload}")
|
||||
# Packet stats (recv, sent, flood_tx, direct_tx, flood_rx, direct_rx)
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.get_stats_packets(), "get_stats_packets"
|
||||
)
|
||||
if ok and payload and isinstance(payload, dict):
|
||||
commands_succeeded += 1
|
||||
for key, value in payload.items():
|
||||
if isinstance(value, (int, float)):
|
||||
metrics[key] = float(value)
|
||||
log.debug(f"stats_packets: {payload}")
|
||||
|
||||
except Exception as e:
|
||||
log.error(f"Error during collection: {e}")
|
||||
except Exception as e:
|
||||
log.error(f"Error during collection: {e}")
|
||||
|
||||
finally:
|
||||
# Close connection
|
||||
if hasattr(mc, "disconnect"):
|
||||
try:
|
||||
await mc.disconnect()
|
||||
except Exception:
|
||||
pass
|
||||
# Connection closed and lock released by context manager
|
||||
|
||||
# Print summary
|
||||
summary_parts = [f"ts={ts}"]
|
||||
@@ -183,6 +178,10 @@ async def collect_companion() -> int:
|
||||
summary_parts.append(f"rx={int(metrics['recv'])}")
|
||||
if "sent" in metrics:
|
||||
summary_parts.append(f"tx={int(metrics['sent'])}")
|
||||
# Add telemetry count to summary if present
|
||||
telemetry_count = sum(1 for k in metrics if k.startswith("telemetry."))
|
||||
if telemetry_count > 0:
|
||||
summary_parts.append(f"telem={telemetry_count}")
|
||||
|
||||
log.info(f"Companion: {', '.join(summary_parts)}")
|
||||
|
||||
|
||||
@@ -27,15 +27,15 @@ sys.path.insert(0, str(Path(__file__).parent.parent / "src"))
|
||||
from meshmon.env import get_config
|
||||
from meshmon import log
|
||||
from meshmon.meshcore_client import (
|
||||
connect_from_env,
|
||||
connect_with_lock,
|
||||
run_command,
|
||||
get_contact_by_name,
|
||||
get_contact_by_key_prefix,
|
||||
extract_contact_info,
|
||||
list_contacts_summary,
|
||||
)
|
||||
from meshmon.db import init_db, insert_metrics
|
||||
from meshmon.retry import get_repeater_circuit_breaker, with_retries
|
||||
from meshmon.telemetry import extract_lpp_from_payload, extract_telemetry_metrics
|
||||
|
||||
|
||||
async def find_repeater_contact(mc: Any) -> Optional[Any]:
|
||||
@@ -143,8 +143,10 @@ async def query_repeater_with_retry(
|
||||
|
||||
|
||||
async def collect_repeater() -> int:
|
||||
"""
|
||||
Collect data from remote repeater node.
|
||||
"""Collect data from remote repeater node.
|
||||
|
||||
Collects status metrics (battery, uptime, packet counters, etc.) and
|
||||
optionally telemetry data (temperature, humidity, pressure) if enabled.
|
||||
|
||||
Returns:
|
||||
Exit code (0 = success, 1 = error)
|
||||
@@ -161,136 +163,154 @@ async def collect_repeater() -> int:
|
||||
# Skip collection - no metrics to write
|
||||
return 0
|
||||
|
||||
# Connect to companion
|
||||
log.debug("Connecting to companion node...")
|
||||
mc = await connect_from_env()
|
||||
|
||||
if mc is None:
|
||||
log.error("Failed to connect to companion node")
|
||||
return 1
|
||||
|
||||
# Metrics to insert (firmware field names from req_status_sync)
|
||||
metrics: dict[str, float] = {}
|
||||
status_metrics: dict[str, float] = {}
|
||||
telemetry_metrics: dict[str, float] = {}
|
||||
node_name = "unknown"
|
||||
status_ok = False
|
||||
|
||||
# Commands are accessed via mc.commands
|
||||
cmd = mc.commands
|
||||
|
||||
try:
|
||||
# Initialize (appstart already called during connect)
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.send_appstart(), "send_appstart"
|
||||
)
|
||||
if not ok:
|
||||
log.error(f"appstart failed: {err}")
|
||||
|
||||
# Find repeater contact
|
||||
contact = await find_repeater_contact(mc)
|
||||
|
||||
if contact is None:
|
||||
log.error("Cannot find repeater contact")
|
||||
# Connect to companion
|
||||
log.debug("Connecting to companion node...")
|
||||
async with connect_with_lock() as mc:
|
||||
if mc is None:
|
||||
log.error("Failed to connect to companion node")
|
||||
return 1
|
||||
|
||||
# Store contact info
|
||||
contact_info = extract_contact_info(contact)
|
||||
node_name = contact_info.get("adv_name", "unknown")
|
||||
# Commands are accessed via mc.commands
|
||||
cmd = mc.commands
|
||||
|
||||
log.debug(f"Found repeater: {node_name}")
|
||||
try:
|
||||
# Initialize (appstart already called during connect)
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc, cmd.send_appstart(), "send_appstart"
|
||||
)
|
||||
if not ok:
|
||||
log.error(f"appstart failed: {err}")
|
||||
|
||||
# Optional login (if command exists)
|
||||
if cfg.repeater_password and hasattr(cmd, "send_login"):
|
||||
log.debug("Attempting login...")
|
||||
try:
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc,
|
||||
cmd.send_login(contact, cfg.repeater_password),
|
||||
"send_login",
|
||||
)
|
||||
if ok:
|
||||
log.debug("Login successful")
|
||||
else:
|
||||
log.debug(f"Login failed or not supported: {err}")
|
||||
except Exception as e:
|
||||
log.debug(f"Login not supported: {e}")
|
||||
# Find repeater contact
|
||||
contact = await find_repeater_contact(mc)
|
||||
|
||||
# Query status (using _sync version which returns payload directly)
|
||||
# Use timeout=0 to let the device suggest timeout, with min_timeout as floor
|
||||
log.debug("Querying repeater status...")
|
||||
success, payload, err = await query_repeater_with_retry(
|
||||
mc,
|
||||
contact,
|
||||
"req_status_sync",
|
||||
lambda: cmd.req_status_sync(contact, timeout=0, min_timeout=cfg.remote_timeout_s),
|
||||
)
|
||||
if success and payload and isinstance(payload, dict):
|
||||
status_ok = True
|
||||
# Insert all numeric fields from status response
|
||||
for key, value in payload.items():
|
||||
if isinstance(value, (int, float)):
|
||||
metrics[key] = float(value)
|
||||
log.debug(f"req_status_sync: {payload}")
|
||||
else:
|
||||
log.warn(f"req_status_sync failed: {err}")
|
||||
if contact is None:
|
||||
log.error("Cannot find repeater contact")
|
||||
return 1
|
||||
|
||||
# Optional ACL query (using _sync version)
|
||||
if cfg.repeater_fetch_acl:
|
||||
log.debug("Querying repeater ACL...")
|
||||
# Store contact info
|
||||
contact_info = extract_contact_info(contact)
|
||||
node_name = contact_info.get("adv_name", "unknown")
|
||||
|
||||
log.debug(f"Found repeater: {node_name}")
|
||||
|
||||
# Optional login (if command exists)
|
||||
if cfg.repeater_password and hasattr(cmd, "send_login"):
|
||||
log.debug("Attempting login...")
|
||||
try:
|
||||
ok, evt_type, payload, err = await run_command(
|
||||
mc,
|
||||
cmd.send_login(contact, cfg.repeater_password),
|
||||
"send_login",
|
||||
)
|
||||
if ok:
|
||||
log.debug("Login successful")
|
||||
else:
|
||||
log.debug(f"Login failed or not supported: {err}")
|
||||
except Exception as e:
|
||||
log.debug(f"Login not supported: {e}")
|
||||
|
||||
# Phase 1: Status collection (affects circuit breaker)
|
||||
# Use timeout=0 to let the device suggest timeout, with min_timeout as floor
|
||||
log.debug("Querying repeater status...")
|
||||
success, payload, err = await query_repeater_with_retry(
|
||||
mc,
|
||||
contact,
|
||||
"req_acl_sync",
|
||||
lambda: cmd.req_acl_sync(contact, timeout=0, min_timeout=cfg.remote_timeout_s),
|
||||
"req_status_sync",
|
||||
lambda: cmd.req_status_sync(contact, timeout=0, min_timeout=cfg.remote_timeout_s),
|
||||
)
|
||||
if success:
|
||||
log.debug(f"req_acl_sync: {payload}")
|
||||
if success and payload and isinstance(payload, dict):
|
||||
status_ok = True
|
||||
# Insert all numeric fields from status response
|
||||
for key, value in payload.items():
|
||||
if isinstance(value, (int, float)):
|
||||
status_metrics[key] = float(value)
|
||||
log.debug(f"req_status_sync: {payload}")
|
||||
else:
|
||||
log.debug(f"req_acl_sync failed: {err}")
|
||||
log.warn(f"req_status_sync failed: {err}")
|
||||
|
||||
# Update circuit breaker
|
||||
if status_ok:
|
||||
cb.record_success()
|
||||
log.debug("Circuit breaker: recorded success")
|
||||
else:
|
||||
# Update circuit breaker based on status result
|
||||
if status_ok:
|
||||
cb.record_success()
|
||||
log.debug("Circuit breaker: recorded success")
|
||||
else:
|
||||
cb.record_failure(cfg.remote_cb_fails, cfg.remote_cb_cooldown_s)
|
||||
log.debug(f"Circuit breaker: recorded failure ({cb.consecutive_failures}/{cfg.remote_cb_fails})")
|
||||
|
||||
# CRITICAL: Store status metrics immediately before attempting telemetry
|
||||
# This ensures critical data is saved even if telemetry fails
|
||||
if status_ok and status_metrics:
|
||||
try:
|
||||
inserted = insert_metrics(ts=ts, role="repeater", metrics=status_metrics)
|
||||
log.debug(f"Stored {inserted} status metrics (ts={ts})")
|
||||
except Exception as e:
|
||||
log.error(f"Failed to store status metrics: {e}")
|
||||
return 1
|
||||
|
||||
# Phase 2: Telemetry collection (does NOT affect circuit breaker)
|
||||
if cfg.telemetry_enabled and status_ok:
|
||||
log.debug("Querying repeater telemetry...")
|
||||
try:
|
||||
# Note: Telemetry uses its own retry settings and does NOT
|
||||
# affect circuit breaker. Status success proves the link is up;
|
||||
# telemetry failures are likely firmware/capability issues.
|
||||
telem_success, telem_payload, telem_err = await with_retries(
|
||||
lambda: cmd.req_telemetry_sync(
|
||||
contact, timeout=0, min_timeout=cfg.telemetry_timeout_s
|
||||
),
|
||||
attempts=cfg.telemetry_retry_attempts,
|
||||
backoff_s=cfg.telemetry_retry_backoff_s,
|
||||
name="req_telemetry_sync",
|
||||
)
|
||||
|
||||
if telem_success and telem_payload:
|
||||
log.debug(f"req_telemetry_sync: {telem_payload}")
|
||||
lpp_data = extract_lpp_from_payload(telem_payload)
|
||||
if lpp_data is not None:
|
||||
telemetry_metrics = extract_telemetry_metrics(lpp_data)
|
||||
log.debug(f"Extracted {len(telemetry_metrics)} telemetry metrics")
|
||||
|
||||
# Store telemetry metrics
|
||||
if telemetry_metrics:
|
||||
try:
|
||||
inserted = insert_metrics(ts=ts, role="repeater", metrics=telemetry_metrics)
|
||||
log.debug(f"Stored {inserted} telemetry metrics")
|
||||
except Exception as e:
|
||||
log.warn(f"Failed to store telemetry metrics: {e}")
|
||||
else:
|
||||
log.warn(f"req_telemetry_sync failed: {telem_err}")
|
||||
except Exception as e:
|
||||
log.warn(f"Telemetry collection error (continuing): {e}")
|
||||
|
||||
except Exception as e:
|
||||
log.error(f"Error during collection: {e}")
|
||||
cb.record_failure(cfg.remote_cb_fails, cfg.remote_cb_cooldown_s)
|
||||
log.debug(f"Circuit breaker: recorded failure ({cb.consecutive_failures}/{cfg.remote_cb_fails})")
|
||||
|
||||
except Exception as e:
|
||||
log.error(f"Error during collection: {e}")
|
||||
cb.record_failure(cfg.remote_cb_fails, cfg.remote_cb_cooldown_s)
|
||||
|
||||
finally:
|
||||
# Close connection
|
||||
if hasattr(mc, "disconnect"):
|
||||
try:
|
||||
await mc.disconnect()
|
||||
except Exception:
|
||||
pass
|
||||
# Connection closed and lock released by context manager
|
||||
|
||||
# Print summary
|
||||
summary_parts = [f"ts={ts}"]
|
||||
if "bat" in metrics:
|
||||
bat_v = metrics["bat"] / 1000.0
|
||||
if "bat" in status_metrics:
|
||||
bat_v = status_metrics["bat"] / 1000.0
|
||||
summary_parts.append(f"bat={bat_v:.2f}V")
|
||||
if "uptime" in metrics:
|
||||
uptime_days = metrics["uptime"] // 86400
|
||||
if "uptime" in status_metrics:
|
||||
uptime_days = status_metrics["uptime"] // 86400
|
||||
summary_parts.append(f"uptime={int(uptime_days)}d")
|
||||
if "nb_recv" in metrics:
|
||||
summary_parts.append(f"rx={int(metrics['nb_recv'])}")
|
||||
if "nb_sent" in metrics:
|
||||
summary_parts.append(f"tx={int(metrics['nb_sent'])}")
|
||||
if "nb_recv" in status_metrics:
|
||||
summary_parts.append(f"rx={int(status_metrics['nb_recv'])}")
|
||||
if "nb_sent" in status_metrics:
|
||||
summary_parts.append(f"tx={int(status_metrics['nb_sent'])}")
|
||||
if telemetry_metrics:
|
||||
summary_parts.append(f"telem={len(telemetry_metrics)}")
|
||||
|
||||
log.info(f"Repeater ({node_name}): {', '.join(summary_parts)}")
|
||||
|
||||
# Write metrics to database
|
||||
if status_ok and metrics:
|
||||
try:
|
||||
inserted = insert_metrics(ts=ts, role="repeater", metrics=metrics)
|
||||
log.debug(f"Inserted {inserted} metrics to database (ts={ts})")
|
||||
except Exception as e:
|
||||
log.error(f"Failed to write metrics to database: {e}")
|
||||
return 1
|
||||
|
||||
return 0 if status_ok else 1
|
||||
|
||||
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
"""MeshCore network monitoring library."""
|
||||
|
||||
__version__ = "0.2.0" # x-release-please-version
|
||||
__version__ = "0.2.9" # x-release-please-version
|
||||
|
||||
@@ -167,6 +167,7 @@ def load_timeseries_from_db(
|
||||
end_time: datetime,
|
||||
lookback: timedelta,
|
||||
period: str,
|
||||
all_metrics: Optional[dict[str, list[tuple[int, float]]]] = None,
|
||||
) -> TimeSeries:
|
||||
"""Load time series data from SQLite database.
|
||||
|
||||
@@ -179,6 +180,7 @@ def load_timeseries_from_db(
|
||||
end_time: End of the time range (typically now)
|
||||
lookback: How far back to look
|
||||
period: Period name for binning config ("day", "week", etc.)
|
||||
all_metrics: Optional pre-fetched metrics dict for this period
|
||||
|
||||
Returns:
|
||||
TimeSeries with extracted data points
|
||||
@@ -188,7 +190,8 @@ def load_timeseries_from_db(
|
||||
end_ts = int(end_time.timestamp())
|
||||
|
||||
# Fetch all metrics for this role/period (returns pivoted dict)
|
||||
all_metrics = get_metrics_for_period(role, start_ts, end_ts)
|
||||
if all_metrics is None:
|
||||
all_metrics = get_metrics_for_period(role, start_ts, end_ts)
|
||||
|
||||
# Get data for this specific metric
|
||||
metric_data = all_metrics.get(metric, [])
|
||||
@@ -379,10 +382,22 @@ def render_chart_svg(
|
||||
|
||||
# Plot area fill
|
||||
area_color = _hex_to_rgba(theme.area)
|
||||
ax.fill_between(timestamps, values, alpha=area_color[3], color=f"#{theme.line}")
|
||||
area = ax.fill_between(
|
||||
timestamps,
|
||||
values,
|
||||
alpha=area_color[3],
|
||||
color=f"#{theme.line}",
|
||||
)
|
||||
area.set_gid("chart-area")
|
||||
|
||||
# Plot line
|
||||
ax.plot(timestamps, values, color=f"#{theme.line}", linewidth=2)
|
||||
(line,) = ax.plot(
|
||||
timestamps,
|
||||
values,
|
||||
color=f"#{theme.line}",
|
||||
linewidth=2,
|
||||
)
|
||||
line.set_gid("chart-line")
|
||||
|
||||
# Set Y-axis limits and track actual values used
|
||||
if y_min is not None and y_max is not None:
|
||||
@@ -458,7 +473,7 @@ def _inject_data_attributes(
|
||||
|
||||
Adds:
|
||||
- data-metric, data-period, data-theme, data-x-start, data-x-end, data-y-min, data-y-max to root <svg>
|
||||
- data-points JSON array to the chart path element
|
||||
- data-points JSON array to the root <svg> and chart line path
|
||||
|
||||
Args:
|
||||
svg: Raw SVG string
|
||||
@@ -495,22 +510,33 @@ def _inject_data_attributes(
|
||||
r'<svg\b',
|
||||
f'<svg data-metric="{ts.metric}" data-period="{ts.period}" data-theme="{theme_name}" '
|
||||
f'data-x-start="{x_start_ts}" data-x-end="{x_end_ts}" '
|
||||
f'data-y-min="{y_min_val}" data-y-max="{y_max_val}"',
|
||||
f'data-y-min="{y_min_val}" data-y-max="{y_max_val}" '
|
||||
f'data-points="{data_points_attr}"',
|
||||
svg,
|
||||
count=1
|
||||
)
|
||||
|
||||
# Add data-points to the main path element (the line, not the fill)
|
||||
# Look for the second path element (first is usually the fill area)
|
||||
path_count = 0
|
||||
def add_data_to_path(match):
|
||||
nonlocal path_count
|
||||
path_count += 1
|
||||
if path_count == 2: # The line path
|
||||
return f'<path data-points="{data_points_attr}"'
|
||||
return match.group(0)
|
||||
# Add data-points to the line path inside the #chart-line group
|
||||
# matplotlib creates <g id="chart-line"><path d="..."></g>
|
||||
svg, count = re.subn(
|
||||
r'(<g[^>]*id="chart-line"[^>]*>\s*<path\b)',
|
||||
rf'\1 data-points="{data_points_attr}"',
|
||||
svg,
|
||||
count=1,
|
||||
)
|
||||
|
||||
svg = re.sub(r'<path\b', add_data_to_path, svg)
|
||||
if count == 0:
|
||||
# Fallback: look for the second path element (first is usually the fill area)
|
||||
path_count = 0
|
||||
|
||||
def add_data_to_path(match):
|
||||
nonlocal path_count
|
||||
path_count += 1
|
||||
if path_count == 2: # The line path
|
||||
return f'<path data-points="{data_points_attr}"'
|
||||
return match.group(0)
|
||||
|
||||
svg = re.sub(r'<path\b', add_data_to_path, svg)
|
||||
|
||||
return svg
|
||||
|
||||
@@ -558,9 +584,16 @@ def render_all_charts(
|
||||
for metric in metrics:
|
||||
all_stats[metric] = {}
|
||||
|
||||
for period in periods:
|
||||
period_cfg = PERIOD_CONFIG[period]
|
||||
for period in periods:
|
||||
period_cfg = PERIOD_CONFIG[period]
|
||||
x_end = now
|
||||
x_start = now - period_cfg["lookback"]
|
||||
|
||||
start_ts = int(x_start.timestamp())
|
||||
end_ts = int(x_end.timestamp())
|
||||
all_metrics = get_metrics_for_period(role, start_ts, end_ts)
|
||||
|
||||
for metric in metrics:
|
||||
# Load time series from database
|
||||
ts = load_timeseries_from_db(
|
||||
role=role,
|
||||
@@ -568,6 +601,7 @@ def render_all_charts(
|
||||
end_time=now,
|
||||
lookback=period_cfg["lookback"],
|
||||
period=period,
|
||||
all_metrics=all_metrics,
|
||||
)
|
||||
|
||||
# Calculate and store statistics
|
||||
@@ -579,10 +613,6 @@ def render_all_charts(
|
||||
y_min = y_range[0] if y_range else None
|
||||
y_max = y_range[1] if y_range else None
|
||||
|
||||
# Calculate X-axis range for full period padding
|
||||
x_end = now
|
||||
x_start = now - period_cfg["lookback"]
|
||||
|
||||
# Render chart for each theme
|
||||
for theme_name in themes:
|
||||
theme = CHART_THEMES[theme_name]
|
||||
|
||||
@@ -145,7 +145,6 @@ class Config:
|
||||
self.repeater_name = get_str("REPEATER_NAME")
|
||||
self.repeater_key_prefix = get_str("REPEATER_KEY_PREFIX")
|
||||
self.repeater_password = get_str("REPEATER_PASSWORD")
|
||||
self.repeater_fetch_acl = get_bool("REPEATER_FETCH_ACL", False)
|
||||
|
||||
# Intervals and timeouts
|
||||
self.companion_step = get_int("COMPANION_STEP", 60)
|
||||
@@ -156,9 +155,17 @@ class Config:
|
||||
self.remote_cb_fails = get_int("REMOTE_CB_FAILS", 6)
|
||||
self.remote_cb_cooldown_s = get_int("REMOTE_CB_COOLDOWN_S", 3600)
|
||||
|
||||
# Paths
|
||||
self.state_dir = get_path("STATE_DIR", "./data/state")
|
||||
self.out_dir = get_path("OUT_DIR", "./out")
|
||||
# Telemetry collection (requires sensor board on repeater)
|
||||
self.telemetry_enabled = get_bool("TELEMETRY_ENABLED", False)
|
||||
# Separate settings allow tuning if telemetry proves problematic
|
||||
# Defaults match status settings - tune down if needed
|
||||
self.telemetry_timeout_s = get_int("TELEMETRY_TIMEOUT_S", 10)
|
||||
self.telemetry_retry_attempts = get_int("TELEMETRY_RETRY_ATTEMPTS", 2)
|
||||
self.telemetry_retry_backoff_s = get_int("TELEMETRY_RETRY_BACKOFF_S", 4)
|
||||
|
||||
# Paths (defaults are Docker container paths; native installs override via config)
|
||||
self.state_dir = get_path("STATE_DIR", "/data/state")
|
||||
self.out_dir = get_path("OUT_DIR", "/out")
|
||||
|
||||
# Report location metadata
|
||||
self.report_location_name = get_str(
|
||||
|
||||
@@ -588,8 +588,8 @@ def build_page_context(
|
||||
last_updated = None
|
||||
last_updated_iso = None
|
||||
if ts:
|
||||
dt = datetime.fromtimestamp(ts)
|
||||
last_updated = dt.strftime("%b %d, %Y at %H:%M UTC")
|
||||
dt = datetime.fromtimestamp(ts).astimezone()
|
||||
last_updated = dt.strftime("%b %d, %Y at %H:%M %Z")
|
||||
last_updated_iso = dt.isoformat()
|
||||
|
||||
# Build metrics for sidebar
|
||||
@@ -845,24 +845,24 @@ def build_monthly_table_data(
|
||||
airtime = m.get("airtime", MetricStats())
|
||||
|
||||
# Convert mV to V for display
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value else None
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean is not None else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value is not None else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value is not None else None
|
||||
|
||||
rows.append({
|
||||
"is_summary": False,
|
||||
"cells": [
|
||||
{"value": f"{daily.date.day:02d}", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean else "-", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean is not None else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean is not None else "-", "class": None},
|
||||
{"value": _fmt_val_time(bat_v_min, bat.min_time), "class": "muted"},
|
||||
{"value": _fmt_val_time(bat_v_max, bat.max_time), "class": "muted"},
|
||||
{"value": f"{rssi.mean:.0f}" if rssi.mean else "-", "class": None},
|
||||
{"value": f"{snr.mean:.1f}" if snr.mean else "-", "class": None},
|
||||
{"value": f"{noise.mean:.0f}" if noise.mean else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total else "-", "class": None},
|
||||
{"value": f"{airtime.total:,}" if airtime.total else "-", "class": None},
|
||||
{"value": f"{rssi.mean:.0f}" if rssi.mean is not None else "-", "class": None},
|
||||
{"value": f"{snr.mean:.1f}" if snr.mean is not None else "-", "class": None},
|
||||
{"value": f"{noise.mean:.0f}" if noise.mean is not None else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total is not None else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total is not None else "-", "class": None},
|
||||
{"value": f"{airtime.total:,}" if airtime.total is not None else "-", "class": None},
|
||||
],
|
||||
})
|
||||
|
||||
@@ -877,24 +877,24 @@ def build_monthly_table_data(
|
||||
tx = s.get("nb_sent", MetricStats())
|
||||
airtime = s.get("airtime", MetricStats())
|
||||
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value else None
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean is not None else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value is not None else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value is not None else None
|
||||
|
||||
rows.append({
|
||||
"is_summary": True,
|
||||
"cells": [
|
||||
{"value": "", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean else "-", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean is not None else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean is not None else "-", "class": None},
|
||||
{"value": _fmt_val_day(bat_v_min, bat.min_time), "class": "muted"},
|
||||
{"value": _fmt_val_day(bat_v_max, bat.max_time), "class": "muted"},
|
||||
{"value": f"{rssi.mean:.0f}" if rssi.mean else "-", "class": None},
|
||||
{"value": f"{snr.mean:.1f}" if snr.mean else "-", "class": None},
|
||||
{"value": f"{noise.mean:.0f}" if noise.mean else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total else "-", "class": None},
|
||||
{"value": f"{airtime.total:,}" if airtime.total else "-", "class": None},
|
||||
{"value": f"{rssi.mean:.0f}" if rssi.mean is not None else "-", "class": None},
|
||||
{"value": f"{snr.mean:.1f}" if snr.mean is not None else "-", "class": None},
|
||||
{"value": f"{noise.mean:.0f}" if noise.mean is not None else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total is not None else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total is not None else "-", "class": None},
|
||||
{"value": f"{airtime.total:,}" if airtime.total is not None else "-", "class": None},
|
||||
],
|
||||
})
|
||||
|
||||
@@ -928,21 +928,21 @@ def build_monthly_table_data(
|
||||
tx = m.get("sent", MetricStats())
|
||||
|
||||
# Convert mV to V for display
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value else None
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean is not None else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value is not None else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value is not None else None
|
||||
|
||||
rows.append({
|
||||
"is_summary": False,
|
||||
"cells": [
|
||||
{"value": f"{daily.date.day:02d}", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean else "-", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean is not None else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean is not None else "-", "class": None},
|
||||
{"value": _fmt_val_time(bat_v_min, bat.min_time), "class": "muted"},
|
||||
{"value": _fmt_val_time(bat_v_max, bat.max_time), "class": "muted"},
|
||||
{"value": f"{contacts.mean:.0f}" if contacts.mean else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total else "-", "class": None},
|
||||
{"value": f"{contacts.mean:.0f}" if contacts.mean is not None else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total is not None else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total is not None else "-", "class": None},
|
||||
],
|
||||
})
|
||||
|
||||
@@ -954,21 +954,21 @@ def build_monthly_table_data(
|
||||
rx = s.get("recv", MetricStats())
|
||||
tx = s.get("sent", MetricStats())
|
||||
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value else None
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean is not None else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value is not None else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value is not None else None
|
||||
|
||||
rows.append({
|
||||
"is_summary": True,
|
||||
"cells": [
|
||||
{"value": "", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean else "-", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean is not None else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean is not None else "-", "class": None},
|
||||
{"value": _fmt_val_day(bat_v_min, bat.min_time), "class": "muted"},
|
||||
{"value": _fmt_val_day(bat_v_max, bat.max_time), "class": "muted"},
|
||||
{"value": f"{contacts.mean:.0f}" if contacts.mean else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total else "-", "class": None},
|
||||
{"value": f"{contacts.mean:.0f}" if contacts.mean is not None else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total is not None else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total is not None else "-", "class": None},
|
||||
],
|
||||
})
|
||||
|
||||
@@ -1033,23 +1033,23 @@ def build_yearly_table_data(
|
||||
tx = s.get("nb_sent", MetricStats())
|
||||
|
||||
# Convert mV to V
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value else None
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean is not None else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value is not None else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value is not None else None
|
||||
|
||||
rows.append({
|
||||
"is_summary": False,
|
||||
"cells": [
|
||||
{"value": str(agg.year), "class": None},
|
||||
{"value": f"{monthly.month:02d}", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean else "-", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean is not None else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean is not None else "-", "class": None},
|
||||
{"value": _fmt_val_day(bat_v_max, bat.max_time), "class": "muted"},
|
||||
{"value": _fmt_val_day(bat_v_min, bat.min_time), "class": "muted"},
|
||||
{"value": f"{rssi.mean:.0f}" if rssi.mean else "-", "class": None},
|
||||
{"value": f"{snr.mean:.1f}" if snr.mean else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total else "-", "class": None},
|
||||
{"value": f"{rssi.mean:.0f}" if rssi.mean is not None else "-", "class": None},
|
||||
{"value": f"{snr.mean:.1f}" if snr.mean is not None else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total is not None else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total is not None else "-", "class": None},
|
||||
],
|
||||
})
|
||||
|
||||
@@ -1062,23 +1062,23 @@ def build_yearly_table_data(
|
||||
rx = s.get("nb_recv", MetricStats())
|
||||
tx = s.get("nb_sent", MetricStats())
|
||||
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value else None
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean is not None else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value is not None else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value is not None else None
|
||||
|
||||
rows.append({
|
||||
"is_summary": True,
|
||||
"cells": [
|
||||
{"value": "", "class": None},
|
||||
{"value": "", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean else "-", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean is not None else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean is not None else "-", "class": None},
|
||||
{"value": _fmt_val_month(bat_v_max, bat.max_time), "class": "muted"},
|
||||
{"value": _fmt_val_month(bat_v_min, bat.min_time), "class": "muted"},
|
||||
{"value": f"{rssi.mean:.0f}" if rssi.mean else "-", "class": None},
|
||||
{"value": f"{snr.mean:.1f}" if snr.mean else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total else "-", "class": None},
|
||||
{"value": f"{rssi.mean:.0f}" if rssi.mean is not None else "-", "class": None},
|
||||
{"value": f"{snr.mean:.1f}" if snr.mean is not None else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total is not None else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total is not None else "-", "class": None},
|
||||
],
|
||||
})
|
||||
|
||||
@@ -1113,22 +1113,22 @@ def build_yearly_table_data(
|
||||
tx = s.get("sent", MetricStats())
|
||||
|
||||
# Convert mV to V
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value else None
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean is not None else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value is not None else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value is not None else None
|
||||
|
||||
rows.append({
|
||||
"is_summary": False,
|
||||
"cells": [
|
||||
{"value": str(agg.year), "class": None},
|
||||
{"value": f"{monthly.month:02d}", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean else "-", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean is not None else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean is not None else "-", "class": None},
|
||||
{"value": _fmt_val_day(bat_v_max, bat.max_time), "class": "muted"},
|
||||
{"value": _fmt_val_day(bat_v_min, bat.min_time), "class": "muted"},
|
||||
{"value": f"{contacts.mean:.0f}" if contacts.mean else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total else "-", "class": None},
|
||||
{"value": f"{contacts.mean:.0f}" if contacts.mean is not None else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total is not None else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total is not None else "-", "class": None},
|
||||
],
|
||||
})
|
||||
|
||||
@@ -1140,22 +1140,22 @@ def build_yearly_table_data(
|
||||
rx = s.get("recv", MetricStats())
|
||||
tx = s.get("sent", MetricStats())
|
||||
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value else None
|
||||
bat_v_mean = bat.mean / 1000.0 if bat.mean is not None else None
|
||||
bat_v_min = bat.min_value / 1000.0 if bat.min_value is not None else None
|
||||
bat_v_max = bat.max_value / 1000.0 if bat.max_value is not None else None
|
||||
|
||||
rows.append({
|
||||
"is_summary": True,
|
||||
"cells": [
|
||||
{"value": "", "class": None},
|
||||
{"value": "", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean else "-", "class": None},
|
||||
{"value": f"{bat_v_mean:.2f}" if bat_v_mean is not None else "-", "class": None},
|
||||
{"value": f"{bat_pct.mean:.0f}" if bat_pct.mean is not None else "-", "class": None},
|
||||
{"value": _fmt_val_month(bat_v_max, bat.max_time), "class": "muted"},
|
||||
{"value": _fmt_val_month(bat_v_min, bat.min_time), "class": "muted"},
|
||||
{"value": f"{contacts.mean:.0f}" if contacts.mean else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total else "-", "class": None},
|
||||
{"value": f"{contacts.mean:.0f}" if contacts.mean is not None else "-", "class": None},
|
||||
{"value": f"{rx.total:,}" if rx.total is not None else "-", "class": "highlight"},
|
||||
{"value": f"{tx.total:,}" if tx.total is not None else "-", "class": None},
|
||||
],
|
||||
})
|
||||
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
"""MeshCore client wrapper with safe command execution and contact lookup."""
|
||||
|
||||
import asyncio
|
||||
from typing import Any, Optional, Callable, Coroutine
|
||||
import fcntl
|
||||
from contextlib import asynccontextmanager
|
||||
from pathlib import Path
|
||||
from typing import Any, AsyncIterator, Callable, Coroutine, Optional
|
||||
|
||||
from .env import get_config
|
||||
from . import log
|
||||
@@ -100,6 +103,92 @@ async def connect_from_env() -> Optional[Any]:
|
||||
return None
|
||||
|
||||
|
||||
async def _acquire_lock_async(
|
||||
lock_file,
|
||||
timeout: float = 60.0,
|
||||
poll_interval: float = 0.1,
|
||||
) -> None:
|
||||
"""Acquire exclusive file lock without blocking the event loop.
|
||||
|
||||
Uses non-blocking LOCK_NB with async polling to avoid freezing the event loop.
|
||||
|
||||
Args:
|
||||
lock_file: Open file handle to lock
|
||||
timeout: Maximum seconds to wait for lock
|
||||
poll_interval: Seconds between lock attempts
|
||||
|
||||
Raises:
|
||||
TimeoutError: If lock cannot be acquired within timeout
|
||||
"""
|
||||
loop = asyncio.get_running_loop()
|
||||
deadline = loop.time() + timeout
|
||||
|
||||
while True:
|
||||
try:
|
||||
fcntl.flock(lock_file.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
|
||||
return
|
||||
except BlockingIOError:
|
||||
if loop.time() >= deadline:
|
||||
raise TimeoutError(
|
||||
f"Could not acquire serial lock within {timeout}s. "
|
||||
"Another process may be using the serial port."
|
||||
)
|
||||
await asyncio.sleep(poll_interval)
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def connect_with_lock(
|
||||
lock_timeout: float = 60.0,
|
||||
) -> AsyncIterator[Optional[Any]]:
|
||||
"""Connect to MeshCore with serial port locking to prevent concurrent access.
|
||||
|
||||
For serial transport: Acquires exclusive file lock before connecting.
|
||||
For TCP/BLE: No locking needed (protocol handles multiple connections).
|
||||
|
||||
Args:
|
||||
lock_timeout: Maximum seconds to wait for serial lock
|
||||
|
||||
Yields:
|
||||
MeshCore client instance, or None if connection failed
|
||||
"""
|
||||
cfg = get_config()
|
||||
lock_file = None
|
||||
mc = None
|
||||
needs_lock = cfg.mesh_transport.lower() == "serial"
|
||||
|
||||
try:
|
||||
if needs_lock:
|
||||
lock_path: Path = cfg.state_dir / "serial.lock"
|
||||
lock_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Use 'a' mode: doesn't truncate, creates if missing
|
||||
lock_file = open(lock_path, "a")
|
||||
try:
|
||||
await _acquire_lock_async(lock_file, timeout=lock_timeout)
|
||||
log.debug(f"Acquired serial lock: {lock_path}")
|
||||
except Exception:
|
||||
# If lock acquisition fails, close file before re-raising
|
||||
lock_file.close()
|
||||
lock_file = None
|
||||
raise
|
||||
|
||||
mc = await connect_from_env()
|
||||
yield mc
|
||||
|
||||
finally:
|
||||
# Disconnect first (while we still hold the lock)
|
||||
if mc is not None and hasattr(mc, "disconnect"):
|
||||
try:
|
||||
await mc.disconnect()
|
||||
except Exception as e:
|
||||
log.debug(f"Error during disconnect (ignored): {e}")
|
||||
|
||||
# Release lock by closing the file (close() auto-releases flock)
|
||||
if lock_file is not None:
|
||||
lock_file.close()
|
||||
log.debug("Released serial lock")
|
||||
|
||||
|
||||
async def run_command(
|
||||
mc: Any,
|
||||
cmd_coro: Coroutine,
|
||||
|
||||
@@ -17,17 +17,12 @@ import calendar
|
||||
import json
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import date, datetime, timedelta
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional
|
||||
|
||||
from .db import get_connection, get_metrics_for_period, VALID_ROLES
|
||||
from .env import get_config
|
||||
from .metrics import (
|
||||
is_counter_metric,
|
||||
get_chart_metrics,
|
||||
transform_value,
|
||||
)
|
||||
from . import log
|
||||
|
||||
|
||||
def _validate_role(role: str) -> str:
|
||||
@@ -59,6 +54,32 @@ def get_metrics_for_role(role: str) -> list[str]:
|
||||
raise ValueError(f"Unknown role: {role}")
|
||||
|
||||
|
||||
REPORT_UNITS_RAW = {
|
||||
"battery_mv": "mV",
|
||||
"bat": "mV",
|
||||
"bat_pct": "%",
|
||||
"uptime": "s",
|
||||
"uptime_secs": "s",
|
||||
"last_rssi": "dBm",
|
||||
"last_snr": "dB",
|
||||
"noise_floor": "dBm",
|
||||
"tx_queue_len": "count",
|
||||
"contacts": "count",
|
||||
"recv": "packets",
|
||||
"sent": "packets",
|
||||
"nb_recv": "packets",
|
||||
"nb_sent": "packets",
|
||||
"airtime": "s",
|
||||
"rx_airtime": "s",
|
||||
"flood_dups": "packets",
|
||||
"direct_dups": "packets",
|
||||
"sent_flood": "packets",
|
||||
"recv_flood": "packets",
|
||||
"sent_direct": "packets",
|
||||
"recv_direct": "packets",
|
||||
}
|
||||
|
||||
|
||||
@dataclass
|
||||
class MetricStats:
|
||||
"""Statistics for a single metric over a period.
|
||||
@@ -1116,10 +1137,14 @@ def format_yearly_txt(
|
||||
return format_yearly_txt_companion(agg, node_name, location)
|
||||
|
||||
|
||||
def _metric_stats_to_dict(stats: MetricStats) -> dict[str, Any]:
|
||||
def _metric_stats_to_dict(stats: MetricStats, metric: str) -> dict[str, Any]:
|
||||
"""Convert MetricStats to JSON-serializable dict."""
|
||||
result: dict[str, Any] = {"count": stats.count}
|
||||
|
||||
unit = REPORT_UNITS_RAW.get(metric)
|
||||
if unit:
|
||||
result["unit"] = unit
|
||||
|
||||
if stats.mean is not None:
|
||||
result["mean"] = round(stats.mean, 4)
|
||||
if stats.min_value is not None:
|
||||
@@ -1144,7 +1169,7 @@ def _daily_to_dict(daily: DailyAggregate) -> dict[str, Any]:
|
||||
"date": daily.date.isoformat(),
|
||||
"snapshot_count": daily.snapshot_count,
|
||||
"metrics": {
|
||||
ds: _metric_stats_to_dict(stats)
|
||||
ds: _metric_stats_to_dict(stats, ds)
|
||||
for ds, stats in daily.metrics.items()
|
||||
if stats.has_data
|
||||
},
|
||||
@@ -1167,7 +1192,7 @@ def monthly_to_json(agg: MonthlyAggregate) -> dict[str, Any]:
|
||||
"role": agg.role,
|
||||
"days_with_data": len(agg.daily),
|
||||
"summary": {
|
||||
ds: _metric_stats_to_dict(stats)
|
||||
ds: _metric_stats_to_dict(stats, ds)
|
||||
for ds, stats in agg.summary.items()
|
||||
if stats.has_data
|
||||
},
|
||||
@@ -1190,7 +1215,7 @@ def yearly_to_json(agg: YearlyAggregate) -> dict[str, Any]:
|
||||
"role": agg.role,
|
||||
"months_with_data": len(agg.monthly),
|
||||
"summary": {
|
||||
ds: _metric_stats_to_dict(stats)
|
||||
ds: _metric_stats_to_dict(stats, ds)
|
||||
for ds, stats in agg.summary.items()
|
||||
if stats.has_data
|
||||
},
|
||||
@@ -1200,7 +1225,7 @@ def yearly_to_json(agg: YearlyAggregate) -> dict[str, Any]:
|
||||
"month": m.month,
|
||||
"days_with_data": len(m.daily),
|
||||
"summary": {
|
||||
ds: _metric_stats_to_dict(stats)
|
||||
ds: _metric_stats_to_dict(stats, ds)
|
||||
for ds, stats in m.summary.items()
|
||||
if stats.has_data
|
||||
},
|
||||
|
||||
102
src/meshmon/telemetry.py
Normal file
102
src/meshmon/telemetry.py
Normal file
@@ -0,0 +1,102 @@
|
||||
"""Telemetry data extraction from Cayenne LPP format."""
|
||||
|
||||
from typing import Any
|
||||
from . import log
|
||||
|
||||
__all__ = ["extract_lpp_from_payload", "extract_telemetry_metrics"]
|
||||
|
||||
|
||||
def extract_lpp_from_payload(payload: Any) -> list | None:
|
||||
"""Extract LPP data list from telemetry payload.
|
||||
|
||||
Handles both formats returned by the MeshCore API:
|
||||
- Dict format: {'pubkey_pre': '...', 'lpp': [...]}
|
||||
- Direct list format: [...]
|
||||
|
||||
Args:
|
||||
payload: Raw telemetry payload from get_self_telemetry() or req_telemetry_sync()
|
||||
|
||||
Returns:
|
||||
The LPP data list, or None if not extractable.
|
||||
"""
|
||||
if payload is None:
|
||||
return None
|
||||
|
||||
if isinstance(payload, dict):
|
||||
lpp = payload.get("lpp")
|
||||
if lpp is None:
|
||||
log.debug("No 'lpp' key in telemetry payload dict")
|
||||
return None
|
||||
if not isinstance(lpp, list):
|
||||
log.debug(f"Unexpected LPP data type in payload: {type(lpp).__name__}")
|
||||
return None
|
||||
return lpp
|
||||
|
||||
if isinstance(payload, list):
|
||||
return payload
|
||||
|
||||
log.debug(f"Unexpected telemetry payload type: {type(payload).__name__}")
|
||||
return None
|
||||
|
||||
|
||||
def extract_telemetry_metrics(lpp_data: Any) -> dict[str, float]:
|
||||
"""Extract numeric telemetry values from Cayenne LPP response.
|
||||
|
||||
Expected format:
|
||||
[
|
||||
{"type": "temperature", "channel": 0, "value": 23.5},
|
||||
{"type": "gps", "channel": 1, "value": {"latitude": 51.5, "longitude": -0.1, "altitude": 10}}
|
||||
]
|
||||
|
||||
Keys are formatted as:
|
||||
- telemetry.{type}.{channel} for scalar values
|
||||
- telemetry.{type}.{channel}.{subkey} for compound values (e.g., GPS)
|
||||
|
||||
Returns:
|
||||
Dict mapping metric keys to float values. Invalid readings are skipped.
|
||||
"""
|
||||
if not isinstance(lpp_data, list):
|
||||
log.warn(f"Expected list for LPP data, got {type(lpp_data).__name__}")
|
||||
return {}
|
||||
|
||||
metrics: dict[str, float] = {}
|
||||
|
||||
for i, reading in enumerate(lpp_data):
|
||||
if not isinstance(reading, dict):
|
||||
log.debug(f"Skipping non-dict LPP reading at index {i}")
|
||||
continue
|
||||
|
||||
sensor_type = reading.get("type")
|
||||
if not isinstance(sensor_type, str) or not sensor_type.strip():
|
||||
log.debug(f"Skipping reading with invalid type at index {i}")
|
||||
continue
|
||||
|
||||
# Normalize sensor type for use as metric key component
|
||||
sensor_type = sensor_type.strip().lower().replace(" ", "_")
|
||||
|
||||
channel = reading.get("channel", 0)
|
||||
if not isinstance(channel, int):
|
||||
channel = 0
|
||||
|
||||
value = reading.get("value")
|
||||
base_key = f"telemetry.{sensor_type}.{channel}"
|
||||
|
||||
# Note: Check bool before int because bool is a subclass of int in Python.
|
||||
# Some sensors may report digital on/off values as booleans.
|
||||
if isinstance(value, bool):
|
||||
metrics[base_key] = float(value)
|
||||
elif isinstance(value, (int, float)):
|
||||
metrics[base_key] = float(value)
|
||||
elif isinstance(value, dict):
|
||||
for subkey, subval in value.items():
|
||||
if not isinstance(subkey, str):
|
||||
continue
|
||||
subkey_clean = subkey.strip().lower().replace(" ", "_")
|
||||
if not subkey_clean:
|
||||
continue
|
||||
if isinstance(subval, bool):
|
||||
metrics[f"{base_key}.{subkey_clean}"] = float(subval)
|
||||
elif isinstance(subval, (int, float)):
|
||||
metrics[f"{base_key}.{subkey_clean}"] = float(subval)
|
||||
|
||||
return metrics
|
||||
@@ -1,142 +1,331 @@
|
||||
/**
|
||||
* Chart tooltip enhancement for MeshCore Stats
|
||||
* Chart Tooltip Enhancement for MeshCore Stats
|
||||
*
|
||||
* Progressive enhancement: charts work fully without JS,
|
||||
* but this adds interactive tooltips on hover.
|
||||
* Progressive enhancement: charts display fully without JavaScript.
|
||||
* This module adds interactive tooltips showing datetime and value on hover,
|
||||
* with an indicator dot that follows the data line.
|
||||
*
|
||||
* Data sources:
|
||||
* - Data points: path.dataset.points or svg.dataset.points (JSON array of {ts, v})
|
||||
* - Time range: svg.dataset.xStart, svg.dataset.xEnd (Unix timestamps)
|
||||
* - Value range: svg.dataset.yMin, svg.dataset.yMax
|
||||
* - Plot bounds: Derived from clipPath rect or line path bounding box
|
||||
*/
|
||||
(function() {
|
||||
(function () {
|
||||
'use strict';
|
||||
|
||||
// Create tooltip element
|
||||
const tooltip = document.createElement('div');
|
||||
tooltip.className = 'chart-tooltip';
|
||||
tooltip.innerHTML = '<div class="tooltip-time"></div><div class="tooltip-value"></div>';
|
||||
document.body.appendChild(tooltip);
|
||||
// ============================================================================
|
||||
// Configuration
|
||||
// ============================================================================
|
||||
|
||||
const tooltipTime = tooltip.querySelector('.tooltip-time');
|
||||
const tooltipValue = tooltip.querySelector('.tooltip-value');
|
||||
|
||||
// Track the current indicator element
|
||||
let currentIndicator = null;
|
||||
let currentSvg = null;
|
||||
|
||||
// Metric display labels and units (using firmware field names)
|
||||
const metricLabels = {
|
||||
// Companion metrics
|
||||
'battery_mv': { label: 'Voltage', unit: 'V', decimals: 2 },
|
||||
'uptime_secs': { label: 'Uptime', unit: 'days', decimals: 2 },
|
||||
'contacts': { label: 'Contacts', unit: '', decimals: 0 },
|
||||
'recv': { label: 'Received', unit: '/min', decimals: 1 },
|
||||
'sent': { label: 'Sent', unit: '/min', decimals: 1 },
|
||||
|
||||
// Repeater metrics
|
||||
'bat': { label: 'Voltage', unit: 'V', decimals: 2 },
|
||||
'bat_pct': { label: 'Charge', unit: '%', decimals: 0 },
|
||||
'uptime': { label: 'Uptime', unit: 'days', decimals: 2 },
|
||||
'last_rssi': { label: 'RSSI', unit: 'dBm', decimals: 0 },
|
||||
'last_snr': { label: 'SNR', unit: 'dB', decimals: 1 },
|
||||
'noise_floor': { label: 'Noise', unit: 'dBm', decimals: 0 },
|
||||
'tx_queue_len': { label: 'Queue', unit: '', decimals: 0 },
|
||||
'nb_recv': { label: 'Received', unit: '/min', decimals: 1 },
|
||||
'nb_sent': { label: 'Sent', unit: '/min', decimals: 1 },
|
||||
'airtime': { label: 'TX Air', unit: 's/min', decimals: 2 },
|
||||
'rx_airtime': { label: 'RX Air', unit: 's/min', decimals: 2 },
|
||||
'flood_dups': { label: 'Dropped', unit: '/min', decimals: 1 },
|
||||
'direct_dups': { label: 'Dropped', unit: '/min', decimals: 1 },
|
||||
'sent_flood': { label: 'Sent', unit: '/min', decimals: 1 },
|
||||
'recv_flood': { label: 'Received', unit: '/min', decimals: 1 },
|
||||
'sent_direct': { label: 'Sent', unit: '/min', decimals: 1 },
|
||||
'recv_direct': { label: 'Received', unit: '/min', decimals: 1 },
|
||||
var CONFIG = {
|
||||
tooltipOffset: 15,
|
||||
viewportPadding: 10,
|
||||
indicatorRadius: 5,
|
||||
indicatorStrokeWidth: 2,
|
||||
colors: {
|
||||
light: { fill: '#b45309', stroke: '#ffffff' },
|
||||
dark: { fill: '#f59e0b', stroke: '#0f1114' }
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Format a timestamp as a readable date/time string
|
||||
* Metric display configuration keyed by firmware field name.
|
||||
* Each entry defines how to format values for that metric.
|
||||
*/
|
||||
function formatTime(ts, period) {
|
||||
const date = new Date(ts * 1000);
|
||||
const options = {
|
||||
var METRIC_CONFIG = {
|
||||
// Companion metrics
|
||||
battery_mv: { label: 'Voltage', unit: 'V', decimals: 2 },
|
||||
uptime_secs: { label: 'Uptime', unit: 'days', decimals: 2 },
|
||||
contacts: { label: 'Contacts', unit: '', decimals: 0 },
|
||||
recv: { label: 'Received', unit: '/min', decimals: 1 },
|
||||
sent: { label: 'Sent', unit: '/min', decimals: 1 },
|
||||
|
||||
// Repeater metrics
|
||||
bat: { label: 'Voltage', unit: 'V', decimals: 2 },
|
||||
bat_pct: { label: 'Charge', unit: '%', decimals: 0 },
|
||||
uptime: { label: 'Uptime', unit: 'days', decimals: 2 },
|
||||
last_rssi: { label: 'RSSI', unit: 'dBm', decimals: 0 },
|
||||
last_snr: { label: 'SNR', unit: 'dB', decimals: 1 },
|
||||
noise_floor: { label: 'Noise', unit: 'dBm', decimals: 0 },
|
||||
tx_queue_len: { label: 'Queue', unit: '', decimals: 0 },
|
||||
nb_recv: { label: 'Received', unit: '/min', decimals: 1 },
|
||||
nb_sent: { label: 'Sent', unit: '/min', decimals: 1 },
|
||||
airtime: { label: 'TX Air', unit: 's/min', decimals: 2 },
|
||||
rx_airtime: { label: 'RX Air', unit: 's/min', decimals: 2 },
|
||||
flood_dups: { label: 'Dropped', unit: '/min', decimals: 1 },
|
||||
direct_dups: { label: 'Dropped', unit: '/min', decimals: 1 },
|
||||
sent_flood: { label: 'Sent', unit: '/min', decimals: 1 },
|
||||
recv_flood: { label: 'Received', unit: '/min', decimals: 1 },
|
||||
sent_direct: { label: 'Sent', unit: '/min', decimals: 1 },
|
||||
recv_direct: { label: 'Received', unit: '/min', decimals: 1 }
|
||||
};
|
||||
|
||||
// ============================================================================
|
||||
// Formatting Utilities
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Format a Unix timestamp as a localized date/time string.
|
||||
* Uses browser language preference for locale (determines 12/24 hour format).
|
||||
* Includes year only for year-period charts.
|
||||
*/
|
||||
function formatTimestamp(timestamp, period) {
|
||||
var date = new Date(timestamp * 1000);
|
||||
var options = {
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit'
|
||||
hour: 'numeric',
|
||||
minute: '2-digit',
|
||||
timeZoneName: 'short'
|
||||
};
|
||||
|
||||
// For year view, include year
|
||||
if (period === 'year') {
|
||||
options.year = 'numeric';
|
||||
}
|
||||
|
||||
return date.toLocaleString(undefined, options);
|
||||
// Use browser's language preference (navigator.language), not system locale
|
||||
// Empty array [] or undefined would use OS regional settings instead
|
||||
return date.toLocaleString(navigator.language, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a value with appropriate decimals and unit
|
||||
* Format a numeric value with the appropriate decimals and unit for a metric.
|
||||
*/
|
||||
function formatValue(value, metric) {
|
||||
const config = metricLabels[metric] || { label: metric, unit: '', decimals: 2 };
|
||||
const formatted = value.toFixed(config.decimals);
|
||||
return `${formatted}${config.unit ? ' ' + config.unit : ''}`;
|
||||
function formatMetricValue(value, metric) {
|
||||
var config = METRIC_CONFIG[metric] || { label: metric, unit: '', decimals: 2 };
|
||||
var formatted = value.toFixed(config.decimals);
|
||||
return config.unit ? formatted + ' ' + config.unit : formatted;
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Data Point Utilities
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Find the closest data point to a timestamp, returning index too
|
||||
* Find the data point closest to the target timestamp.
|
||||
* Returns the point object or null if no points available.
|
||||
*/
|
||||
function findClosestPoint(dataPoints, targetTs) {
|
||||
if (!dataPoints || dataPoints.length === 0) return null;
|
||||
function findClosestDataPoint(dataPoints, targetTimestamp) {
|
||||
if (!dataPoints || dataPoints.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let closestIdx = 0;
|
||||
let minDiff = Math.abs(dataPoints[0].ts - targetTs);
|
||||
var closest = dataPoints[0];
|
||||
var minDiff = Math.abs(closest.ts - targetTimestamp);
|
||||
|
||||
for (let i = 1; i < dataPoints.length; i++) {
|
||||
const diff = Math.abs(dataPoints[i].ts - targetTs);
|
||||
for (var i = 1; i < dataPoints.length; i++) {
|
||||
var diff = Math.abs(dataPoints[i].ts - targetTimestamp);
|
||||
if (diff < minDiff) {
|
||||
minDiff = diff;
|
||||
closestIdx = i;
|
||||
closest = dataPoints[i];
|
||||
}
|
||||
}
|
||||
|
||||
return { point: dataPoints[closestIdx], index: closestIdx };
|
||||
return closest;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create or get the indicator circle for an SVG
|
||||
* Parse and cache data points on an SVG element.
|
||||
* Handles HTML entity encoding from server-side JSON embedding.
|
||||
*/
|
||||
function getDataPoints(svg, rawJson) {
|
||||
if (svg._dataPoints) {
|
||||
return svg._dataPoints;
|
||||
}
|
||||
|
||||
try {
|
||||
var json = rawJson.replace(/"/g, '"');
|
||||
svg._dataPoints = JSON.parse(json);
|
||||
return svg._dataPoints;
|
||||
} catch (error) {
|
||||
console.warn('Chart tooltip: failed to parse data points', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// SVG Coordinate Utilities
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Get and cache the plot area bounds for an SVG chart.
|
||||
* Prefers the clip path rect (defines full plot area) over line path bbox
|
||||
* (which only covers the actual data range).
|
||||
*/
|
||||
function getPlotAreaBounds(svg, fallbackPath) {
|
||||
if (svg._plotArea) {
|
||||
return svg._plotArea;
|
||||
}
|
||||
|
||||
var clipRect = svg.querySelector('clipPath rect');
|
||||
if (clipRect) {
|
||||
svg._plotArea = {
|
||||
x: parseFloat(clipRect.getAttribute('x')),
|
||||
y: parseFloat(clipRect.getAttribute('y')),
|
||||
width: parseFloat(clipRect.getAttribute('width')),
|
||||
height: parseFloat(clipRect.getAttribute('height'))
|
||||
};
|
||||
} else if (fallbackPath) {
|
||||
svg._plotArea = fallbackPath.getBBox();
|
||||
}
|
||||
|
||||
return svg._plotArea;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the chart line path element within an SVG.
|
||||
* Tries multiple selectors for compatibility with different SVG structures.
|
||||
*/
|
||||
function findLinePath(svg) {
|
||||
return (
|
||||
svg.querySelector('#chart-line path') ||
|
||||
svg.querySelector('path#chart-line') ||
|
||||
svg.querySelector('[gid="chart-line"] path') ||
|
||||
svg.querySelector('path[gid="chart-line"]') ||
|
||||
svg.querySelector('path[data-points]')
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a screen X coordinate to SVG coordinate space.
|
||||
*/
|
||||
function screenToSvgX(svg, clientX) {
|
||||
var svgRect = svg.getBoundingClientRect();
|
||||
var viewBox = svg.viewBox.baseVal;
|
||||
var scale = viewBox.width / svgRect.width;
|
||||
return (clientX - svgRect.left) * scale + viewBox.x;
|
||||
}
|
||||
|
||||
/**
|
||||
* Map a timestamp to an X coordinate within the plot area.
|
||||
*/
|
||||
function timestampToX(timestamp, xStart, xEnd, plotArea) {
|
||||
var relativePosition = (timestamp - xStart) / (xEnd - xStart);
|
||||
return plotArea.x + relativePosition * plotArea.width;
|
||||
}
|
||||
|
||||
/**
|
||||
* Map a value to a Y coordinate within the plot area.
|
||||
* SVG Y-axis is inverted (0 at top), so higher values map to lower Y.
|
||||
*/
|
||||
function valueToY(value, yMin, yMax, plotArea) {
|
||||
var ySpan = yMax - yMin || 1;
|
||||
var relativePosition = (value - yMin) / ySpan;
|
||||
return plotArea.y + plotArea.height - relativePosition * plotArea.height;
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Tooltip Element
|
||||
// ============================================================================
|
||||
|
||||
var tooltip = null;
|
||||
var tooltipTimeEl = null;
|
||||
var tooltipValueEl = null;
|
||||
|
||||
/**
|
||||
* Create the tooltip DOM element (called once on init).
|
||||
*/
|
||||
function createTooltipElement() {
|
||||
tooltip = document.createElement('div');
|
||||
tooltip.className = 'chart-tooltip';
|
||||
tooltip.innerHTML =
|
||||
'<div class="tooltip-time"></div>' + '<div class="tooltip-value"></div>';
|
||||
document.body.appendChild(tooltip);
|
||||
|
||||
tooltipTimeEl = tooltip.querySelector('.tooltip-time');
|
||||
tooltipValueEl = tooltip.querySelector('.tooltip-value');
|
||||
}
|
||||
|
||||
/**
|
||||
* Update tooltip content and position it near the cursor.
|
||||
*/
|
||||
function showTooltip(event, timeText, valueText) {
|
||||
tooltipTimeEl.textContent = timeText;
|
||||
tooltipValueEl.textContent = valueText;
|
||||
|
||||
var left = event.pageX + CONFIG.tooltipOffset;
|
||||
var top = event.pageY + CONFIG.tooltipOffset;
|
||||
|
||||
// Keep tooltip within viewport
|
||||
var rect = tooltip.getBoundingClientRect();
|
||||
if (left + rect.width > window.innerWidth - CONFIG.viewportPadding) {
|
||||
left = event.pageX - rect.width - CONFIG.tooltipOffset;
|
||||
}
|
||||
if (top + rect.height > window.innerHeight - CONFIG.viewportPadding) {
|
||||
top = event.pageY - rect.height - CONFIG.tooltipOffset;
|
||||
}
|
||||
|
||||
tooltip.style.left = left + 'px';
|
||||
tooltip.style.top = top + 'px';
|
||||
tooltip.classList.add('visible');
|
||||
}
|
||||
|
||||
/**
|
||||
* Hide the tooltip.
|
||||
*/
|
||||
function hideTooltip() {
|
||||
tooltip.classList.remove('visible');
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Indicator Dot
|
||||
// ============================================================================
|
||||
|
||||
var currentIndicator = null;
|
||||
var currentIndicatorSvg = null;
|
||||
|
||||
/**
|
||||
* Get or create the indicator circle for an SVG chart.
|
||||
* Reuses existing indicator if still on the same chart.
|
||||
*/
|
||||
function getIndicator(svg) {
|
||||
if (currentSvg === svg && currentIndicator) {
|
||||
if (currentIndicatorSvg === svg && currentIndicator) {
|
||||
return currentIndicator;
|
||||
}
|
||||
|
||||
// Remove old indicator if switching charts
|
||||
// Remove indicator from previous chart
|
||||
if (currentIndicator && currentIndicator.parentNode) {
|
||||
currentIndicator.parentNode.removeChild(currentIndicator);
|
||||
}
|
||||
|
||||
// Create new indicator as an SVG circle
|
||||
const indicator = document.createElementNS('http://www.w3.org/2000/svg', 'circle');
|
||||
indicator.setAttribute('r', '5');
|
||||
// Create new indicator circle
|
||||
var indicator = document.createElementNS(
|
||||
'http://www.w3.org/2000/svg',
|
||||
'circle'
|
||||
);
|
||||
indicator.setAttribute('r', CONFIG.indicatorRadius);
|
||||
indicator.setAttribute('class', 'chart-indicator');
|
||||
indicator.setAttribute('stroke-width', CONFIG.indicatorStrokeWidth);
|
||||
indicator.style.pointerEvents = 'none';
|
||||
|
||||
// Get theme from SVG data attribute for color
|
||||
const theme = svg.dataset.theme;
|
||||
if (theme === 'dark') {
|
||||
indicator.setAttribute('fill', '#f59e0b');
|
||||
indicator.setAttribute('stroke', '#0f1114');
|
||||
} else {
|
||||
indicator.setAttribute('fill', '#b45309');
|
||||
indicator.setAttribute('stroke', '#ffffff');
|
||||
}
|
||||
indicator.setAttribute('stroke-width', '2');
|
||||
// Apply theme-appropriate colors
|
||||
var theme = svg.dataset.theme === 'dark' ? 'dark' : 'light';
|
||||
indicator.setAttribute('fill', CONFIG.colors[theme].fill);
|
||||
indicator.setAttribute('stroke', CONFIG.colors[theme].stroke);
|
||||
|
||||
svg.appendChild(indicator);
|
||||
currentIndicator = indicator;
|
||||
currentSvg = svg;
|
||||
currentIndicatorSvg = svg;
|
||||
|
||||
return indicator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Hide and clean up the indicator
|
||||
* Position the indicator at a specific data point.
|
||||
*/
|
||||
function positionIndicator(svg, dataPoint, xStart, xEnd, yMin, yMax, plotArea) {
|
||||
var indicator = getIndicator(svg);
|
||||
var x = timestampToX(dataPoint.ts, xStart, xEnd, plotArea);
|
||||
var y = valueToY(dataPoint.v, yMin, yMax, plotArea);
|
||||
|
||||
indicator.setAttribute('cx', x);
|
||||
indicator.setAttribute('cy', y);
|
||||
indicator.style.display = '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Hide the indicator dot.
|
||||
*/
|
||||
function hideIndicator() {
|
||||
if (currentIndicator) {
|
||||
@@ -144,185 +333,137 @@
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Event Handlers
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Position tooltip near the mouse cursor
|
||||
* Convert a touch event to a mouse-like event object.
|
||||
*/
|
||||
function positionTooltip(event) {
|
||||
const offset = 15;
|
||||
let left = event.pageX + offset;
|
||||
let top = event.pageY + offset;
|
||||
|
||||
// Keep tooltip on screen
|
||||
const rect = tooltip.getBoundingClientRect();
|
||||
const viewportWidth = window.innerWidth;
|
||||
const viewportHeight = window.innerHeight;
|
||||
|
||||
if (left + rect.width > viewportWidth - 10) {
|
||||
left = event.pageX - rect.width - offset;
|
||||
}
|
||||
if (top + rect.height > viewportHeight - 10) {
|
||||
top = event.pageY - rect.height - offset;
|
||||
}
|
||||
|
||||
tooltip.style.left = left + 'px';
|
||||
tooltip.style.top = top + 'px';
|
||||
function touchToMouseEvent(touchEvent) {
|
||||
var touch = touchEvent.touches[0];
|
||||
return {
|
||||
currentTarget: touchEvent.currentTarget,
|
||||
clientX: touch.clientX,
|
||||
clientY: touch.clientY,
|
||||
pageX: touch.pageX,
|
||||
pageY: touch.pageY
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle mouse move over chart SVG
|
||||
* Handle pointer movement over a chart (mouse or touch).
|
||||
* Finds the closest data point and updates tooltip and indicator.
|
||||
*/
|
||||
function handleMouseMove(event) {
|
||||
const svg = event.currentTarget;
|
||||
const metric = svg.dataset.metric;
|
||||
const period = svg.dataset.period;
|
||||
const xStart = parseInt(svg.dataset.xStart, 10);
|
||||
const xEnd = parseInt(svg.dataset.xEnd, 10);
|
||||
const yMin = parseFloat(svg.dataset.yMin);
|
||||
const yMax = parseFloat(svg.dataset.yMax);
|
||||
function handlePointerMove(event) {
|
||||
var svg = event.currentTarget;
|
||||
|
||||
// Find the path with data-points
|
||||
const path = svg.querySelector('path[data-points]');
|
||||
if (!path) return;
|
||||
// Extract chart metadata
|
||||
var metric = svg.dataset.metric;
|
||||
var period = svg.dataset.period;
|
||||
var xStart = parseInt(svg.dataset.xStart, 10);
|
||||
var xEnd = parseInt(svg.dataset.xEnd, 10);
|
||||
var yMin = parseFloat(svg.dataset.yMin);
|
||||
var yMax = parseFloat(svg.dataset.yMax);
|
||||
|
||||
// Parse and cache data points and path coordinates on first access
|
||||
if (!path._dataPoints) {
|
||||
try {
|
||||
const json = path.dataset.points.replace(/"/g, '"');
|
||||
path._dataPoints = JSON.parse(json);
|
||||
} catch (e) {
|
||||
console.warn('Failed to parse chart data:', e);
|
||||
return;
|
||||
}
|
||||
// Find the line path and data points source
|
||||
var linePath = findLinePath(svg);
|
||||
if (!linePath) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Cache the path's bounding box for coordinate mapping
|
||||
if (!path._pathBox) {
|
||||
path._pathBox = path.getBBox();
|
||||
var rawPoints = linePath.dataset.points || svg.dataset.points;
|
||||
if (!rawPoints) {
|
||||
return;
|
||||
}
|
||||
|
||||
const pathBox = path._pathBox;
|
||||
// Parse data points (cached on svg element)
|
||||
var dataPoints = getDataPoints(svg, rawPoints);
|
||||
if (!dataPoints) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Get mouse position in SVG coordinate space
|
||||
const svgRect = svg.getBoundingClientRect();
|
||||
const viewBox = svg.viewBox.baseVal;
|
||||
// Get plot area bounds (cached on svg element)
|
||||
var plotArea = getPlotAreaBounds(svg, linePath);
|
||||
if (!plotArea) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Convert screen X coordinate to SVG coordinate
|
||||
const scaleX = viewBox.width / svgRect.width;
|
||||
const svgX = (event.clientX - svgRect.left) * scaleX + viewBox.x;
|
||||
// Convert screen position to timestamp
|
||||
var svgX = screenToSvgX(svg, event.clientX);
|
||||
var relativeX = Math.max(0, Math.min(1, (svgX - plotArea.x) / plotArea.width));
|
||||
var targetTimestamp = xStart + relativeX * (xEnd - xStart);
|
||||
|
||||
// Calculate relative X position within the plot area (pathBox)
|
||||
const relX = (svgX - pathBox.x) / pathBox.width;
|
||||
// Find and display closest data point
|
||||
var closestPoint = findClosestDataPoint(dataPoints, targetTimestamp);
|
||||
if (!closestPoint) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Clamp to plot area bounds
|
||||
const clampedRelX = Math.max(0, Math.min(1, relX));
|
||||
showTooltip(
|
||||
event,
|
||||
formatTimestamp(closestPoint.ts, period),
|
||||
formatMetricValue(closestPoint.v, metric)
|
||||
);
|
||||
|
||||
// Map relative X position to timestamp using the chart's X-axis range
|
||||
const targetTs = xStart + clampedRelX * (xEnd - xStart);
|
||||
|
||||
// Find closest data point by timestamp
|
||||
const result = findClosestPoint(path._dataPoints, targetTs);
|
||||
if (!result) return;
|
||||
|
||||
const { point } = result;
|
||||
|
||||
// Update tooltip content
|
||||
tooltipTime.textContent = formatTime(point.ts, period);
|
||||
tooltipValue.textContent = formatValue(point.v, metric);
|
||||
|
||||
// Position and show tooltip
|
||||
positionTooltip(event);
|
||||
tooltip.classList.add('visible');
|
||||
|
||||
// Position the indicator at the data point
|
||||
const indicator = getIndicator(svg);
|
||||
|
||||
// Calculate X position: map timestamp to path coordinate space
|
||||
const pointRelX = (point.ts - xStart) / (xEnd - xStart);
|
||||
const indicatorX = pathBox.x + pointRelX * pathBox.width;
|
||||
|
||||
// Calculate Y position using the actual Y-axis range from the chart
|
||||
const ySpan = yMax - yMin || 1;
|
||||
// Y is inverted in SVG (0 at top)
|
||||
const pointRelY = 1 - (point.v - yMin) / ySpan;
|
||||
const indicatorY = pathBox.y + pointRelY * pathBox.height;
|
||||
|
||||
indicator.setAttribute('cx', indicatorX);
|
||||
indicator.setAttribute('cy', indicatorY);
|
||||
indicator.style.display = '';
|
||||
positionIndicator(svg, closestPoint, xStart, xEnd, yMin, yMax, plotArea);
|
||||
}
|
||||
|
||||
/**
|
||||
* Hide tooltip when leaving chart
|
||||
* Handle pointer leaving the chart area.
|
||||
*/
|
||||
function handleMouseLeave() {
|
||||
tooltip.classList.remove('visible');
|
||||
function handlePointerLeave() {
|
||||
hideTooltip();
|
||||
hideIndicator();
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle touch events for mobile
|
||||
* Handle touch start event.
|
||||
*/
|
||||
function handleTouchStart(event) {
|
||||
// Convert touch to mouse-like event
|
||||
const touch = event.touches[0];
|
||||
const mouseEvent = {
|
||||
currentTarget: event.currentTarget,
|
||||
clientX: touch.clientX,
|
||||
clientY: touch.clientY,
|
||||
pageX: touch.pageX,
|
||||
pageY: touch.pageY
|
||||
};
|
||||
|
||||
handleMouseMove(mouseEvent);
|
||||
}
|
||||
|
||||
function handleTouchMove(event) {
|
||||
const touch = event.touches[0];
|
||||
const mouseEvent = {
|
||||
currentTarget: event.currentTarget,
|
||||
clientX: touch.clientX,
|
||||
clientY: touch.clientY,
|
||||
pageX: touch.pageX,
|
||||
pageY: touch.pageY
|
||||
};
|
||||
|
||||
handleMouseMove(mouseEvent);
|
||||
}
|
||||
|
||||
function handleTouchEnd() {
|
||||
handleMouseLeave();
|
||||
handlePointerMove(touchToMouseEvent(event));
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize tooltips for all chart SVGs
|
||||
* Handle touch move event.
|
||||
*/
|
||||
function initTooltips() {
|
||||
// Find all chart SVGs with data attributes
|
||||
const chartSvgs = document.querySelectorAll('svg[data-metric][data-period]');
|
||||
function handleTouchMove(event) {
|
||||
handlePointerMove(touchToMouseEvent(event));
|
||||
}
|
||||
|
||||
chartSvgs.forEach(function(svg) {
|
||||
// Mouse events for desktop
|
||||
svg.addEventListener('mousemove', handleMouseMove);
|
||||
svg.addEventListener('mouseleave', handleMouseLeave);
|
||||
// ============================================================================
|
||||
// Initialization
|
||||
// ============================================================================
|
||||
|
||||
// Touch events for mobile
|
||||
/**
|
||||
* Attach event listeners to all chart SVG elements.
|
||||
*/
|
||||
function initializeChartTooltips() {
|
||||
createTooltipElement();
|
||||
|
||||
var chartSvgs = document.querySelectorAll('svg[data-metric][data-period]');
|
||||
|
||||
chartSvgs.forEach(function (svg) {
|
||||
// Desktop mouse events
|
||||
svg.addEventListener('mousemove', handlePointerMove);
|
||||
svg.addEventListener('mouseleave', handlePointerLeave);
|
||||
|
||||
// Mobile touch events
|
||||
svg.addEventListener('touchstart', handleTouchStart, { passive: true });
|
||||
svg.addEventListener('touchmove', handleTouchMove, { passive: true });
|
||||
svg.addEventListener('touchend', handleTouchEnd);
|
||||
svg.addEventListener('touchcancel', handleTouchEnd);
|
||||
svg.addEventListener('touchend', handlePointerLeave);
|
||||
svg.addEventListener('touchcancel', handlePointerLeave);
|
||||
|
||||
// Set cursor to indicate interactivity
|
||||
// Visual affordance for interactivity
|
||||
svg.style.cursor = 'crosshair';
|
||||
|
||||
// Allow vertical scrolling but prevent horizontal pan on mobile
|
||||
svg.style.touchAction = 'pan-y';
|
||||
});
|
||||
}
|
||||
|
||||
// Initialize when DOM is ready
|
||||
// Run initialization when DOM is ready
|
||||
if (document.readyState === 'loading') {
|
||||
document.addEventListener('DOMContentLoaded', initTooltips);
|
||||
document.addEventListener('DOMContentLoaded', initializeChartTooltips);
|
||||
} else {
|
||||
initTooltips();
|
||||
initializeChartTooltips();
|
||||
}
|
||||
})();
|
||||
|
||||
Reference in New Issue
Block a user