217 Commits

Author SHA1 Message Date
pablorevilla-meshtastic
52f1a1e788 updated the version number and date 2026-01-24 11:14:06 -08:00
pablorevilla-meshtastic
f44a78730a Added the ablility to skip packets with specific from_id and have secondary enccryption key for mqtt_reader. 2026-01-23 21:49:03 -08:00
pablorevilla-meshtastic
a9a5e046ea more container test 2026-01-23 13:02:34 -08:00
pablorevilla-meshtastic
37386f9e28 change to container.yml 2026-01-23 11:58:48 -08:00
pablorevilla-meshtastic
b66bfb1ee9 Fix error on container build and update README 2026-01-23 11:42:03 -08:00
pablorevilla-meshtastic
caf9cd1596 Updated list of sites runing meshview 2026-01-22 07:42:24 -08:00
pablorevilla-meshtastic
a4ebd2b23c work on net.html to limit packets to last 12 hours instead of 48 hours. 2026-01-21 20:11:17 -08:00
pablorevilla-meshtastic
5676ade6b7 fix api query so that weekly mesh works. 2026-01-21 17:19:19 -08:00
pablorevilla-meshtastic
319f8eac06 optimization 2026-01-20 14:48:33 -08:00
pablorevilla-meshtastic
d85132133a fix bug 2026-01-20 11:27:42 -08:00
pablorevilla-meshtastic
b6d8af409c fix bug on backwards compatibility 2026-01-20 10:10:39 -08:00
pablorevilla-meshtastic
896a0980d5 Update Scripts for PortgreSQL 2026-01-15 16:24:42 -08:00
pablorevilla-meshtastic
7d395e5e27 Correct documentation error 2026-01-15 14:42:18 -08:00
pablorevilla-meshtastic
c3cc01d7e7 Docuement Update 2026-01-15 14:30:04 -08:00
pablorevilla-meshtastic
ecbadc6087 configure "Wal" for sqlite 2026-01-15 14:10:49 -08:00
pablorevilla-meshtastic
ff30623bdf Documentation updte 2026-01-15 11:55:07 -08:00
pablorevilla-meshtastic
a43433ccb4 Update documentation 2026-01-15 11:51:03 -08:00
pablorevilla-meshtastic
4d9db2a52c Update instructions 2026-01-15 11:49:25 -08:00
pablorevilla-meshtastic
e30b59851f Update to 2026-01-15 11:39:24 -08:00
pablorevilla-meshtastic
36dd91be63 Merge branch 'db_updates' 2026-01-15 09:04:09 -08:00
pablorevilla-meshtastic
c9639d851b Fix Time function on store.py 2026-01-15 08:48:22 -08:00
Pablo Revilla
4516c84128 Modify cleanup.sh to use import_time_us for queries
Updated cleanup script to use import_time_us for deletions.
2026-01-14 22:11:52 -08:00
pablorevilla-meshtastic
fa98f56318 Made a cople of changes to the time handling and database config. 2026-01-12 20:10:19 -08:00
pablorevilla-meshtastic
f85e783e8c Adding code to work with multiple databases types. 2026-01-12 14:18:51 -08:00
Pablo Revilla
a882bc22dd Update README with version 3.0.2 details
Added notes about database changes for version 3.0.2.
2026-01-12 10:38:55 -08:00
pablorevilla-meshtastic
e12e3a2a41 Database change to remove import time columns 2026-01-09 13:30:14 -08:00
pablorevilla-meshtastic
da31794d8d Bump version to 3.0.2 and update release date to 2026-1-9 2026-01-09 11:49:58 -08:00
pablorevilla-meshtastic
9912f6b181 testing commit message functionality 2026-01-08 18:39:01 -08:00
pablorevilla-meshtastic
cb4cc281c6 fix speed of node list rendering 2026-01-08 17:38:56 -08:00
pablorevilla-meshtastic
571559114d Add node status indicator and improve favorites handling in nodelist 2026-01-08 17:38:12 -08:00
pablorevilla-meshtastic
df26df07f1 Changes to node.html. fix some of the data 2026-01-08 14:59:45 -08:00
pablorevilla-meshtastic
ffc7340bc9 Changes to nodelist.html. fix some of the data 2026-01-07 17:19:32 -08:00
pablorevilla-meshtastic
1d58aaba83 Changes to nodelist.html. fix some of the data 2026-01-07 13:35:58 -08:00
pablorevilla-meshtastic
b2bb9345fe Changes to nodelist.html. fix some of the data 2026-01-07 13:29:56 -08:00
pablorevilla-meshtastic
9686622b56 Changes to node.html. fix some of the data 2026-01-07 10:01:02 -08:00
pablorevilla-meshtastic
f7644a9573 Changes to node.html. fix some of the data 2026-01-07 09:48:26 -08:00
Pablo Revilla
e48e9464d7 Modify packet.html to add distance 2026-01-03 21:48:19 -08:00
Pablo Revilla
b72bc5d52b Modify packet.html to add distance 2026-01-03 21:44:26 -08:00
Pablo Revilla
1220f0bcbd Modify node.html to add statistics 2026-01-03 21:28:33 -08:00
Pablo Revilla
539410d5bb Modify node.html to add statistics 2026-01-03 21:26:39 -08:00
Pablo Revilla
383b576d18 Modify node.html to add statistics 2026-01-03 21:12:24 -08:00
Pablo Revilla
64a55a3ef3 Modify node.html to add statistics 2026-01-03 20:51:17 -08:00
Pablo Revilla
9408201e57 Modify node.html to add statistics 2026-01-03 19:27:00 -08:00
Pablo Revilla
f75d6bf749 Modify node.html to add statistics 2026-01-03 19:00:39 -08:00
Pablo Revilla
924d223866 Modify node.html to add statistics 2026-01-03 18:13:57 -08:00
Pablo Revilla
e9dcca1f19 Modify node.html to add statistics 2025-12-31 11:58:45 -08:00
Pablo Revilla
00cc2abd23 Modify node.html to add statistics 2025-12-31 11:56:18 -08:00
Pablo Revilla
b76477167d Modify top.html to add paging 2025-12-31 11:13:52 -08:00
Pablo Revilla
b41b249a6d Modify top.html to add paging 2025-12-31 10:38:13 -08:00
Pablo Revilla
71fcda2dd6 Modify top.html to add paging 2025-12-30 09:27:51 -08:00
Pablo Revilla
c4453fbb31 Modify packet.html to sort by hop count. 2025-12-24 10:54:09 -08:00
Pablo Revilla
79fa3f66a8 Fix chart on node.html. 2025-12-24 10:06:17 -08:00
Pablo Revilla
0ce64ac975 Fix chart on node.html. 2025-12-10 09:56:30 -08:00
Pablo Revilla
350aa9e4a3 Fix chart on node.html. 2025-12-09 17:40:49 -08:00
Pablo Revilla
e5bbf972c7 Fix chart on node.html. 2025-12-09 17:35:52 -08:00
Pablo Revilla
4326e12e88 Fix chart on node.html. 2025-12-09 16:58:38 -08:00
Pablo Revilla
00aa3216ff Fix chart on node.html. 2025-12-09 16:19:55 -08:00
Pablo Revilla
3d6c01f020 minor fix on node.html table of tackets shows to and from not just from. 2025-12-08 10:45:33 -08:00
Pablo Revilla
d3bf0ede67 minor fix on node.html table of tackets shows to and from not just from. 2025-12-08 10:29:24 -08:00
Pablo Revilla
2b02166d82 minor fix on node.html table of tackets shows to and from not just from. 2025-12-07 20:02:33 -08:00
Pablo Revilla
2fd36b4b11 minor fix on node.html table of tackets shows to and from not just from. 2025-12-07 17:29:01 -08:00
Pablo Revilla
8aa1c59873 minor fix to langauge dictionary 2025-12-06 11:30:25 -08:00
Pablo Revilla
cd036b8004 efficiency improvement node.html now it only queries the needed node info rather than all the nodes. 2025-12-06 11:26:36 -08:00
Pablo Revilla
989da239fb efficiency improvement for map.html. Now it only download the edges that need to be drawn. 2025-12-04 14:15:46 -08:00
Pablo Revilla
31626494d3 Fix README.md details 2025-12-04 10:38:58 -08:00
Pablo Revilla
960a7ef075 Fix README.md details 2025-12-04 09:41:59 -08:00
Pablo Revilla
60c4d22d2d Update multi-language support. So far Spanish and english. 2025-12-04 09:39:27 -08:00
Pablo Revilla
13a094be00 Update multi-language support. So far Spanish and english. 2025-12-04 09:38:18 -08:00
Pablo Revilla
7744cedd8c Update multi-language support. So far Spanish and english. 2025-12-04 09:35:34 -08:00
Pablo Revilla
ad42c1aeaf Update multi-language support. So far Spanish and english. 2025-12-02 16:03:25 -08:00
Pablo Revilla
41f7bf42a3 Update multi-language support. So far Spanish and english. 2025-12-02 14:45:31 -08:00
Pablo Revilla
0543aeb650 Update multi-language support. So far Spanish and english. 2025-12-02 14:24:10 -08:00
Pablo Revilla
679071cc14 Update multi-language support. So far Spanish and english. 2025-12-02 13:54:39 -08:00
Pablo Revilla
198afcc7d8 Update multi-language support. So far Spanish and english. 2025-12-02 13:51:18 -08:00
Pablo Revilla
191a01a03c update version date 2025-12-01 09:48:51 -08:00
Pablo Revilla
fd653f8234 Fixed Sort nodes by firmware in nodelist.html 2025-12-01 09:38:08 -08:00
Pablo Revilla
2149fed8c5 Fixed Sort nodes by firmware in nodelist.html 2025-11-30 10:38:18 -08:00
Pablo Revilla
5609d18284 worked on making map and base all API driven 2025-11-29 19:27:57 -08:00
Pablo Revilla
705b0b79fc worked on making map and base all API driven 2025-11-29 19:12:53 -08:00
Joel Krauska
32ad8e3a9c Fix search 2 (#108)
Co-authored-by: Pablo Revilla <pablorevilla@gmail.com>
2025-11-29 19:07:58 -08:00
Joel Krauska
e77428661c Version 3.0.0 Feature Release - Target Before Thanksgiving! (#96)
* Add alembic DB schema management (#86)

* Use alembic
* add creation helper
* example migration tool

* Store UTC int time in DB (#81)

* use UTC int time

* Remove old index notes script -- no longer needed

* modify alembic to support cleaner migrations

* add /version json endpoint

* move technical docs

* remove old migrate script

* add readme in docs:

* more doc tidy

* rm

* update api docs

* ignore other database files

* health endpoint

* alembic log format

* break out api calls in to their own file to reduce footprint

* ruff and docs

* vuln

* Improves arguments in mvrun.py

* Set dbcleanup.log location configurable

* mvrun work

* fallback if missing config

* remove unused loop

* improve migrations and fix logging problem with mqtt

* Container using slim/uv

* auto build containers

* symlink

* fix symlink

* checkout and containerfile

* make /app owned by ap0p

* Traceroute Return Path logged and displayed (#97)


* traceroute returns are now logged and /packetlist now graphs the correct data for a return route
* now using alembic to update schema
* HOWTO - Alembic

---------

Co-authored-by: Joel Krauska <jkrauska@gmail.com>

* DB Backups

* backups and cleanups are different

* ruff

* Docker Docs

* setup-dev

* graphviz for dot in Container

* Summary of 3.0.0 stuff

* Alembic was blocking mqtt logs

* Add us first/last timestamps to node table too

* Worked on /api/packet. Needed to modify
- Store.py to read the new time data
- api.py to present the new time data
- firehose.html chat.html and map.html now use the new apis and the time is the browser local time

* Worked on /api/packet. Needed to modify
- Store.py to read the new time data
- api.py to present the new time data
- firehose.html chat.html and map.html now use the new apis and the time is the browser local time

* Improves container build (#94)

* Worked on /api/packet. Needed to modify
- Store.py to read the new time data
- api.py to present the new time data
- firehose.html chat.html and map.html now use the new apis and the time is the browser local time

* Worked on /api/packet. Needed to modify
- Store.py to read the new time data
- api.py to present the new time data
- firehose.html chat.html and map.html now use the new apis and the time is the browser local time

* Worked on /api/packet. Needed to modify
- Added new api endpoint /api/packets_seen
- Modified web.py and store.py to support changes to APIs.
- Started to work on new_node.html and new_packet.html for presentation of data.

* Worked on /api/packet. Needed to modify
- Added new api endpoint /api/packets_seen
- Modified web.py and store.py to support changes to APIs.
- Started to work on new_node.html and new_packet.html for presentation of data.

* Finishing up all the pages for the 3.0 release.

Now all pages are functional.

* Finishing up all the pages for the 3.0 release.

Now all pages are functional.

* fix ruff format

* more ruff

* Finishing up all the pages for the 3.0 release.

Now all pages are functional.

* Finishing up all the pages for the 3.0 release.

Now all pages are functional.

* pyproject.toml requirements

* use sys.executable

* fix 0 epoch dates in /chat

* Make the robots do our bidding

* another compatibility fix when _us is empty and we need to sort by BOTH old and new

* Finishing up all the pages for the 3.0 release.

Now all pages are functional.

* Finishing up all the pages for the 3.0 release.

Now all pages are functional.

* Remamed new_node to node. shorter and descriptive.

* Remamed new_node to node. shorter and descriptive.

* Remamed new_node to node. shorter and descriptive.

* Remamed new_node to node. shorter and descriptive.

* Remamed new_node to node. shorter and descriptive.

* Remamed new_node to node. shorter and descriptive.

* More changes... almost ready for release.

Ranamed 2 pages for easy or reading.

* Fix the net page as it was not showing the date information

* Fix the net page as it was not showing the date information

* Fix the net page as it was not showing the date information

* Fix the net page as it was not showing the date information

* ruff

---------

Co-authored-by: Óscar García Amor <ogarcia@connectical.com>
Co-authored-by: Jim Schrempp <jschrempp@users.noreply.github.com>
Co-authored-by: Pablo Revilla <pablorevilla@gmail.com>
2025-11-28 11:17:20 -08:00
Joel Krauska
e68cdf8cc1 test commit
Added information about the new statistic page and API.
2025-11-03 12:43:07 -08:00
Pablo Revilla
60ae77772d worked on making map and base all API driven 2025-11-02 11:41:15 -08:00
Pablo Revilla
ed33bfe540 worked on making map and base all API driven 2025-11-02 11:39:29 -08:00
Pablo Revilla
47a22911ca worked on making map and base all API driven 2025-11-01 18:30:26 -07:00
Pablo Revilla
d61427db8f worked on making map and base all API driven 2025-10-31 16:55:41 -07:00
Pablo Revilla
f11455eebc worked on making map and base all API driven 2025-10-31 16:55:05 -07:00
Pablo Revilla
0a548904c8 Merge remote-tracking branch 'origin/master' 2025-10-31 16:54:15 -07:00
Pablo Revilla
a0e5bb0747 worked on making map and base all API driven 2025-10-31 16:52:32 -07:00
Pablo Revilla
986ef8e4e5 Merge pull request #92 from io235/master
Add Salzburg/Austria to list of running instances
2025-10-31 11:07:59 -07:00
Pablo Revilla
54f7f1b1ce worked on making map and base all API driven 2025-10-31 07:45:48 -07:00
Io
6886a97874 Add Salzburg/Austria to list of running instances 2025-10-31 10:09:13 +00:00
Pablo Revilla
c4f2e3f24f Merge pull request #87 from jkrauska/traceLines
render traceroutes on top
2025-10-27 21:31:11 -07:00
Joel Krauska
8db8e90f80 use ruff format 2025-10-27 14:40:41 -07:00
Joel Krauska
3ea2809df0 render traceroutes on top 2025-10-27 14:38:35 -07:00
Pablo Revilla
f7f932d821 worked on making map and base all API driven 2025-10-23 13:53:41 -07:00
Pablo Revilla
ad8835a46b worked on making map and base all API driven 2025-10-22 15:57:34 -07:00
Pablo Revilla
cbe4895b2c worked on making map and base all API driven 2025-10-22 15:22:47 -07:00
Pablo Revilla
d9b1d5ac49 worked on making map and base all API driven 2025-10-22 14:31:07 -07:00
Pablo Revilla
13aa73e88f worked on making map and base all API driven 2025-10-22 09:20:08 -07:00
Pablo Revilla
58244bff09 worked on making map and base all API driven 2025-10-22 08:54:18 -07:00
Pablo Revilla
635353f3c8 worked on making map and base all API driven 2025-10-21 21:07:28 -07:00
Pablo Revilla
d5fb589665 worked on making map and base all API driven 2025-10-18 15:52:50 -07:00
Pablo Revilla
a4b51ace73 worked on making map and base all API driven 2025-10-18 15:27:13 -07:00
Pablo Revilla
75d0d9ea6a worked on making map and base all API driven 2025-10-18 15:13:08 -07:00
Pablo Revilla
c909ff58a5 Merge pull request #80 from pablorevilla-meshtastic/revert-78-10-15-25-bugs2
Revert "Add configurable channel filtering with allowlist and minimum packet threshold"
2025-10-17 18:40:48 -07:00
Pablo Revilla
a15b039a1f Revert "Add configurable channel filtering with allowlist and minimum packet threshold" 2025-10-17 18:40:22 -07:00
Pablo Revilla
d52b7d0929 Merge pull request #78 from nullrouten0/10-15-25-bugs2
Add configurable channel filtering with allowlist and minimum packet threshold
2025-10-17 16:54:06 -07:00
Nathan
d56ee8f4c5 ruff fixes 2025-10-17 15:36:36 -07:00
Pablo Revilla
52ca8a4060 Merge branch 'master' into 10-15-25-bugs2 2025-10-17 15:09:10 -07:00
Pablo Revilla
e4a6de3615 worked on making map and base all API driven 2025-10-17 14:26:08 -07:00
Pablo Revilla
3cca445cad worked on making map and base all API driven 2025-10-17 12:54:44 -07:00
Pablo Revilla
8b0c7a16e7 Start adding language support 2025-10-16 13:34:35 -07:00
Nathan
c5a1009877 added channel filtering min_packets, and allowlist, fixed javascript error, new sample.config.ini sections 2025-10-16 01:07:49 -07:00
Pablo Revilla
65ada1ba3e Merge pull request #77 from pablorevilla-meshtastic/revert-73-maphours-stacked
Revert "Maphours changes stacked with filtering additions"
2025-10-15 21:36:29 -07:00
Pablo Revilla
7f94bc0e39 Merge branch 'master' into revert-73-maphours-stacked 2025-10-15 21:36:06 -07:00
Pablo Revilla
5d687da598 Merge pull request #76 from pablorevilla-meshtastic/revert-75-10-15-25-bugs
Revert "fixed map to show only channels with locations"
2025-10-15 21:33:16 -07:00
Pablo Revilla
a002cde2d7 Revert "fixed map to show only channels with locations" 2025-10-15 21:32:50 -07:00
Nathan
954cd4653d fixing node graph selector 2025-10-15 18:02:52 -07:00
Pablo Revilla
454c8ff6e2 Start adding language support 2025-10-15 16:27:43 -07:00
Pablo Revilla
021bc54f9d Start adding language support 2025-10-15 16:25:34 -07:00
Pablo Revilla
155ef89724 Merge remote-tracking branch 'origin/master' 2025-10-15 16:24:07 -07:00
Pablo Revilla
084647eec1 Start adding language support 2025-10-15 16:23:59 -07:00
Pablo Revilla
c13a851145 Merge pull request #75 from nullrouten0/10-15-25-bugs
fixed map to show only channels with locations
2025-10-15 16:15:01 -07:00
Pablo Revilla
114cd980b9 Merge branch 'master' into 10-15-25-bugs 2025-10-15 16:14:47 -07:00
Nathan
c23a650c0d fixed map to show only channels with locations 2025-10-15 16:07:52 -07:00
Pablo Revilla
318bf83403 Revert "Maphours changes stacked with filtering additions" 2025-10-15 15:57:56 -07:00
Pablo Revilla
636ab3e976 Start adding language support 2025-10-15 15:57:39 -07:00
Pablo Revilla
ea10a656e7 Start adding language support 2025-10-15 15:31:04 -07:00
Pablo Revilla
bcd007e5e2 Merge pull request #73 from nullrouten0/maphours-stacked
Maphours changes stacked with filtering additions
2025-10-15 15:09:24 -07:00
Nathan
b35acde821 Add channel-aware activity filters and API-driven dashboards 2025-10-14 21:34:52 -07:00
Nathan
b7752bc315 Map: activity time filters 2025-10-11 21:21:10 -07:00
Nathan
257bf7ffac Add channel filters to stats, chat, and firehose views 2025-10-11 16:28:34 -07:00
Pablo Revilla
d561d1a8de Start adding language support 2025-10-10 21:37:45 -07:00
Pablo Revilla
60e7389d83 Start adding language support 2025-10-10 21:34:42 -07:00
Pablo Revilla
4ac3262544 Start adding language support 2025-10-10 21:34:36 -07:00
Pablo Revilla
87643e4bd2 Start adding language support 2025-10-10 21:32:36 -07:00
Pablo Revilla
29174a649c Start adding language support 2025-10-10 21:28:48 -07:00
Pablo Revilla
712aea5139 Start adding language support 2025-10-10 21:10:35 -07:00
Pablo Revilla
d6fadd99d0 Start adding language support 2025-10-10 21:01:43 -07:00
Pablo Revilla
ae0b0944f0 Merge pull request #68 from jkrauska/profileTop
Add database indexes for 10X improvement in page load for /top
2025-10-10 12:58:41 -07:00
Pablo Revilla
d7b830e2f7 Merge pull request #69 from jkrauska/lornet.pl
fix for loranet.pl missing gateway_id
2025-10-08 18:31:05 -07:00
Joel Krauska
4a1737ebd4 fix for loranet.pl 2025-10-07 20:13:00 -07:00
Joel Krauska
60131007df fix for ruff 2025-10-07 19:49:57 -07:00
Joel Krauska
23d66c0d67 add database indexes 2025-10-07 18:22:50 -07:00
Pablo Revilla
30ba603f66 Merge pull request #67 from jkrauska/nodeListFavorites
FEATURE: Add NodeList Favorites and Remember Map Filters
2025-10-07 16:08:52 -07:00
Pablo Revilla
9811102681 Merge pull request #66 from jkrauska/apiEdges
BUG: Fix for api/edges traceback
2025-10-07 15:54:04 -07:00
Joel Krauska
7c92b06bec use ruff format 2025-10-07 14:15:29 -07:00
Joel Krauska
adda666a39 Add Favorites and Remember Filters 2025-10-07 14:04:14 -07:00
Joel Krauska
3e673f30bc Fix for api/edges traceback 2025-10-07 13:59:20 -07:00
Pablo Revilla
beefb4c5df Merge pull request #64 from jkrauska/ruffVersionFix
Bump ruff version - fix open call from lang work
2025-10-03 21:56:19 -07:00
Joel Krauska
e1bada8378 Bump ruff version - fix open call from lang work 2025-10-03 21:34:13 -07:00
Pablo Revilla
fbd6fcb123 Merge pull request #62 from jkrauska/ruffAutomation
Automate ruff in github action
2025-10-03 21:03:53 -07:00
Pablo Revilla
5d267effa5 Remove unused code 2025-10-03 20:56:47 -07:00
Joel Krauska
e28d248cf9 Automate ruff in github action 2025-10-03 20:55:11 -07:00
Pablo Revilla
ab101dd461 Merge pull request #61 from jkrauska/jkrauska/ruffFormat
Add Ruff formatting and pre-commit hooks
2025-10-03 20:49:44 -07:00
Joel Krauska
35212d403e Merge branch 'master' into jkrauska/ruffFormat 2025-10-03 20:43:16 -07:00
Pablo Revilla
3603014fd2 Added maps coordinates to /api/config 2025-10-03 20:41:02 -07:00
Joel Krauska
e25ff22127 Add Ruff formatting and pre-commit hooks
Configure Ruff as the code formatter and linter with pre-commit hooks.
  Applied automatic formatting fixes across the entire codebase including:
  - Import sorting and organization
  - Code style consistency (spacing, line breaks, indentation)
  - String quote normalization
  - Removal of trailing whitespace and unnecessary blank lines
2025-10-03 20:38:37 -07:00
Pablo Revilla
aa9922e7fa work on error where packet ids could be duplicate and crash the loop 2025-10-03 12:54:00 -07:00
Pablo Revilla
a9b16d6c18 work on error where packet ids could be duplicate and crash the loop 2025-10-03 12:33:04 -07:00
Pablo Revilla
b4fda0bb01 Merge remote-tracking branch 'origin/master' 2025-10-03 11:59:35 -07:00
Pablo Revilla
215817abc7 Cleanup the install process 2025-10-03 11:59:21 -07:00
Pablo Revilla
f167e8780d Merge pull request #57 from jkrauska/jkrauska/startupLogging
Add structured logging and improved startup/shutdown handling
2025-10-03 08:58:31 -07:00
Joel Krauska
2723022dd5 Add structured logging and improved startup/shutdown handling
- Add consistent logging format across all modules (timestamp, file:line, PID, level)
- Add startup logging for MQTT connection, web server startup with URL display
- Add MQTT message processing metrics (count and rate logging every 10k messages)
- Add graceful shutdown handling with signal handlers and PID file cleanup
- Add configurable HTTP access log toggle via config.ini (default: disabled)
- Replace print() statements with proper logger calls throughout
- Update .gitignore to exclude PID files (meshview-db.pid, meshview-web.pid)
- Update documentation for new logging configuration options

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-10-01 17:49:01 -07:00
Pablo Revilla
d2d18746ef Fixed bug on edges API 2025-10-01 14:00:14 -07:00
Pablo Revilla
7146f69beb update protobuf 2025-10-01 12:07:43 -07:00
Pablo Revilla
db8703919d Merge pull request #53 from jkrauska/jkrauska/mapzoom
Add url parameters to /map to support zoomed view
2025-10-01 11:43:45 -07:00
Pablo Revilla
baeaf29df0 Merge pull request #51 from Cloud-121/master
Fix Client_BASE not showing in Mesh Graphs
2025-10-01 09:18:50 -07:00
Pablo Revilla
44ddfe7ed7 update protobuf 2025-10-01 08:08:50 -07:00
Pablo Revilla
fc28dcc53e Merge pull request #52 from jkrauska/master
Minor README Tweaks and gitignore add
2025-10-01 08:05:34 -07:00
Pablo Revilla
81a2c0c7ca update protobuf 2025-10-01 08:01:00 -07:00
Joel Krauska
c7f5467acb Add url parameters to /map to support zoomed view 2025-09-30 17:56:35 -07:00
Joel Krauska
396e5ccbf1 Minor README Tweaks and gitignore add 2025-09-30 17:17:55 -07:00
Cloud Hayes
0a522f9a19 Fix Client_BASE not showing in Mesh Graphs 2025-09-30 17:40:06 -05:00
Pablo Revilla
40c5d4e291 update protobuf 2025-09-25 13:37:55 -07:00
Pablo Revilla
550a266212 update protobuf 2025-09-24 20:28:33 -07:00
Pablo Revilla
238ac409f8 testing new integration 2025-09-24 20:16:10 -07:00
Pablo Revilla
ee640b2cec Update .gitmodules 2025-09-24 20:13:28 -07:00
Pablo Revilla
561d410e6a Updates stats with pie chart and report for channel 2025-09-24 19:55:41 -07:00
Pablo Revilla
a20dafe714 Updates stats with pie chart and report for channel 2025-09-24 19:54:33 -07:00
Pablo Revilla
3cd93c08a7 Updates stats with pie chart and report for channel 2025-09-24 19:39:06 -07:00
Pablo Revilla
11537fdef1 Merge remote-tracking branch 'origin/master' 2025-09-24 19:38:55 -07:00
Pablo Revilla
5068f7acb1 Updates stats with pie chart and report for channel 2025-09-24 17:23:12 -07:00
Pablo Revilla
85f04f485e Merge pull request #36 from madeofstown/master
New Install Procedure
2025-09-24 17:17:16 -07:00
Pablo Revilla
a094b3edd5 Merge branch 'master' into master 2025-09-24 17:17:01 -07:00
Pablo Revilla
8d7f72ac6e Updates stats with pie chart and report for channel 2025-09-22 10:25:24 -07:00
Pablo Revilla
03e198b80c Updates stats with pie chart and report for channel 2025-09-22 10:18:06 -07:00
Pablo Revilla
86b4fa6cbf Update README.md 2025-09-19 21:48:39 -07:00
Pablo Revilla
e6424e3c6d Update README.md 2025-09-19 21:48:13 -07:00
Pablo Revilla
e2c1e311b8 Update README.md 2025-09-19 11:11:25 -07:00
Pablo Revilla
02f63fca70 Work on DB cleanup tool 2025-09-19 10:50:15 -07:00
Pablo Revilla
f9a6f3dff2 Work on DB cleanup tool 2025-09-19 09:20:43 -07:00
Pablo Revilla
0da2ef841c Work on DB cleanup tool 2025-09-19 09:11:00 -07:00
Pablo Revilla
4ffd287c84 Work on DB cleanup tool 2025-09-19 08:50:10 -07:00
Pablo Revilla
ec0dd4ef03 Work on status page 2025-09-18 10:28:55 -07:00
Pablo Revilla
608fde9e9c Work on db cleanup tool 2025-09-18 10:25:27 -07:00
Pablo Revilla
7c40c64de8 Work on db cleanup tool 2025-09-18 09:45:01 -07:00
Pablo Revilla
4f4c18fa14 Work on db cleanup tool 2025-09-18 09:37:24 -07:00
Pablo Revilla
6eb1cdbd2d Work on db cleanup tool 2025-09-18 07:40:25 -07:00
Pablo Revilla
cad3051e7f Work on db cleanup tool 2025-09-18 07:38:58 -07:00
Pablo Revilla
2b9422efbc fixed spelling of variable firehouse_interval 2025-09-18 07:34:26 -07:00
Pablo Revilla
ddb691d4de fixed spelling of variable firehouse_interval 2025-09-17 23:05:05 -07:00
Pablo Revilla
bbab5fefd0 make the /api/config endpoint restrictive to what it provides. It will only show what is needed for the current code. 2025-09-17 23:01:29 -07:00
Pablo Revilla
6e223a066a make the /api/config endpoint restrictive to what it provides. It will only show what is needed for the current code. 2025-09-17 23:00:44 -07:00
Pablo Revilla
61b74473e3 make the /api/config endpoint restrictive to what it provides. It will only show what is needed for the current code. 2025-09-17 22:55:40 -07:00
Pablo Revilla
f06fa3a4a3 Added Database cleanup feature to startdb.py 2025-09-17 22:22:35 -07:00
Pablo Revilla
9d4ebc00f6 Added Database cleanup feature to startdb.py 2025-09-17 22:15:12 -07:00
Pablo Revilla
a69d1a5729 Added Database cleanup feature to startdb.py 2025-09-17 22:12:04 -07:00
madeofstown
7e3076c0e2 Update README.md
symlink target is relative to link location
2025-08-19 19:24:58 -07:00
madeofstown
e3f5c0f006 Delete meshtastic/protobuf 2025-08-19 19:14:48 -07:00
madeofstown
572e79c9ac Update .gitmodules 2025-08-19 19:10:46 -07:00
madeofstown
fb70f644e5 Update README.md
Change install procedure to mitigate broken submodule
2025-08-19 19:08:38 -07:00
madeofstown
954d6300de Update .gitignore 2025-08-19 17:35:26 -07:00
madeofstown
9ceca0eea9 Update .gitmodules 2025-08-19 17:31:36 -07:00
madeofstown
24f768f725 Merge pull request #5 from madeofstown/testing
re-add meshtastic/python submodule
2025-08-19 17:14:35 -07:00
madeofstown
89f3eade15 re-add meshtastic/python submodule 2025-08-19 17:12:40 -07:00
122 changed files with 12191 additions and 5453 deletions

10
.dockerignore Normal file
View File

@@ -0,0 +1,10 @@
# This keeps Docker from including hostOS virtual environment folders
env/
.venv/
# Database files and backups
*.db
*.db-shm
*.db-wal
backups/
*.db.gz

54
.github/workflows/container.yml vendored Normal file
View File

@@ -0,0 +1,54 @@
name: Build container
on:
push:
jobs:
docker:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
# list of Docker images to use as base name for tags
images: |
ghcr.io/${{ github.repository }}
# latest tag is only set for semver/tag-based builds (default behavior)
flavor: |
latest=auto
# generate Docker tags based on the following events/attributes
tags: |
type=ref,event=branch
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
- name: Login to GitHub Container Registry
if: github.event_name != 'pull_request'
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and push
uses: docker/build-push-action@v6
with:
context: .
file: ./Containerfile
push: ${{ github.event_name != 'pull_request' }}
labels: ${{ steps.meta.outputs.labels }}
tags: ${{ steps.meta.outputs.tags }}
platforms: linux/amd64,linux/arm64
# optional cache (speeds up rebuilds)
cache-from: type=gha
cache-to: type=gha,mode=max

39
.github/workflows/lint.yml vendored Normal file
View File

@@ -0,0 +1,39 @@
name: Ruff
on:
pull_request:
paths:
- "**/*.py"
- "pyproject.toml"
- "ruff.toml"
- ".pre-commit-config.yaml"
jobs:
ruff:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.13"
- name: Cache Ruff
uses: actions/cache@v4
with:
path: ~/.cache/ruff
key: ruff-${{ runner.os }}-${{ hashFiles('**/pyproject.toml', '**/ruff.toml') }}
- name: Install Ruff
run: pip install "ruff==0.13.3"
# Lint (with GitHub annotation format for inline PR messages)
- name: Ruff check
run: ruff check --output-format=github .
# Fail PR if formatting is needed
- name: Ruff format (check-only)
run: ruff format --check .
# TODO: Investigate only applying to changed files and possibly apply fixes

41
.gitignore vendored
View File

@@ -1,7 +1,48 @@
env/*
__pycache__/*
meshview/__pycache__/*
alembic/__pycache__/*
meshtastic/protobuf/*
# Database files
packets.db
packets*.db
*.db
*.db-shm
*.db-wal
# Database backups
backups/
*.db.gz
# Process files
meshview-db.pid
meshview-web.pid
# Config and logs
/table_details.py
config.ini
*.log
# Screenshots
screenshots/*
# Python
python/nanopb
__pycache__/
*.pyc
*.pyo
*.pyd
.Python
# IDE
.vscode/
.idea/
*.swp
*.swo
*~
# OS
.DS_Store
Thumbs.db
packets.db-journal

1
.gitmodules vendored Normal file
View File

@@ -0,0 +1 @@

8
.pre-commit-config.yaml Normal file
View File

@@ -0,0 +1,8 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.13.3 # pin the latest youre comfortable with
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix] # fail if it had to change files
- id: ruff-format

204
AGENTS.md Normal file
View File

@@ -0,0 +1,204 @@
# AI Agent Guidelines for Meshview
This document provides context and guidelines for AI coding assistants working on the Meshview project.
## Project Overview
Meshview is a real-time monitoring and diagnostic tool for Meshtastic mesh networks. It provides web-based visualization and analysis of network activity, including:
- Real-time packet monitoring from MQTT streams
- Interactive map visualization of node locations
- Network topology graphs showing connectivity
- Message traffic analysis and conversation tracking
- Node statistics and telemetry data
- Packet inspection and traceroute analysis
## Architecture
### Core Components
1. **MQTT Reader** (`meshview/mqtt_reader.py`) - Subscribes to MQTT topics and receives mesh packets
2. **Database Manager** (`meshview/database.py`, `startdb.py`) - Handles database initialization and migrations
3. **MQTT Store** (`meshview/mqtt_store.py`) - Processes and stores packets in the database
4. **Web Server** (`meshview/web.py`, `main.py`) - Serves the web interface and API endpoints
5. **API Layer** (`meshview/web_api/api.py`) - REST API endpoints for data access
6. **Models** (`meshview/models.py`) - SQLAlchemy database models
7. **Decode Payload** (`meshview/decode_payload.py`) - Protobuf message decoding
### Technology Stack
- **Python 3.13+** - Main language
- **aiohttp** - Async web framework
- **aiomqtt** - Async MQTT client
- **SQLAlchemy (async)** - ORM with async support
- **Alembic** - Database migrations
- **Jinja2** - Template engine
- **Protobuf** - Message serialization (Meshtastic protocol)
- **SQLite/PostgreSQL** - Database backends (SQLite default, PostgreSQL via asyncpg)
### Key Patterns
- **Async/Await** - All I/O operations are asynchronous
- **Database Migrations** - Use Alembic for schema changes (see `docs/Database-Changes-With-Alembic.md`)
- **Configuration** - INI file-based config (`config.ini`, see `sample.config.ini`)
- **Modular API** - API routes separated into `meshview/web_api/` module
## Project Structure
```
meshview/
├── alembic/ # Database migration scripts
├── docs/ # Technical documentation
├── meshview/ # Main application package
│ ├── static/ # Static web assets (HTML, JS, CSS)
│ ├── templates/ # Jinja2 HTML templates
│ ├── web_api/ # API route handlers
│ └── *.py # Core modules
├── main.py # Web server entry point
├── startdb.py # Database manager entry point
├── mvrun.py # Combined runner (starts both services)
├── config.ini # Runtime configuration
└── requirements.txt # Python dependencies
```
## Development Workflow
### Setup
1. Use Python 3.13+ virtual environment
### Running
- **Database**: `./env/bin/python startdb.py`
- **Web Server**: `./env/bin/python main.py`
- **Both**: `./env/bin/python mvrun.py`
## Code Style
- **Line length**: 100 characters (see `pyproject.toml`)
- **Linting**: Ruff (configured in `pyproject.toml`)
- **Formatting**: Ruff formatter
- **Type hints**: Preferred but not strictly required
- **Async**: Use `async def` and `await` for I/O operations
## Important Files
### Configuration
- `config.ini` - Runtime configuration (server, MQTT, database, cleanup)
- `sample.config.ini` - Template configuration file
- `alembic.ini` - Alembic migration configuration
### Database
- `meshview/models.py` - SQLAlchemy models (Packet, Node, Traceroute, etc.)
- `meshview/database.py` - Database initialization and session management
- `alembic/versions/` - Migration scripts
### Core Logic
- `meshview/mqtt_reader.py` - MQTT subscription and message reception
- `meshview/mqtt_store.py` - Packet processing and storage
- `meshview/decode_payload.py` - Protobuf decoding
- `meshview/web.py` - Web server routes and handlers
- `meshview/web_api/api.py` - REST API endpoints
### Templates
- `meshview/templates/` - Jinja2 HTML templates
- `meshview/static/` - Static files (HTML pages, JS, CSS)
## Common Tasks
### Adding a New API Endpoint
1. Add route handler in `meshview/web_api/api.py`
2. Register route in `meshview/web.py` (if needed)
3. Update `docs/API_Documentation.md` if public API
### Database Schema Changes
1. Modify models in `meshview/models.py`
2. Create migration: `alembic revision --autogenerate -m "description"`
3. Review generated migration in `alembic/versions/`
4. Test migration: `alembic upgrade head`
5. **Never** modify existing migration files after they've been applied
### Adding a New Web Page
1. Create template in `meshview/templates/`
2. Add route in `meshview/web.py`
3. Add navigation link if needed (check existing templates for pattern)
4. Add static assets if needed in `meshview/static/`
### Processing New Packet Types
1. Check `meshview/decode_payload.py` for existing decoders
2. Add decoder function if new type
3. Update `meshview/mqtt_store.py` to handle new packet type
4. Update database models if new data needs storage
## Key Concepts
### Meshtastic Protocol
- Uses Protobuf for message serialization
- Packets contain various message types (text, position, telemetry, etc.)
- MQTT topics follow pattern: `msh/{region}/{subregion}/#`
### Database Schema
- **packet** - Raw packet data
- **node** - Mesh node information
- **traceroute** - Network path information
- **packet_seen** - Packet observation records
### Real-time Updates
- Web pages use Server-Sent Events (SSE) for live updates
- Map and firehose pages auto-refresh based on config intervals
- API endpoints return JSON for programmatic access
## Best Practices
1. **Always use async/await** for database and network operations
2. **Use Alembic** for all database schema changes
3. **Follow existing patterns** - check similar code before adding new features
4. **Update documentation** - keep `docs/` and README current
5. **Test migrations** - verify migrations work both up and down
6. **Handle errors gracefully** - log errors, don't crash on bad packets
7. **Respect configuration** - use `config.ini` values, don't hardcode
## Common Pitfalls
- **Don't modify applied migrations** - create new ones instead
- **Don't block the event loop** - use async I/O, not sync
- **Don't forget timezone handling** - timestamps are stored in UTC
- **Don't hardcode paths** - use configuration values
- **Don't ignore MQTT reconnection** - handle connection failures gracefully
## Resources
- **Main README**: `README.md` - Installation and basic usage
- **Docker Guide**: `README-Docker.md` - Container deployment
- **API Docs**: `docs/API_Documentation.md` - API endpoint reference
- **Migration Guide**: `docs/Database-Changes-With-Alembic.md` - Database workflow
- **Contributing**: `CONTRIBUTING.md` - Contribution guidelines
## Version Information
- **Current Version**: 3.0.0 (November 2025)
- **Python Requirement**: 3.13+
- **Key Features**: Alembic migrations, automated backups, Docker support, traceroute return paths
## Rules for robots
- Always run ruff check and ruff format after making changes (only on python changes)
---
When working on this project, prioritize:
1. Maintaining async patterns
2. Following existing code structure
3. Using proper database migrations
4. Keeping documentation updated
5. Testing changes thoroughly

View File

@@ -1,159 +0,0 @@
# API Documentation
## 1. Chat API
### GET `/api/chat`
Returns the most recent chat messages.
**Query Parameters**
- `limit` (optional, int): Maximum number of messages to return. Default: `100`.
**Response Example**
```json
{
"packets": [
{
"id": 123,
"import_time": "2025-07-22T12:45:00",
"from_node_id": 987654,
"from_node": "Alice",
"channel": "main",
"payload": "Hello, world!"
}
]
}
```
---
### GET `/api/chat/updates`
Returns chat messages imported after a given timestamp.
**Query Parameters**
- `last_time` (optional, ISO timestamp): Only messages imported after this time are returned.
**Response Example**
```json
{
"packets": [
{
"id": 124,
"import_time": "2025-07-22T12:50:00",
"from_node_id": 987654,
"from_node": "Alice",
"channel": "main",
"payload": "New message!"
}
],
"latest_import_time": "2025-07-22T12:50:00"
}
```
---
## 2. Nodes API
### GET `/api/nodes`
Returns a list of all nodes, with optional filtering by last seen.
**Query Parameters**
- `hours` (optional, int): Return nodes seen in the last N hours.
- `days` (optional, int): Return nodes seen in the last N days.
- `last_seen_after` (optional, ISO timestamp): Return nodes seen after this time.
**Response Example**
```json
{
"nodes": [
{
"node_id": 1234,
"long_name": "Alice",
"short_name": "A",
"channel": "main",
"last_seen": "2025-07-22T12:40:00",
"hardware": "T-Beam",
"firmware": "1.2.3",
"role": "client",
"last_lat": 37.7749,
"last_long": -122.4194
}
]
}
```
---
## 3. Packets API
### GET `/api/packets`
Returns a list of packets with optional filters.
**Query Parameters**
- `limit` (optional, int): Maximum number of packets to return. Default: `200`.
- `since` (optional, ISO timestamp): Only packets imported after this timestamp are returned.
**Response Example**
```json
{
"packets": [
{
"id": 123,
"from_node_id": 5678,
"to_node_id": 91011,
"portnum": 1,
"import_time": "2025-07-22T12:45:00",
"payload": "Hello, Bob!"
}
]
}
```
---
### Notes
- All timestamps (`import_time`, `last_seen`) are returned in ISO 8601 format.
- `portnum` is an integer representing the packet type.
- `payload` is always a UTF-8 decoded string.
## 4 Statistics API: GET `/api/stats`
Retrieve packet statistics aggregated by time periods, with optional filtering.
---
## Query Parameters
| Parameter | Type | Required | Default | Description |
|--------------|---------|----------|----------|-------------------------------------------------------------------------------------------------|
| `period_type` | string | No | `hour` | Time granularity of the stats. Allowed values: `hour`, `day`. |
| `length` | integer | No | 24 | Number of periods to include (hours or days). |
| `channel` | string | No | — | Filter results by channel name (case-insensitive). |
| `portnum` | integer | No | — | Filter results by port number. |
| `to_node` | integer | No | — | Filter results to packets sent **to** this node ID. |
| `from_node` | integer | No | — | Filter results to packets sent **from** this node ID. |
---
## Response
```json
{
"period_type": "hour",
"length": 24,
"channel": "LongFast",
"portnum": 1,
"to_node": 12345678,
"from_node": 87654321,
"data": [
{
"period": "2025-08-08 14:00",
"count": 10
},
{
"period": "2025-08-08 15:00",
"count": 7
}
// more entries...
]
}

133
CONTRIBUTING.md Normal file
View File

@@ -0,0 +1,133 @@
# Contributing to Meshview
First off, thanks for taking the time to contribute! ❤️
All types of contributions are encouraged and valued. See the [Table of Contents](#table-of-contents) for ways to help and details about how this project handles contributions. Please read the relevant section before getting started — it will make things smoother for both you and the maintainers.
The Meshview community looks forward to your contributions. 🎉
> And if you like the project but dont have time to contribute code, thats fine! You can still support Meshview by:
> - ⭐ Starring the repo on GitHub
> - Talking about Meshview on social media
> - Referencing Meshview in your own projects README
> - Mentioning Meshview at local meetups or to colleagues/friends
---
## Table of Contents
- [Code of Conduct](#code-of-conduct)
- [I Have a Question](#i-have-a-question)
- [I Want to Contribute](#i-want-to-contribute)
- [Reporting Bugs](#reporting-bugs)
- [Suggesting Enhancements](#suggesting-enhancements)
- [Your First Code Contribution](#your-first-code-contribution)
- [Improving the Documentation](#improving-the-documentation)
- [Styleguides](#styleguides)
- [Commit Messages](#commit-messages)
- [Join the Project Team](#join-the-project-team)
---
## Code of Conduct
Meshview is an open and welcoming community. We want everyone to feel safe, respected, and valued.
### Our Standards
- Be respectful and considerate in all interactions.
- Welcome new contributors and help them learn.
- Provide constructive feedback, not personal attacks.
- Focus on collaboration and what benefits the community.
Unacceptable behavior includes harassment, insults, hate speech, personal attacks, or publishing others private information without permission.
---
## I Have a Question
> Before asking, please read the [documentation](docs/README.md) if available.
1. Search the [issues list](../../issues) to see if your question has already been asked.
2. If not, open a [new issue](../../issues/new) with the **question** label.
3. Provide as much context as possible (OS, Python version, database type, etc.).
---
## I Want to Contribute
### Legal Notice
By contributing to Meshview, you agree that:
- You authored the content yourself.
- You have the necessary rights to the content.
- Your contribution can be provided under the projects license.
---
### Reporting Bugs
Before submitting a bug report:
- Make sure youre using the latest Meshview version.
- Verify the issue is not due to a misconfigured environment (SQLite/MySQL, Python version, etc.).
- Search existing [bug reports](../../issues?q=label%3Abug).
- Collect relevant information:
- Steps to reproduce
- Error messages / stack traces
- OS, Python version, and database backend
- Any logs (`meshview-db.service`, `mqtt_reader.py`, etc.)
How to report:
- Open a [new issue](../../issues/new).
- Use a **clear and descriptive title**.
- Include reproduction steps and expected vs. actual behavior.
⚠️ Security issues should **not** be reported in public issues. Instead, email us at **meshview-maintainers@proton.me**.
---
### Suggesting Enhancements
Enhancements are tracked as [issues](../../issues). Before suggesting:
- Make sure the feature doesnt already exist.
- Search for prior suggestions.
- Check that it fits Meshviews scope (mesh packet analysis, visualization, telemetry, etc.).
When submitting:
- Use a **clear and descriptive title**.
- Describe the current behavior and what youd like to see instead.
- Include examples, screenshots, or mockups if relevant.
- Explain why it would be useful to most Meshview users.
---
### Your First Code Contribution
We love first-time contributors! 🚀
If youd like to start coding:
1. Look for issues tagged with [good first issue](../../issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22).
2. Fork the repository and clone it locally.
3. Set up the development environment:
4. Run the app locally
5. Create a new branch, make your changes, commit, and push.
6. Open a pull request!
---
### Improving the Documentation
Docs are just as important as code. You can help by:
- Fixing typos or broken links.
- Clarifying confusing instructions.
- Adding examples (e.g., setting up Nginx as a reverse proxy, SQLite vs. MySQL setup).
- Writing or updating tutorials.
---
## Join the Project Team
Meshview is a community-driven project. If you consistently contribute (code, documentation, or community help), wed love to invite you as a maintainer.
Start by contributing regularly, engaging in issues/PRs, and helping others.
---
✨ Thats it! Thanks again for being part of Meshview. Every contribution matters.

79
Containerfile Normal file
View File

@@ -0,0 +1,79 @@
# Build Image
# Uses python:3.13-slim because no native dependencies are needed for meshview itself
# (everything is available as a wheel)
FROM docker.io/python:3.13-slim AS meshview-build
RUN apt-get update && \
apt-get install -y --no-install-recommends curl patch && \
rm -rf /var/lib/apt/lists/*
# Add a non-root user/group
ARG APP_USER=app
RUN useradd -m -u 10001 -s /bin/bash ${APP_USER}
# Install uv and put it on PATH system-wide
RUN curl -LsSf https://astral.sh/uv/install.sh | sh \
&& install -m 0755 /root/.local/bin/uv /usr/local/bin/uv
WORKDIR /app
RUN chown -R ${APP_USER}:${APP_USER} /app
# Copy deps first for caching
COPY --chown=${APP_USER}:${APP_USER} pyproject.toml uv.lock* requirements*.txt ./
# Optional: wheels-only to avoid slow source builds
ENV UV_NO_BUILD=1
RUN uv venv /opt/venv
# RUN uv sync --frozen
ENV VIRTUAL_ENV=/opt/venv
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
RUN uv pip install --no-cache-dir --upgrade pip \
&& if [ -f requirements.txt ]; then uv pip install --only-binary=:all: -r requirements.txt; fi
# Copy app code
COPY --chown=${APP_USER}:${APP_USER} . .
# Patch config
RUN patch -p1 < container/config.patch
# Clean
RUN rm -rf /app/.git* && \
rm -rf /app/.pre-commit-config.yaml && \
rm -rf /app/*.md && \
rm -rf /app/COPYING && \
rm -rf /app/Containerfile && \
rm -rf /app/Dockerfile && \
rm -rf /app/container && \
rm -rf /app/docker && \
rm -rf /app/docs && \
rm -rf /app/pyproject.toml && \
rm -rf /app/requirements.txt && \
rm -rf /app/screenshots
# Prepare /app and /opt to copy
RUN mkdir -p /meshview && \
mv /app /opt /meshview
# Use a clean container for install
FROM docker.io/python:3.13-slim
ARG APP_USER=app
COPY --from=meshview-build /meshview /
RUN apt-get update && \
apt-get install -y --no-install-recommends graphviz && \
rm -rf /var/lib/apt/lists/* && \
useradd -m -u 10001 -s /bin/bash ${APP_USER} && \
mkdir -p /etc/meshview /var/lib/meshview /var/log/meshview && \
mv /app/sample.config.ini /etc/meshview/config.ini && \
chown -R ${APP_USER}:${APP_USER} /var/lib/meshview /var/log/meshview
# Drop privileges
USER ${APP_USER}
WORKDIR /app
ENTRYPOINT [ "/opt/venv/bin/python", "mvrun.py"]
CMD ["--pid_dir", "/tmp", "--py_exec", "/opt/venv/bin/python", "--config", "/etc/meshview/config.ini" ]
EXPOSE 8081
VOLUME [ "/etc/meshview", "/var/lib/meshview", "/var/log/meshview" ]

1
Dockerfile Symbolic link
View File

@@ -0,0 +1 @@
Containerfile

247
README-Docker.md Normal file
View File

@@ -0,0 +1,247 @@
# Running MeshView with Docker
MeshView container images are built automatically and published to GitHub Container Registry.
## Quick Start
Pull and run the latest image:
```bash
docker pull ghcr.io/pablorevilla-meshtastic/meshview:latest
docker run -d \
--name meshview \
-p 8081:8081 \
-v ./config:/etc/meshview \
-v ./data:/var/lib/meshview \
-v ./logs:/var/log/meshview \
ghcr.io/pablorevilla-meshtastic/meshview:latest
```
Access the web interface at: http://localhost:8081
## Volume Mounts
The container uses three volumes for persistent data:
| Volume | Purpose | Required |
|--------|---------|----------|
| `/etc/meshview` | Configuration files | Yes |
| `/var/lib/meshview` | Database storage | Recommended |
| `/var/log/meshview` | Log files | Optional |
### Configuration Volume
Mount a directory containing your `config.ini` file:
```bash
-v /path/to/your/config:/etc/meshview
```
If no config is provided, the container will use the default `sample.config.ini`.
### Database Volume
Mount a directory to persist the SQLite database:
```bash
-v /path/to/your/data:/var/lib/meshview
```
**Important:** Without this mount, your database will be lost when the container stops.
### Logs Volume
Mount a directory to access logs from the host:
```bash
-v /path/to/your/logs:/var/log/meshview
```
## Complete Example
Create a directory structure and run:
```bash
# Create directories
mkdir -p meshview/{config,data,logs,backups}
# Copy sample config (first time only)
docker run --rm ghcr.io/pablorevilla-meshtastic/meshview:latest \
cat /etc/meshview/config.ini > meshview/config/config.ini
# Edit config.ini with your MQTT settings
nano meshview/config/config.ini
# Run the container
docker run -d \
--name meshview \
--restart unless-stopped \
-p 8081:8081 \
-v $(pwd)/meshview/config:/etc/meshview \
-v $(pwd)/meshview/data:/var/lib/meshview \
-v $(pwd)/meshview/logs:/var/log/meshview \
ghcr.io/pablorevilla-meshtastic/meshview:latest
```
## Docker Compose
Create a `docker-compose.yml`:
```yaml
version: '3.8'
services:
meshview:
image: ghcr.io/pablorevilla-meshtastic/meshview:latest
container_name: meshview
restart: unless-stopped
ports:
- "8081:8081"
volumes:
- ./config:/etc/meshview
- ./data:/var/lib/meshview
- ./logs:/var/log/meshview
- ./backups:/var/lib/meshview/backups # For database backups
environment:
- TZ=America/Los_Angeles # Set your timezone
```
Run with:
```bash
docker-compose up -d
```
## Configuration
### Minimum Configuration
Edit your `config.ini` to configure MQTT connection:
```ini
[mqtt]
server = mqtt.meshtastic.org
topics = ["msh/US/#"]
port = 1883
username =
password =
[database]
# SQLAlchemy async connection string.
# Examples:
# sqlite+aiosqlite:///var/lib/meshview/packets.db
# postgresql+asyncpg://user:pass@host:5432/meshview
connection_string = sqlite+aiosqlite:////var/lib/meshview/packets.db
```
### Database Backups
To enable automatic daily backups inside the container:
```ini
[cleanup]
backup_enabled = True
backup_dir = /var/lib/meshview/backups
backup_hour = 2
backup_minute = 00
```
Then mount the backups directory:
```bash
-v $(pwd)/meshview/backups:/var/lib/meshview/backups
```
## Available Tags
| Tag | Description |
|-----|-------------|
| `latest` | Latest build from the main branch |
| `dev-v3` | Development branch |
| `v1.2.3` | Specific version tags |
## Updating
Pull the latest image and restart:
```bash
docker pull ghcr.io/pablorevilla-meshtastic/meshview:latest
docker restart meshview
```
Or with docker-compose:
```bash
docker-compose pull
docker-compose up -d
```
## Logs
View container logs:
```bash
docker logs meshview
# Follow logs
docker logs -f meshview
# Last 100 lines
docker logs --tail 100 meshview
```
## Troubleshooting
### Container won't start
Check logs:
```bash
docker logs meshview
```
### Database permission issues
Ensure the data directory is writable:
```bash
chmod -R 755 meshview/data
```
### Can't connect to MQTT
1. Check your MQTT configuration in `config.ini`
2. Verify network connectivity from the container:
```bash
docker exec meshview ping mqtt.meshtastic.org
```
### Port already in use
Change the host port (left side):
```bash
-p 8082:8081
```
Then access at: http://localhost:8082
## Building Your Own Image
If you want to build from source:
```bash
git clone https://github.com/pablorevilla-meshtastic/meshview.git
cd meshview
docker build -f Containerfile -t meshview:local .
```
## Security Notes
- The container runs as a non-root user (`app`, UID 10001)
- No privileged access required
- Only port 8081 is exposed
- All data stored in mounted volumes
## Support
- GitHub Issues: https://github.com/pablorevilla-meshtastic/meshview/issues
- Documentation: https://github.com/pablorevilla-meshtastic/meshview

333
README.md
View File

@@ -2,13 +2,64 @@
# Meshview
![Start Page](screenshots/animated.gif)
The project serves as a real-time monitoring and diagnostic tool for the Meshtastic mesh network. It provides detailed insights into the network's activity, including message traffic, node positions, and telemetry data.
The project serves as a real-time monitoring and diagnostic tool for the Meshtastic mesh network. It provides detailed insights into network activity, including message traffic, node positions, and telemetry data.
### Version 3.0.3 — January 2026
- Added database support for MySQL (not tested, would love to have someone test!) and PostgreSQL (alongside SQLite) for larger or shared deployments.
- Configuration updated to allow selecting the database backend via connection string.
### Version 3.0.2 — January 2026
- Changes to the database require extra free disk space when updating to the latest version. SQLite must rebuild the database when dropping a column (we are dropping some of the old columns), so make sure you have free space equal to about 1.2x the size of your database. Depending on how big your database is, the migration may take a long time.
### Version 3.0.1 — December 2025
#### 🌐 Multi-Language Support (i18n)
- New `/api/lang` endpoint for serving translations
- Section-based translation loading (e.g., `?section=firehose`)
- Default language controlled via config file language section
- JSON-based translation files for easy expansion
- Core pages updated to support `data-translate-lang` attributes
### 🛠 Improvements
- Updated UI elements across multiple templates for localization readiness
- General cleanup to support future language additions
### Version 3.0.0 update - November 2025
**Major Infrastructure Improvements:**
* **Database Migrations**: Alembic integration for safe schema upgrades and database versioning
* **Automated Backups**: Independent database backup system with gzip compression (separate from cleanup)
* **Development Tools**: Quick setup script (`setup-dev.sh`) with pre-commit hooks for code quality
* **Docker Support**: Pre-built containers now available on GitHub Container Registry with automatic builds - ogarcia
**New Features:**
* **Traceroute Return Path**: Log and display return path data for traceroute packets - jschrempp
* **Microsecond Timestamps**: Added `import_time_us` columns for higher precision time tracking
**Technical Improvements:**
* Migration from manual SQL to Alembic-managed schema
* Container images use `uv` for faster dependency installation
* Python 3.13 support with slim Debian-based images
* Documentation collection in `docs/` directory
* API routes moved to separate modules for better organization
* /version and /health endpoints added for monitoring
See [README-Docker.md](README-Docker.md) for container deployment and [docs/](docs/) for technical documentation.
### Version 2.0.7 update - September 2025
* New database maintenance capability to automatically keep a specific number of days of data.
* Added configuration for update intervals for both the Live Map and the Firehose pages.
### Version 2.0.6 update - August 2025
* New Live Map (Shows packet feed live)
* New API /api/config (See API documentation)
* New API /api/edges (See API documentation)
* Adds edges to the map (click to see traceroute and neighbours)
### Version 2.0.4 update - August 2025
* New statistic page with more data.
* New API /api/stats (See API documentation).
@@ -37,33 +88,80 @@ The project serves as a real-time monitoring and diagnostic tool for the Meshtas
Samples of currently running instances:
- https://meshview.bayme.sh (SF Bay Area)
- https://www.svme.sh/ (Sacramento Valley)
- https://meshview.nyme.sh/ (New York)
- https://map.wpamesh.net/ (Western Pennsylvania)
- https://meshview.chicagolandmesh.org/ (Chicago)
- https://meshview.mt.gt (Canadaverse)
- https://www.svme.sh (Sacramento Valley)
- https://meshview.nyme.sh (New York)
- https://meshview.socalmesh.org (LA Area)
- https://map.wpamesh.net (Western Pennsylvania)
- https://meshview.chicagolandmesh.org (Chicago)
- https://meshview.mt.gt (Canada)
- https://canadaverse.org (Canada)
- https://meshview.meshtastic.es (Spain)
- https://view.mtnme.sh/ (North Georgia / East Tennessee)
- https://socalmesh.w4hac.com (Southern California)
- https://view.mtnme.sh (North Georgia / East Tennessee)
- https://meshview.lsinfra.de (Hessen - Germany)
- https://map.nswmesh.au/ (Sydney - Australia)
- https://meshview.pvmesh.org/ (Pioneer Valley, Massachusetts)
- https://meshview.pvmesh.org (Pioneer Valley, Massachusetts)
- https://meshview.louisianamesh.org (Louisiana)
- https://www.swlamesh.com (Southwest Louisiana)
- https://meshview.meshcolombia.co (Colombia)
- https://meshview-salzburg.jmt.gr (Salzburg / Austria)
- https://map.cromesh.eu (Croatia)
- https://view.meshdresden.eu (Dresden / Germany)
---
### Updating from 2.x to 3.x
We are adding the use of Alembic. If using GitHub
Update your codebase by running the pull command
```bash
cd meshview
git pull origin master
```
Install Alembic in your environment
```bash
./env/bin/pip install alembic
```
Start your scripts or services. This process will update your database with the latest schema.
## Installing
Requires **`python3.11`** or above.
### Using Docker (Recommended)
The easiest way to run MeshView is using Docker. Pre-built images are available from GitHub Container Registry.
See **[README-Docker.md](README-Docker.md)** for complete Docker installation and usage instructions.
### Manual Installation
Requires **`python3.13`** or above.
Clone the repo from GitHub:
```bash
git clone https://github.com/pablorevilla-meshtastic/meshview.git
cd meshview
```
#### Quick Setup (Recommended)
Run the development setup script:
```bash
./setup-dev.sh
```
This will:
- Create Python virtual environment
- Install all requirements
- Install development tools (pre-commit, pytest)
- Set up pre-commit hooks for code formatting
- Create config.ini from sample
#### Manual Setup
Create a Python virtual environment:
```bash
cd meshview
python3 -m venv env
```
@@ -73,7 +171,7 @@ Install the environment requirements:
./env/bin/pip install -r requirements.txt
```
Install `graphviz`:
Install `graphviz` on MacOS or Debian/Ubuntu Linux:
```bash
sudo apt-get install graphviz
@@ -125,12 +223,18 @@ acme_challenge =
# The domain name of your site.
domain =
# Select language (this represents the name of the json file in the /lang directory)
language = es
# Site title to show in the browser title bar and headers.
title = Bay Area Mesh
# A brief message shown on the homepage.
message = Real time data from around the bay area and beyond.
# Starting URL when loading the index page.
starting = /chat
# Enable or disable site features (as strings: "True" or "False").
nodes = True
conversations = True
@@ -142,16 +246,21 @@ map = True
top = True
# Map boundaries (used for the map view).
# Defaults will show the San Francisco Bay Area
map_top_left_lat = 39
map_top_left_lon = -123
map_bottom_right_lat = 36
map_bottom_right_lon = -121
# Updates intervals in seconds, zero or negative number means no updates
# defaults will be 3 seconds
map_interval=3
firehose_interval=3
# Weekly net details
weekly_net_message = Weekly Mesh check-in. We will keep it open on every Wednesday from 5:00pm for checkins. The message format should be (LONG NAME) - (CITY YOU ARE IN) #BayMeshNet.
net_tag = #BayMeshNet
# -------------------------
# MQTT Broker Configuration
# -------------------------
@@ -160,7 +269,7 @@ net_tag = #BayMeshNet
server = mqtt.bayme.sh
# Topics to subscribe to (as JSON-like list, but still a string).
topics = ["msh/US/bayarea/#", "msh/US/CA/mrymesh/#", "msh/US/CA/sacvalley/#"]
topics = ["msh/US/bayarea/#", "msh/US/CA/mrymesh/#", "msh/US/CA/sacvalley"]
# Port used by MQTT (typically 1883 for unencrypted).
port = 1883
@@ -174,15 +283,60 @@ password = large4cats
# Database Configuration
# -------------------------
[database]
# SQLAlchemy connection string. This one uses SQLite with asyncio support.
# SQLAlchemy async connection string.
# Examples:
# sqlite+aiosqlite:///packets.db
# postgresql+asyncpg://user:pass@host:5432/meshview
connection_string = sqlite+aiosqlite:///packets.db
# -------------------------
# Database Cleanup Configuration
# -------------------------
[cleanup]
# Enable or disable daily cleanup
enabled = False
# Number of days to keep records in the database
days_to_keep = 14
# Time to run daily cleanup (24-hour format)
hour = 2
minute = 00
# Run VACUUM after cleanup
vacuum = False
# -------------------------
# Logging Configuration
# -------------------------
[logging]
# Enable or disable HTTP access logs from the web server
# When disabled, request logs like "GET /api/chat" will not appear
# Application logs (errors, startup messages, etc.) are unaffected
# Set to True to enable, False to disable (default: False)
access_log = False
# Database cleanup logfile location
db_cleanup_logfile = dbcleanup.log
```
---
## NOTE (PostgreSQL setup)
If you want to use PostgreSQL instead of SQLite:
Install PostgreSQL for your OS.
Create a user and database:
```
CREATE USER meshview WITH PASSWORD 'change_me';
CREATE DATABASE meshview OWNER meshview;
```
Update `config.ini` example:
```
connection_string = postgresql+asyncpg://meshview:change_me@localhost:5432/meshview
```
## Running Meshview
Start the database:
Start the database manager:
```bash
./env/bin/python startdb.py
@@ -209,12 +363,29 @@ Open in your browser: http://localhost:8081/
## Running Meshview with `mvrun.py`
- `mvrun.py` starts both `startdb.py` and `main.py` in separate threads and merges the output.
- It accepts the `--config` argument like the others.
- It accepts several command-line arguments for flexible deployment.
```bash
./env/bin/python mvrun.py
```
**Command-line options:**
- `--config CONFIG` - Path to the configuration file (default: `config.ini`)
- `--pid_dir PID_DIR` - Directory for PID files (default: `.`)
- `--py_exec PY_EXEC` - Path to the Python executable (default: `./env/bin/python`)
**Examples:**
```bash
# Use a specific config file
./env/bin/python mvrun.py --config /etc/meshview/config.ini
# Store PID files in a specific directory
./env/bin/python mvrun.py --pid_dir /var/run/meshview
# Use a different Python executable
./env/bin/python mvrun.py --py_exec /usr/bin/python3
```
---
## Setting Up Systemd Services (Ubuntu)
@@ -303,15 +474,44 @@ sudo systemctl daemon-reload
```
## 5. Database Maintenance
### Database maintenance can now be done via the script itself. Here is the relevant section from the configuration file.
- Simple to setup
- It will not drop any packets
```
# -------------------------
# Database Cleanup Configuration
# -------------------------
[cleanup]
# Enable or disable daily cleanup
enabled = False
# Number of days to keep records in the database
days_to_keep = 14
# Time to run daily cleanup (24-hour format)
hour = 2
minute = 00
# Run VACUUM after cleanup
vacuum = False
# -------------------------
# Logging Configuration
# -------------------------
[logging]
# Enable or disable HTTP access logs from the web server
access_log = False
# Database cleanup logfile location
db_cleanup_logfile = dbcleanup.log
```
Once changes are done you need to restart the script for changes to load.
### Alternatively we can do it via your OS (This example is Ubuntu like OS)
- Create and save bash script below. (Modify /path/to/file/ to the correct path)
- Name it cleanup.sh
- Make it executable.
```bash
#!/bin/bash
DB_FILE="/path/to/file/packets.db"
# Stop DB service
sudo systemctl stop meshview-db.service
sudo systemctl stop meshview-web.service
@@ -320,10 +520,22 @@ sleep 5
echo "Run cleanup..."
# Run cleanup queries
sqlite3 "$DB_FILE" <<EOF
DELETE FROM packet
WHERE import_time_us IS NOT NULL
AND import_time_us < (strftime('%s','now','-14 days') * 1000000);
SELECT 'packet deleted: ' || changes();
DELETE FROM packet_seen
WHERE import_time_us IS NOT NULL
AND import_time_us < (strftime('%s','now','-14 days') * 1000000);
SELECT 'packet_seen deleted: ' || changes();
DELETE FROM traceroute
WHERE import_time_us IS NOT NULL
AND import_time_us < (strftime('%s','now','-14 days') * 1000000);
SELECT 'traceroute deleted: ' || changes();
DELETE FROM node
WHERE last_seen_us IS NULL
OR last_seen_us < (strftime('%s','now','-14 days') * 1000000);
SELECT 'node deleted: ' || changes();
VACUUM;
EOF
@@ -333,6 +545,64 @@ sudo systemctl start meshview-web.service
echo "Database cleanup completed on $(date)"
```
- If you are using PostgreSQL, use this version instead (adjust credentials/DB name):
```bash
#!/bin/bash
DB_NAME="meshview"
DB_USER="meshview"
DB_HOST="localhost"
DB_PORT="5432"
# Stop DB service
sudo systemctl stop meshview-db.service
sudo systemctl stop meshview-web.service
sleep 5
echo "Run cleanup..."
# Run cleanup queries
psql "postgresql://${DB_USER}@${DB_HOST}:${DB_PORT}/${DB_NAME}" <<'EOF'
WITH deleted AS (
DELETE FROM packet
WHERE import_time_us IS NOT NULL
AND import_time_us < (EXTRACT(EPOCH FROM (NOW() - INTERVAL '14 days')) * 1000000)
RETURNING 1
)
SELECT 'packet deleted: ' || COUNT(*) FROM deleted;
WITH deleted AS (
DELETE FROM packet_seen
WHERE import_time_us IS NOT NULL
AND import_time_us < (EXTRACT(EPOCH FROM (NOW() - INTERVAL '14 days')) * 1000000)
RETURNING 1
)
SELECT 'packet_seen deleted: ' || COUNT(*) FROM deleted;
WITH deleted AS (
DELETE FROM traceroute
WHERE import_time_us IS NOT NULL
AND import_time_us < (EXTRACT(EPOCH FROM (NOW() - INTERVAL '14 days')) * 1000000)
RETURNING 1
)
SELECT 'traceroute deleted: ' || COUNT(*) FROM deleted;
WITH deleted AS (
DELETE FROM node
WHERE last_seen_us IS NULL
OR last_seen_us < (EXTRACT(EPOCH FROM (NOW() - INTERVAL '14 days')) * 1000000)
RETURNING 1
)
SELECT 'node deleted: ' || COUNT(*) FROM deleted;
VACUUM;
EOF
# Start DB service
sudo systemctl start meshview-db.service
sudo systemctl start meshview-web.service
echo "Database cleanup completed on $(date)"
```
- Schedule running the script on a regular basis.
- In this example it runs every night at 2:00am.
@@ -348,4 +618,19 @@ Add schedule to the bottom of the file (modify /path/to/file/ to the correct pat
Check the log file to see if the script ran at the scheduled time.
---
## Testing
MeshView includes a test suite using pytest. For detailed testing documentation, see [README-testing.md](README-testing.md).
Quick start:
```bash
./env/bin/pytest tests/test_api_simple.py -v
```
---
## Technical Documentation
For more detailed technical documentation including database migrations, architecture details, and advanced topics, see the [docs/](docs/) directory.

120
alembic.ini Normal file
View File

@@ -0,0 +1,120 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
version_path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# sqlalchemy.url will be set programmatically from meshview config
# sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = INFO
handlers = console
qualname =
[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(asctime)s %(filename)s:%(lineno)d [pid:%(process)d] %(levelname)s - %(message)s
datefmt = %Y-%m-%d %H:%M:%S

1
alembic/README Normal file
View File

@@ -0,0 +1 @@
Generic single-database configuration.

102
alembic/env.py Normal file
View File

@@ -0,0 +1,102 @@
import asyncio
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
# Import models metadata for autogenerate support
from meshview.models import Base
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging: wires up the [loggers],
# [handlers] and [formatters] sections of alembic.ini.
# Use disable_existing_loggers=False to preserve app logging configuration
if config.config_file_name is not None:
    fileConfig(config.config_file_name, disable_existing_loggers=False)
# Add your model's MetaData object here for 'autogenerate' support
target_metadata = Base.metadata
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the Alembic context with only a database URL rather than an
    Engine, so no DBAPI needs to be installed; calls to context.execute()
    emit the generated SQL to the script output instead of a live database.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
    """Attach the Alembic context to an open connection and execute migrations."""
    context.configure(target_metadata=target_metadata, connection=connection)
    with context.begin_transaction():
        context.run_migrations()
async def run_async_migrations() -> None:
    """Build an async engine from configuration and run migrations on it."""
    # Start from the [alembic] section of the ini file.
    cfg = config.get_section(config.config_ini_section, {})
    if "sqlalchemy.url" not in cfg:
        # alembic.ini leaves the URL unset; pull it from the meshview config,
        # falling back to the default SQLite file so initial migration
        # creation still works without a meshview config present.
        try:
            from meshview.config import CONFIG
            url = CONFIG["database"]["connection_string"]
        except Exception:
            url = "sqlite+aiosqlite:///packets.db"
        cfg["sqlalchemy.url"] = url
    engine = async_engine_from_config(
        cfg,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    async with engine.connect() as connection:
        await connection.run_sync(do_run_migrations)
    await engine.dispose()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode with async support.

    If no event loop is running in this thread, run the async migrations
    directly with asyncio.run(). If a loop IS already running (e.g. Alembic
    invoked from async application code), asyncio.run() would raise, so the
    migrations are executed on a worker thread with its own event loop and
    we block until they complete.

    Note: the original implementation always spawned a worker thread and
    had a dead `except RuntimeError` fallback (asyncio.run in a fresh
    thread never raises for an outer running loop); it also shadowed the
    module-level `pool` import. This version probes for a running loop
    explicitly instead.
    """
    try:
        asyncio.get_running_loop()
    except RuntimeError:
        # No loop running here: safe to start one directly.
        asyncio.run(run_async_migrations())
    else:
        # A loop is already running; off-load to a dedicated thread.
        import concurrent.futures

        with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
            executor.submit(lambda: asyncio.run(run_async_migrations())).result()
# Entry point: Alembic imports this module and we immediately dispatch to
# offline (SQL script emission) or online (live database) migration mode.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

26
alembic/script.py.mako Normal file
View File

@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,45 @@
"""Add example table
Revision ID: 1717fa5c6545
Revises: c88468b7ab0b
Create Date: 2025-10-26 20:59:04.347066
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = '1717fa5c6545'
down_revision: str | None = 'add_time_us_cols'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Create example table with sample columns.

    Demonstrates common column types. `id` is declared primary_key=True and
    also listed in an explicit PrimaryKeyConstraint — redundant but harmless.
    """
    op.create_table(
        'example',
        sa.Column('id', sa.Integer(), nullable=False, primary_key=True, autoincrement=True),
        sa.Column('name', sa.String(length=100), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('value', sa.Float(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=False, server_default='1'),
        sa.Column(
            'created_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')
        ),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    # Create an index on the name column for faster lookups
    op.create_index('idx_example_name', 'example', ['name'])
def downgrade() -> None:
    """Remove example table (index first, then the table itself)."""
    op.drop_index('idx_example_name', table_name='example')
    op.drop_table('example')

View File

@@ -0,0 +1,35 @@
"""Add first_seen_us and last_seen_us to node table
Revision ID: 2b5a61bb2b75
Revises: ac311b3782a1
Create Date: 2025-11-05 15:19:13.446724
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = '2b5a61bb2b75'
down_revision: str | None = 'ac311b3782a1'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Add first_seen_us / last_seen_us (epoch microseconds) to node."""
    # Add microsecond epoch timestamp columns for first and last seen times
    op.add_column('node', sa.Column('first_seen_us', sa.BigInteger(), nullable=True))
    op.add_column('node', sa.Column('last_seen_us', sa.BigInteger(), nullable=True))
    # Indexed so time-range queries over node activity stay fast.
    op.create_index('idx_node_first_seen_us', 'node', ['first_seen_us'], unique=False)
    op.create_index('idx_node_last_seen_us', 'node', ['last_seen_us'], unique=False)
def downgrade() -> None:
    """Remove the seen-time columns and their indexes (indexes dropped first)."""
    # Remove the microsecond epoch timestamp columns and their indexes
    op.drop_index('idx_node_last_seen_us', table_name='node')
    op.drop_index('idx_node_first_seen_us', table_name='node')
    op.drop_column('node', 'last_seen_us')
    op.drop_column('node', 'first_seen_us')

View File

@@ -0,0 +1,65 @@
"""Drop import_time columns.
Revision ID: 9f3b1a8d2c4f
Revises: 2b5a61bb2b75
Create Date: 2026-01-09 09:55:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "9f3b1a8d2c4f"
down_revision: str | None = "2b5a61bb2b75"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Drop the legacy import_time DateTime columns and related indexes.

    Every drop is guarded by an inspector check so the migration is
    idempotent and safe on databases where some columns/indexes were already
    removed. batch_alter_table is used for SQLite compatibility (SQLite
    rebuilds the table to drop columns).
    """
    conn = op.get_bind()
    inspector = sa.inspect(conn)
    packet_indexes = {idx["name"] for idx in inspector.get_indexes("packet")}
    packet_columns = {col["name"] for col in inspector.get_columns("packet")}
    with op.batch_alter_table("packet", schema=None) as batch_op:
        if "idx_packet_import_time" in packet_indexes:
            batch_op.drop_index("idx_packet_import_time")
        if "idx_packet_from_node_time" in packet_indexes:
            batch_op.drop_index("idx_packet_from_node_time")
        if "import_time" in packet_columns:
            batch_op.drop_column("import_time")
    packet_seen_columns = {col["name"] for col in inspector.get_columns("packet_seen")}
    with op.batch_alter_table("packet_seen", schema=None) as batch_op:
        if "import_time" in packet_seen_columns:
            batch_op.drop_column("import_time")
    traceroute_indexes = {idx["name"] for idx in inspector.get_indexes("traceroute")}
    traceroute_columns = {col["name"] for col in inspector.get_columns("traceroute")}
    with op.batch_alter_table("traceroute", schema=None) as batch_op:
        if "idx_traceroute_import_time" in traceroute_indexes:
            batch_op.drop_index("idx_traceroute_import_time")
        if "import_time" in traceroute_columns:
            batch_op.drop_column("import_time")
def downgrade() -> None:
    """Re-create the import_time columns and their indexes.

    Schema only — the original DateTime values are not restored; the
    re-created columns are NULL (import_time_us remains the source of truth).
    """
    with op.batch_alter_table("traceroute", schema=None) as batch_op:
        batch_op.add_column(sa.Column("import_time", sa.DateTime(), nullable=True))
        batch_op.create_index("idx_traceroute_import_time", ["import_time"], unique=False)
    with op.batch_alter_table("packet_seen", schema=None) as batch_op:
        batch_op.add_column(sa.Column("import_time", sa.DateTime(), nullable=True))
    with op.batch_alter_table("packet", schema=None) as batch_op:
        batch_op.add_column(sa.Column("import_time", sa.DateTime(), nullable=True))
        # DESC index matches the original query pattern (newest first).
        batch_op.create_index("idx_packet_import_time", [sa.text("import_time DESC")], unique=False)
        batch_op.create_index(
            "idx_packet_from_node_time",
            ["from_node_id", sa.text("import_time DESC")],
            unique=False,
        )

View File

@@ -0,0 +1,31 @@
"""add route_return to traceroute
Revision ID: ac311b3782a1
Revises: 1717fa5c6545
Create Date: 2025-11-04 20:28:33.174137
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = 'ac311b3782a1'
down_revision: str | None = '1717fa5c6545'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Add the route_return blob column to traceroute (return-path data)."""
    # Add route_return column to traceroute table; batch mode for SQLite.
    with op.batch_alter_table('traceroute', schema=None) as batch_op:
        batch_op.add_column(sa.Column('route_return', sa.LargeBinary(), nullable=True))
def downgrade() -> None:
    """Drop the route_return column from traceroute."""
    # Remove route_return column from traceroute table
    with op.batch_alter_table('traceroute', schema=None) as batch_op:
        batch_op.drop_column('route_return')

View File

@@ -0,0 +1,74 @@
"""add import_time_us columns
Revision ID: add_time_us_cols
Revises: c88468b7ab0b
Create Date: 2025-11-03 14:10:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = 'add_time_us_cols'
down_revision: str | None = 'c88468b7ab0b'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Add import_time_us (epoch microseconds) columns plus indexes.

    Column additions are guarded by inspector checks so re-running against a
    database that already has the columns is a no-op. Note the indexes are
    only created inside those guards, i.e. when the column was missing.
    """
    # Check if columns already exist, add them if they don't
    conn = op.get_bind()
    inspector = sa.inspect(conn)
    # Add import_time_us to packet table
    packet_columns = [col['name'] for col in inspector.get_columns('packet')]
    if 'import_time_us' not in packet_columns:
        with op.batch_alter_table('packet', schema=None) as batch_op:
            batch_op.add_column(sa.Column('import_time_us', sa.BigInteger(), nullable=True))
        # DESC ordering matches the "newest packets first" query pattern.
        op.create_index(
            'idx_packet_import_time_us', 'packet', [sa.text('import_time_us DESC')], unique=False
        )
        op.create_index(
            'idx_packet_from_node_time_us',
            'packet',
            ['from_node_id', sa.text('import_time_us DESC')],
            unique=False,
        )
    # Add import_time_us to packet_seen table
    packet_seen_columns = [col['name'] for col in inspector.get_columns('packet_seen')]
    if 'import_time_us' not in packet_seen_columns:
        with op.batch_alter_table('packet_seen', schema=None) as batch_op:
            batch_op.add_column(sa.Column('import_time_us', sa.BigInteger(), nullable=True))
        op.create_index(
            'idx_packet_seen_import_time_us', 'packet_seen', ['import_time_us'], unique=False
        )
    # Add import_time_us to traceroute table
    traceroute_columns = [col['name'] for col in inspector.get_columns('traceroute')]
    if 'import_time_us' not in traceroute_columns:
        with op.batch_alter_table('traceroute', schema=None) as batch_op:
            batch_op.add_column(sa.Column('import_time_us', sa.BigInteger(), nullable=True))
        op.create_index(
            'idx_traceroute_import_time_us', 'traceroute', ['import_time_us'], unique=False
        )
def downgrade() -> None:
    """Drop the import_time_us columns and their indexes.

    NOTE(review): these drops are unconditional, but upgrade() only created
    the indexes when the column was missing. If the columns pre-existed, the
    index drops here may fail — confirm against deployed schemas.
    """
    # Drop indexes and columns
    op.drop_index('idx_traceroute_import_time_us', table_name='traceroute')
    with op.batch_alter_table('traceroute', schema=None) as batch_op:
        batch_op.drop_column('import_time_us')
    op.drop_index('idx_packet_seen_import_time_us', table_name='packet_seen')
    with op.batch_alter_table('packet_seen', schema=None) as batch_op:
        batch_op.drop_column('import_time_us')
    op.drop_index('idx_packet_from_node_time_us', table_name='packet')
    op.drop_index('idx_packet_import_time_us', table_name='packet')
    with op.batch_alter_table('packet', schema=None) as batch_op:
        batch_op.drop_column('import_time_us')

View File

@@ -0,0 +1,94 @@
"""Add last_update_us to node and migrate data.
Revision ID: b7c3c2e3a1f0
Revises: 9f3b1a8d2c4f
Create Date: 2026-01-12 10:12:00.000000
"""
from collections.abc import Sequence
from datetime import UTC, datetime
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "b7c3c2e3a1f0"
down_revision: str | None = "9f3b1a8d2c4f"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def _parse_datetime(value):
if value is None:
return None
if isinstance(value, datetime):
dt = value
elif isinstance(value, str):
text = value.replace("Z", "+00:00")
try:
dt = datetime.fromisoformat(text)
except ValueError:
return None
else:
return None
if dt.tzinfo is None:
return dt.replace(tzinfo=UTC)
return dt.astimezone(UTC)
def upgrade() -> None:
    """Add node.last_update_us, backfill it from last_update, drop last_update.

    The backfill runs row-by-row through _parse_datetime (handles DateTime
    values and ISO strings; unparseable values are skipped and left NULL).
    """
    conn = op.get_bind()
    op.add_column("node", sa.Column("last_update_us", sa.BigInteger(), nullable=True))
    op.create_index("idx_node_last_update_us", "node", ["last_update_us"], unique=False)
    # Lightweight table construct so we can SELECT/UPDATE without the ORM model.
    node = sa.table(
        "node",
        sa.column("id", sa.String()),
        sa.column("last_update", sa.DateTime()),
        sa.column("last_update_us", sa.BigInteger()),
    )
    rows = conn.execute(sa.select(node.c.id, node.c.last_update)).all()
    for node_id, last_update in rows:
        dt = _parse_datetime(last_update)
        if dt is None:
            # NULL or unparseable timestamp: leave last_update_us NULL.
            continue
        last_update_us = int(dt.timestamp() * 1_000_000)
        conn.execute(
            sa.update(node).where(node.c.id == node_id).values(last_update_us=last_update_us)
        )
    # SQLite needs batch mode (table rebuild) to drop a column.
    if conn.dialect.name == "sqlite":
        with op.batch_alter_table("node", schema=None) as batch_op:
            batch_op.drop_column("last_update")
    else:
        op.drop_column("node", "last_update")
def downgrade() -> None:
    """Restore node.last_update from last_update_us, then drop last_update_us.

    The restored values are naive datetimes representing UTC wall-clock time
    (tzinfo stripped), matching the pre-migration storage format.
    """
    conn = op.get_bind()
    op.add_column("node", sa.Column("last_update", sa.DateTime(), nullable=True))
    node = sa.table(
        "node",
        sa.column("id", sa.String()),
        sa.column("last_update", sa.DateTime()),
        sa.column("last_update_us", sa.BigInteger()),
    )
    rows = conn.execute(sa.select(node.c.id, node.c.last_update_us)).all()
    for node_id, last_update_us in rows:
        if last_update_us is None:
            continue
        # Microseconds since epoch -> naive UTC datetime.
        dt = datetime.fromtimestamp(last_update_us / 1_000_000, tz=UTC).replace(tzinfo=None)
        conn.execute(sa.update(node).where(node.c.id == node_id).values(last_update=dt))
    # SQLite needs batch mode to drop the indexed column.
    if conn.dialect.name == "sqlite":
        with op.batch_alter_table("node", schema=None) as batch_op:
            batch_op.drop_index("idx_node_last_update_us")
            batch_op.drop_column("last_update_us")
    else:
        op.drop_index("idx_node_last_update_us", table_name="node")
        op.drop_column("node", "last_update_us")

View File

@@ -0,0 +1,160 @@
"""Initial migration
Revision ID: c88468b7ab0b
Revises:
Create Date: 2025-10-26 20:56:50.285200
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = 'c88468b7ab0b'
down_revision: str | None = None
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Create the initial schema: node, packet, packet_seen, traceroute.

    Each create is guarded by an inspector check so the migration can be
    stamped onto a pre-existing (pre-Alembic) database without failing.
    Includes both the legacy DateTime import_time columns and the newer
    import_time_us microsecond-epoch columns.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Get connection and inspector to check what exists
    conn = op.get_bind()
    inspector = sa.inspect(conn)
    existing_tables = inspector.get_table_names()
    # Create node table if it doesn't exist
    if 'node' not in existing_tables:
        op.create_table(
            'node',
            sa.Column('id', sa.String(), nullable=False),
            sa.Column('node_id', sa.BigInteger(), nullable=True),
            sa.Column('long_name', sa.String(), nullable=True),
            sa.Column('short_name', sa.String(), nullable=True),
            sa.Column('hw_model', sa.String(), nullable=True),
            sa.Column('firmware', sa.String(), nullable=True),
            sa.Column('role', sa.String(), nullable=True),
            sa.Column('last_lat', sa.BigInteger(), nullable=True),
            sa.Column('last_long', sa.BigInteger(), nullable=True),
            sa.Column('channel', sa.String(), nullable=True),
            sa.Column('last_update', sa.DateTime(), nullable=True),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('node_id'),
        )
        op.create_index('idx_node_node_id', 'node', ['node_id'], unique=False)
    # Create packet table if it doesn't exist
    if 'packet' not in existing_tables:
        op.create_table(
            'packet',
            sa.Column('id', sa.BigInteger(), nullable=False),
            sa.Column('portnum', sa.Integer(), nullable=True),
            sa.Column('from_node_id', sa.BigInteger(), nullable=True),
            sa.Column('to_node_id', sa.BigInteger(), nullable=True),
            sa.Column('payload', sa.LargeBinary(), nullable=True),
            sa.Column('import_time', sa.DateTime(), nullable=True),
            sa.Column('import_time_us', sa.BigInteger(), nullable=True),
            sa.Column('channel', sa.String(), nullable=True),
            sa.PrimaryKeyConstraint('id'),
        )
        op.create_index('idx_packet_from_node_id', 'packet', ['from_node_id'], unique=False)
        op.create_index('idx_packet_to_node_id', 'packet', ['to_node_id'], unique=False)
        # DESC indexes support "newest first" packet listings.
        op.create_index(
            'idx_packet_import_time', 'packet', [sa.text('import_time DESC')], unique=False
        )
        op.create_index(
            'idx_packet_import_time_us', 'packet', [sa.text('import_time_us DESC')], unique=False
        )
        op.create_index(
            'idx_packet_from_node_time',
            'packet',
            ['from_node_id', sa.text('import_time DESC')],
            unique=False,
        )
        op.create_index(
            'idx_packet_from_node_time_us',
            'packet',
            ['from_node_id', sa.text('import_time_us DESC')],
            unique=False,
        )
    # Create packet_seen table if it doesn't exist
    if 'packet_seen' not in existing_tables:
        op.create_table(
            'packet_seen',
            sa.Column('packet_id', sa.BigInteger(), nullable=False),
            sa.Column('node_id', sa.BigInteger(), nullable=False),
            sa.Column('rx_time', sa.BigInteger(), nullable=False),
            sa.Column('hop_limit', sa.Integer(), nullable=True),
            sa.Column('hop_start', sa.Integer(), nullable=True),
            sa.Column('channel', sa.String(), nullable=True),
            sa.Column('rx_snr', sa.Float(), nullable=True),
            sa.Column('rx_rssi', sa.Integer(), nullable=True),
            sa.Column('topic', sa.String(), nullable=True),
            sa.Column('import_time', sa.DateTime(), nullable=True),
            sa.Column('import_time_us', sa.BigInteger(), nullable=True),
            sa.ForeignKeyConstraint(
                ['packet_id'],
                ['packet.id'],
            ),
            # Composite PK: one row per (packet, receiving node, receive time).
            sa.PrimaryKeyConstraint('packet_id', 'node_id', 'rx_time'),
        )
        op.create_index('idx_packet_seen_node_id', 'packet_seen', ['node_id'], unique=False)
        op.create_index('idx_packet_seen_packet_id', 'packet_seen', ['packet_id'], unique=False)
        op.create_index(
            'idx_packet_seen_import_time_us', 'packet_seen', ['import_time_us'], unique=False
        )
    # Create traceroute table if it doesn't exist
    if 'traceroute' not in existing_tables:
        op.create_table(
            'traceroute',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('packet_id', sa.BigInteger(), nullable=True),
            sa.Column('gateway_node_id', sa.BigInteger(), nullable=True),
            sa.Column('done', sa.Boolean(), nullable=True),
            sa.Column('route', sa.LargeBinary(), nullable=True),
            sa.Column('import_time', sa.DateTime(), nullable=True),
            sa.Column('import_time_us', sa.BigInteger(), nullable=True),
            sa.ForeignKeyConstraint(
                ['packet_id'],
                ['packet.id'],
            ),
            sa.PrimaryKeyConstraint('id'),
        )
        op.create_index('idx_traceroute_import_time', 'traceroute', ['import_time'], unique=False)
        op.create_index(
            'idx_traceroute_import_time_us', 'traceroute', ['import_time_us'], unique=False
        )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the whole schema, children before parents (FKs on packet.id)."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop traceroute table and indexes
    op.drop_index('idx_traceroute_import_time_us', table_name='traceroute')
    op.drop_index('idx_traceroute_import_time', table_name='traceroute')
    op.drop_table('traceroute')
    # Drop packet_seen table and indexes
    op.drop_index('idx_packet_seen_import_time_us', table_name='packet_seen')
    op.drop_index('idx_packet_seen_packet_id', table_name='packet_seen')
    op.drop_index('idx_packet_seen_node_id', table_name='packet_seen')
    op.drop_table('packet_seen')
    # Drop packet table and indexes
    op.drop_index('idx_packet_from_node_time_us', table_name='packet')
    op.drop_index('idx_packet_from_node_time', table_name='packet')
    op.drop_index('idx_packet_import_time_us', table_name='packet')
    op.drop_index('idx_packet_import_time', table_name='packet')
    op.drop_index('idx_packet_to_node_id', table_name='packet')
    op.drop_index('idx_packet_from_node_id', table_name='packet')
    op.drop_table('packet')
    # Drop node table and indexes
    op.drop_index('idx_node_node_id', table_name='node')
    op.drop_table('node')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,34 @@
"""Drop last_update_us from node.
Revision ID: d4d7b0c2e1a4
Revises: b7c3c2e3a1f0
Create Date: 2026-01-12 10:20:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "d4d7b0c2e1a4"
down_revision: str | None = "b7c3c2e3a1f0"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Drop node.last_update_us and its index.

    SQLite cannot drop an indexed column in place, so batch mode (table
    rebuild) is used there; other dialects drop index and column directly.
    """
    conn = op.get_bind()
    if conn.dialect.name == "sqlite":
        with op.batch_alter_table("node", schema=None) as batch_op:
            batch_op.drop_index("idx_node_last_update_us")
            batch_op.drop_column("last_update_us")
    else:
        op.drop_index("idx_node_last_update_us", table_name="node")
        op.drop_column("node", "last_update_us")
def downgrade() -> None:
    """Re-create node.last_update_us and its index (previous values are lost)."""
    op.add_column("node", sa.Column("last_update_us", sa.BigInteger(), nullable=True))
    op.create_index("idx_node_last_update_us", "node", ["last_update_us"], unique=False)

57
container/build-container.sh Executable file
View File

@@ -0,0 +1,57 @@
#!/bin/sh
#
# build-container.sh
#
# Script to build MeshView container images.
# Usage: build-container.sh [-t TAG] [-n NAME] [-f CONTAINERFILE]
# Abort on the first failing command.
set -e
# Default values
IMAGE_NAME="meshview"
TAG="latest"
CONTAINERFILE="Containerfile"
# Parse arguments
while [ $# -gt 0 ]; do
    case "$1" in
        --tag|-t)
            # NOTE(review): if the flag is given without a value, `shift 2`
            # fails under `set -e` with a terse error — confirm acceptable.
            TAG="$2"
            shift 2
            ;;
        --name|-n)
            IMAGE_NAME="$2"
            shift 2
            ;;
        --file|-f)
            CONTAINERFILE="$2"
            shift 2
            ;;
        --help|-h)
            echo "Usage: $0 [OPTIONS]"
            echo ""
            echo "Options:"
            echo "  -t, --tag TAG      Tag for the image (default: latest)"
            echo "  -n, --name NAME    Image name (default: meshview)"
            echo "  -f, --file FILE    Containerfile path (default: Containerfile)"
            echo "  -h, --help         Show this help"
            exit 0
            ;;
        *)
            # Any unrecognized argument is an error.
            echo "Unknown option: $1"
            echo "Use --help for usage information"
            exit 1
            ;;
    esac
done
echo "Building MeshView container image..."
echo "  Image: ${IMAGE_NAME}:${TAG}"
echo "  Containerfile: ${CONTAINERFILE}"
echo ""
# Build the container image from the repo root (context: current directory).
docker build -f "${CONTAINERFILE}" -t "${IMAGE_NAME}:${TAG}" .
echo ""
echo "Build complete!"
echo "Run with: docker run --rm -p 8081:8081 ${IMAGE_NAME}:${TAG}"

37
container/config.patch Normal file
View File

@@ -0,0 +1,37 @@
diff --git a/sample.config.ini b/sample.config.ini
index 0e64980..494685c 100644
--- a/sample.config.ini
+++ b/sample.config.ini
@@ -3,7 +3,7 @@
# -------------------------
[server]
# The address to bind the server to. Use * to listen on all interfaces.
-bind = *
+bind = 0.0.0.0
# Port to run the web server on.
port = 8081
@@ -64,7 +64,7 @@ net_tag = #BayMeshNet
# -------------------------
[mqtt]
# MQTT server hostname or IP.
-server = mqtt.bayme.sh
+server = mqtt.meshtastic.org
# Topics to subscribe to (as JSON-like list, but still a string).
topics = ["msh/US/bayarea/#", "msh/US/CA/mrymesh/#", "msh/US/CA/sacvalley"]
@@ -82,7 +82,7 @@ password = large4cats
# -------------------------
[database]
# SQLAlchemy connection string. This one uses SQLite with asyncio support.
-connection_string = sqlite+aiosqlite:///packets.db
+connection_string = sqlite+aiosqlite:////var/lib/meshview/packets.db
# -------------------------
@@ -110,4 +110,4 @@ vacuum = False
# Set to True to enable, False to disable (default: False)
access_log = False
# Database cleanup logfile
-db_cleanup_logfile = dbcleanup.log
+db_cleanup_logfile = /var/log/meshview/dbcleanup.log

52
create_example_migration.py Executable file
View File

@@ -0,0 +1,52 @@
#!/usr/bin/env python3
"""
Script to create a blank migration for manual editing.

Usage:
    ./env/bin/python create_example_migration.py

This creates an empty migration file that you can manually edit to add
custom migration logic (data migrations, complex schema changes, etc.)

Unlike create_migration.py which auto-generates from model changes,
this creates a blank template for you to fill in.
"""
import os
import sys

# Make the project root importable so `meshview.config` can be found.
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

from alembic import command
from alembic.config import Config

# URL used when the meshview config cannot be loaded.
FALLBACK_DB_URL = "sqlite+aiosqlite:///packets.db"

alembic_cfg = Config("alembic.ini")

# Point Alembic at the database from the meshview config, falling back
# to the default SQLite file when the config is unavailable.
try:
    from meshview.config import CONFIG

    database_url = CONFIG["database"]["connection_string"]
    alembic_cfg.set_main_option("sqlalchemy.url", database_url)
    print(f"Using database URL from config: {database_url}")
except Exception as e:
    print(f"Warning: Could not load meshview config: {e}")
    print("Using default database URL")
    alembic_cfg.set_main_option("sqlalchemy.url", FALLBACK_DB_URL)

# Create the empty migration template in alembic/versions/.
try:
    print("Creating blank migration for manual editing...")
    command.revision(alembic_cfg, autogenerate=False, message="Manual migration")
    print("✓ Successfully created blank migration!")
    print("\nNow edit the generated file in alembic/versions/")
    print("Add your custom upgrade() and downgrade() logic")
except Exception as e:
    print(f"✗ Error creating migration: {e}")
    import traceback

    traceback.print_exc()
    sys.exit(1)

58
create_migration.py Executable file
View File

@@ -0,0 +1,58 @@
#!/usr/bin/env python3
"""
Helper script to create Alembic migrations from SQLAlchemy model changes.

Usage:
    ./env/bin/python create_migration.py

This will:
1. Load your current models from meshview/models.py
2. Compare them to the current database schema
3. Auto-generate a migration with the detected changes
4. Save the migration to alembic/versions/

After running this, review the generated migration file before committing!
"""
import os
import sys

# Make the project root importable so `meshview.config` can be found.
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

from alembic import command
from alembic.config import Config

# URL used when the meshview config cannot be loaded.
FALLBACK_DB_URL = "sqlite+aiosqlite:///packets.db"

alembic_cfg = Config("alembic.ini")

# Point Alembic at the database from the meshview config, falling back
# to the default SQLite file when the config is unavailable.
try:
    from meshview.config import CONFIG

    database_url = CONFIG["database"]["connection_string"]
    alembic_cfg.set_main_option("sqlalchemy.url", database_url)
    print(f"Using database URL from config: {database_url}")
except Exception as e:
    print(f"Warning: Could not load meshview config: {e}")
    print("Using default database URL")
    alembic_cfg.set_main_option("sqlalchemy.url", FALLBACK_DB_URL)

# Auto-generate a migration by diffing the models against the schema.
try:
    print("\nComparing models to current database schema...")
    print("Generating migration...\n")
    command.revision(alembic_cfg, autogenerate=True, message="Auto-generated migration")
    print("\n✓ Successfully created migration!")
    print("\nNext steps:")
    print("1. Review the generated file in alembic/versions/")
    print("2. Edit the migration message/logic if needed")
    print("3. Test the migration: ./env/bin/alembic upgrade head")
    print("4. Commit the migration file to version control")
except Exception as e:
    print(f"\n✗ Error creating migration: {e}")
    import traceback

    traceback.print_exc()
    sys.exit(1)

View File

@@ -1,28 +0,0 @@
FROM python:3.12-slim
# Set work directory
WORKDIR /app
# Install system dependencies (graphviz required, git for cloning)
RUN apt-get update && \
apt-get install -y --no-install-recommends git graphviz && \
rm -rf /var/lib/apt/lists/*
# Clone the repo with submodules
RUN git clone --recurse-submodules https://github.com/pablorevilla-meshtastic/meshview.git /app
# Create virtual environment
RUN python -m venv /app/env
# Upgrade pip and install requirements in venv
RUN /app/env/bin/pip install --no-cache-dir --upgrade pip && \
/app/env/bin/pip install --no-cache-dir -r /app/requirements.txt
# Copy sample config
RUN cp /app/sample.config.ini /app/config.ini
# Expose port
EXPOSE 8081
# Run the app via venv
CMD ["/app/env/bin/python", "/app/mvrun.py"]

View File

@@ -1,44 +1,36 @@
# MeshView Docker Container
This Dockerfile builds a containerized version of the [MeshView](https://github.com/pablorevilla-meshtastic/meshview) application. It uses a lightweight Python environment and sets up the required virtual environment as expected by the application.
> **Note:** This directory contains legacy Docker build files.
>
> **For current Docker usage instructions, please see [README-Docker.md](../README-Docker.md) in the project root.**
## Image Details
## Current Approach
- **Base Image**: `python:3.12-slim`
- **Working Directory**: `/app`
- **Python Virtual Environment**: `/app/env`
Pre-built container images are automatically built and published to GitHub Container Registry:
```bash
docker pull ghcr.io/pablorevilla-meshtastic/meshview:latest
```
See **[README-Docker.md](../README-Docker.md)** for:
- Quick start instructions
- Volume mount configuration
- Docker Compose examples
- Backup configuration
- Troubleshooting
## Legacy Build (Not Recommended)
If you need to build your own image for development:
```bash
# From project root
docker build -f Containerfile -t meshview:local .
```
The current Containerfile uses:
- **Base Image**: `python:3.13-slim` (Debian-based)
- **Build tool**: `uv` for fast dependency installation
- **User**: Non-root user `app` (UID 10001)
- **Exposed Port**: `8081`
## Build Instructions
Build the Docker image:
```bash
docker build -t meshview-docker .
```
## Run Instructions
Run the container:
```bash
docker run -d --name meshview-docker -p 8081:8081 meshview-docker
```
This maps container port `8081` to your host. The application runs via:
```bash
/app/env/bin/python /app/mvrun.py
```
## Web Interface
Once the container is running, you can access the MeshView web interface by visiting:
http://localhost:8081
If running on a remote server, replace `localhost` with the host's IP or domain name:
http://<host>:8081
Ensure that port `8081` is open and not blocked by a firewall or security group.
- **Volumes**: `/etc/meshview`, `/var/lib/meshview`, `/var/log/meshview`

361
docs/ALEMBIC_SETUP.md Normal file
View File

@@ -0,0 +1,361 @@
# Alembic Database Migration Setup
This document describes the automatic database migration system implemented for MeshView using Alembic.
## Overview
The system provides automatic database schema migrations with coordination between the writer app (startdb.py) and reader app (web.py):
- **Writer App**: Automatically runs pending migrations on startup
- **Reader App**: Waits for migrations to complete before starting
## Architecture
### Key Components
1. **`meshview/migrations.py`** - Migration management utilities
- `run_migrations()` - Runs pending migrations (writer app)
- `wait_for_migrations()` - Waits for schema to be current (reader app)
- `is_database_up_to_date()` - Checks schema version
- Migration status tracking table
2. **`alembic/`** - Alembic migration directory
- `env.py` - Configured for async SQLAlchemy support
- `versions/` - Migration scripts directory
- `alembic.ini` - Alembic configuration
3. **Modified Apps**:
- `startdb.py` - Writer app that runs migrations before MQTT ingestion
- `meshview/web.py` - Reader app that waits for schema updates
## How It Works - Automatic In-Place Updates
### ✨ Fully Automatic Operation
**No manual migration commands needed!** The database schema updates automatically when you:
1. Deploy new code with migration files
2. Restart the applications
### Writer App (startdb.py) Startup Sequence
1. Initialize database connection
2. Create migration status tracking table
3. Set "migration in progress" flag
4. **🔄 Automatically run any pending Alembic migrations** (synchronously)
- Detects current schema version
- Compares to latest available migration
- Runs all pending migrations in sequence
- Updates database schema in place
5. Clear "migration in progress" flag
6. Start MQTT ingestion and other tasks
### Reader App (web.py) Startup Sequence
1. Initialize database connection
2. **Check database schema version**
3. If not up to date:
- Wait up to 60 seconds (30 retries × 2 seconds)
- Check every 2 seconds for schema updates
- Automatically proceeds once writer completes migrations
4. Once schema is current, start web server
### 🎯 Key Point: Zero Manual Steps
When you deploy new code with migrations:
```bash
# Just start the apps - migrations happen automatically!
./env/bin/python startdb.py # Migrations run here automatically
./env/bin/python main.py # Waits for migrations, then starts
```
**The database updates itself!** No need to run `alembic upgrade` manually.
### Coordination
The apps coordinate using:
- **Alembic version table** (`alembic_version`) - Tracks current schema version
- **Migration status table** (`migration_status`) - Optional flag for "in progress" state
## Creating New Migrations
### Using the helper script:
```bash
./env/bin/python create_migration.py
```
### Manual creation:
```bash
./env/bin/alembic revision --autogenerate -m "Description of changes"
```
This will:
1. Compare current database schema with SQLAlchemy models
2. Generate a migration script in `alembic/versions/`
3. Automatically detect most schema changes
### Manual migration (advanced):
```bash
./env/bin/alembic revision -m "Manual migration"
```
Then edit the generated file to add custom migration logic.
## Running Migrations
### Automatic (Recommended)
Migrations run automatically when the writer app starts:
```bash
./env/bin/python startdb.py
```
### Manual
To run migrations manually:
```bash
./env/bin/alembic upgrade head
```
To downgrade:
```bash
./env/bin/alembic downgrade -1 # Go back one version
./env/bin/alembic downgrade base # Go back to beginning
```
## Checking Migration Status
Check current database version:
```bash
./env/bin/alembic current
```
View migration history:
```bash
./env/bin/alembic history
```
## Benefits
1. **Zero Manual Intervention**: Migrations run automatically on startup
2. **Safe Coordination**: Reader won't connect to incompatible schema
3. **Version Control**: All schema changes tracked in git
4. **Rollback Capability**: Can downgrade if needed
5. **Auto-generation**: Most migrations created automatically from model changes
## Migration Workflow
### Development Process
1. **Modify SQLAlchemy models** in `meshview/models.py`
2. **Create migration**:
```bash
./env/bin/python create_migration.py
```
3. **Review generated migration** in `alembic/versions/`
4. **Test migration**:
- Stop all apps
- Start writer app (migrations run automatically)
- Start reader app (waits for schema to be current)
5. **Commit migration** to version control
### Production Deployment
1. **Deploy new code** with migration scripts
2. **Start writer app** - Migrations run automatically
3. **Start reader app** - Waits for migrations, then starts
4. **Monitor logs** for migration success
## Troubleshooting
### Migration fails
Check logs in writer app for error details. To manually fix:
```bash
./env/bin/alembic current # Check current version
./env/bin/alembic history # View available versions
./env/bin/alembic upgrade head # Try manual upgrade
```
### Reader app won't start (timeout)
Check if writer app is running and has completed migrations:
```bash
./env/bin/alembic current
```
### Reset to clean state
⚠️ **Warning: This will lose all data**
```bash
rm packets.db # Or your database file
./env/bin/alembic upgrade head # Create fresh schema
```
## File Structure
```
meshview/
├── alembic.ini # Alembic configuration
├── alembic/
│ ├── env.py # Async-enabled migration runner
│ ├── script.py.mako # Migration template
│ └── versions/ # Migration scripts
│ └── c88468b7ab0b_initial_migration.py
├── meshview/
│ ├── models.py # SQLAlchemy models (source of truth)
│ ├── migrations.py # Migration utilities
│ ├── mqtt_database.py # Writer database connection
│ └── database.py # Reader database connection
├── startdb.py # Writer app (runs migrations)
├── main.py # Entry point for reader app
└── create_migration.py # Helper script for creating migrations
```
## Configuration
Database URL is read from `config.ini`:
```ini
[database]
connection_string = sqlite+aiosqlite:///packets.db
```
Alembic automatically uses this configuration through `meshview/migrations.py`.
## Important Notes
1. **Always test migrations** in development before deploying to production
2. **Backup database** before running migrations in production
3. **Check for data loss** - Some migrations may require data migration logic
4. **Coordinate deployments** - Start writer before readers in multi-instance setups
5. **Monitor logs** during first startup after deployment
## Example Migrations
### Example 1: Generated Initial Migration
Here's what an auto-generated migration looks like (from comparing models to database):
```python
"""Initial migration
Revision ID: c88468b7ab0b
Revises:
Create Date: 2025-01-26 20:56:50.123456
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers
revision = 'c88468b7ab0b'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
# Upgrade operations
op.create_table('node',
sa.Column('id', sa.String(), nullable=False),
sa.Column('node_id', sa.BigInteger(), nullable=True),
# ... more columns
sa.PrimaryKeyConstraint('id')
)
def downgrade() -> None:
# Downgrade operations
op.drop_table('node')
```
### Example 2: Manual Migration Adding a New Table
We've included an example migration (`1717fa5c6545_add_example_table.py`) that demonstrates how to manually create a new table:
```python
"""Add example table
Revision ID: 1717fa5c6545
Revises: c88468b7ab0b
Create Date: 2025-10-26 20:59:04.347066
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
def upgrade() -> None:
"""Create example table with sample columns."""
op.create_table(
'example',
sa.Column('id', sa.Integer(), nullable=False, primary_key=True, autoincrement=True),
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('value', sa.Float(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False, server_default='1'),
sa.Column('created_at', sa.DateTime(), nullable=False,
server_default=sa.text('CURRENT_TIMESTAMP')),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
# Create an index on the name column for faster lookups
op.create_index('idx_example_name', 'example', ['name'])
def downgrade() -> None:
"""Remove example table."""
op.drop_index('idx_example_name', table_name='example')
op.drop_table('example')
```
**Key features demonstrated:**
- Various column types (Integer, String, Text, Float, Boolean, DateTime)
- Primary key with autoincrement
- Nullable and non-nullable columns
- Server defaults (for timestamps and booleans)
- Creating indexes
- Proper downgrade that reverses all changes
**To test this migration:**
```bash
# Apply the migration
./env/bin/alembic upgrade head
# Check it was applied
./env/bin/alembic current
# Verify table was created
sqlite3 packets.db "SELECT sql FROM sqlite_master WHERE type='table' AND name='example';"
# Roll back the migration
./env/bin/alembic downgrade -1
# Verify table was removed
sqlite3 packets.db "SELECT name FROM sqlite_master WHERE type='table' AND name='example';"
```
**To remove this example migration** (after testing):
```bash
# First make sure you're not on this revision
./env/bin/alembic downgrade c88468b7ab0b
# Then delete the migration file
rm alembic/versions/1717fa5c6545_add_example_table.py
```
## References
- [Alembic Documentation](https://alembic.sqlalchemy.org/)
- [SQLAlchemy Documentation](https://docs.sqlalchemy.org/)
- [Async SQLAlchemy](https://docs.sqlalchemy.org/en/20/orm/extensions/asyncio.html)

392
docs/API_Documentation.md Normal file
View File

@@ -0,0 +1,392 @@
# API Documentation
Base URL: `http(s)://<host>`
All endpoints return JSON. Timestamps are either ISO 8601 strings or `*_us` values in
microseconds since epoch.
## 1. Nodes API
### GET `/api/nodes`
Returns a list of nodes, with optional filtering.
Query Parameters
- `node_id` (optional, int): Exact node ID.
- `role` (optional, string): Node role.
- `channel` (optional, string): Channel name.
- `hw_model` (optional, string): Hardware model.
- `days_active` (optional, int): Nodes seen within the last N days.
Response Example
```json
{
"nodes": [
{
"id": 42,
"node_id": 1234,
"long_name": "Alice",
"short_name": "A",
"hw_model": "T-Beam",
"firmware": "1.2.3",
"role": "client",
"last_lat": 377749000,
"last_long": -1224194000,
"channel": "main",
"last_seen_us": 1736370123456789
}
]
}
```
---
## 2. Packets API
### GET `/api/packets`
Returns packets with optional filters.
Query Parameters
- `packet_id` (optional, int): Return exactly one packet (overrides other filters).
- `limit` (optional, int): Max packets to return, clamped 1-1000. Default: `50`.
- `since` (optional, int): Only packets imported after this microsecond timestamp.
- `portnum` (optional, int): Filter by port number.
- `contains` (optional, string): Payload substring filter.
- `from_node_id` (optional, int): Filter by sender node ID.
- `to_node_id` (optional, int): Filter by recipient node ID.
- `node_id` (optional, int): Legacy filter matching either from or to node ID.
Response Example
```json
{
"packets": [
{
"id": 123,
"import_time_us": 1736370123456789,
"channel": "main",
"from_node_id": 5678,
"to_node_id": 91011,
"portnum": 1,
"long_name": "Alice",
"payload": "Hello, Bob!",
"to_long_name": "Bob",
"reply_id": 122
}
],
"latest_import_time": 1736370123456789
}
```
Notes
- For `portnum=1` (text messages), packets are filtered to remove sequence-only payloads.
- `latest_import_time` is returned when available for incremental polling (microseconds).
---
## 3. Channels API
### GET `/api/channels`
Returns channels seen in a time period.
Query Parameters
- `period_type` (optional, string): `hour` or `day`. Default: `hour`.
- `length` (optional, int): Number of periods to look back. Default: `24`.
Response Example
```json
{
"channels": ["LongFast", "MediumFast", "ShortFast"]
}
```
---
## 4. Stats API
### GET `/api/stats`
Returns packet statistics aggregated by time periods, with optional filtering.
Query Parameters
- `period_type` (optional, string): `hour` or `day`. Default: `hour`.
- `length` (optional, int): Number of periods to include. Default: `24`.
- `channel` (optional, string): Filter by channel (case-insensitive).
- `portnum` (optional, int): Filter by port number.
- `to_node` (optional, int): Filter by destination node ID.
- `from_node` (optional, int): Filter by source node ID.
- `node` (optional, int): If provided, return combined `sent` and `seen` totals for that node.
Response Example (series)
```json
{
"period_type": "hour",
"length": 24,
"channel": "LongFast",
"portnum": 1,
"to_node": 12345678,
"from_node": 87654321,
"data": [
{ "period": "2025-08-08 14:00", "count": 10 },
{ "period": "2025-08-08 15:00", "count": 7 }
]
}
```
Response Example (`node` totals)
```json
{
"node_id": 12345678,
"period_type": "hour",
"length": 24,
"sent": 42,
"seen": 58
}
```
---
### GET `/api/stats/count`
Returns total packet counts, optionally filtered.
Query Parameters
- `packet_id` (optional, int): Filter packet_seen by packet ID.
- `period_type` (optional, string): `hour` or `day`.
- `length` (optional, int): Number of periods to include.
- `channel` (optional, string): Filter by channel.
- `from_node` (optional, int): Filter by source node ID.
- `to_node` (optional, int): Filter by destination node ID.
Response Example
```json
{
"total_packets": 12345,
"total_seen": 67890
}
```
---
### GET `/api/stats/top`
Returns nodes sorted by packets seen, with pagination.
Query Parameters
- `period_type` (optional, string): `hour` or `day`. Default: `day`.
- `length` (optional, int): Number of periods to include. Default: `1`.
- `channel` (optional, string): Filter by channel.
- `limit` (optional, int): Max nodes to return. Default: `20`, max `100`.
- `offset` (optional, int): Pagination offset. Default: `0`.
Response Example
```json
{
"total": 250,
"limit": 20,
"offset": 0,
"nodes": [
{
"node_id": 1234,
"long_name": "Alice",
"short_name": "A",
"channel": "main",
"sent": 100,
"seen": 240,
"avg": 2.4
}
]
}
```
---
## 5. Edges API
### GET `/api/edges`
Returns network edges (connections between nodes) based on traceroutes and neighbor info.
Traceroute edges are collected over the last 12 hours. Neighbor edges are based on
port 71 packets.
Query Parameters
- `type` (optional, string): `traceroute` or `neighbor`. If omitted, returns both.
- `node_id` (optional, int): Filter edges to only those touching a node.
Response Example
```json
{
"edges": [
{ "from": 12345678, "to": 87654321, "type": "traceroute" },
{ "from": 11111111, "to": 22222222, "type": "neighbor" }
]
}
```
---
## 6. Config API
### GET `/api/config`
Returns a safe subset of server configuration.
Response Example
```json
{
"site": {
"domain": "example.com",
"language": "en",
"title": "Meshview",
"message": "",
"starting": "/chat",
"nodes": "true",
"chat": "true",
"everything": "true",
"graphs": "true",
"stats": "true",
"net": "true",
"map": "true",
"top": "true",
"map_top_left_lat": 39.0,
"map_top_left_lon": -123.0,
"map_bottom_right_lat": 36.0,
"map_bottom_right_lon": -121.0,
"map_interval": 3,
"firehose_interval": 3,
"weekly_net_message": "Weekly Mesh check-in message.",
"net_tag": "#BayMeshNet",
"version": "3.0.0"
},
"mqtt": {
"server": "mqtt.example.com",
"topics": ["msh/region/#"]
},
"cleanup": {
"enabled": "false",
"days_to_keep": "14",
"hour": "2",
"minute": "0",
"vacuum": "false"
}
}
```
---
## 7. Language API
### GET `/api/lang`
Returns translation strings.
Query Parameters
- `lang` (optional, string): Language code (e.g., `en`, `es`). Default from config or `en`.
- `section` (optional, string): Return only one section (e.g., `nodelist`, `firehose`).
Response Example
```json
{
"title": "Meshview",
"search_placeholder": "Search..."
}
```
---
## 8. Packets Seen API
### GET `/api/packets_seen/{packet_id}`
Returns packet_seen entries for a packet.
Path Parameters
- `packet_id` (required, int): Packet ID.
Response Example
```json
{
"seen": [
{
"packet_id": 123,
"node_id": 456,
"rx_time": "2025-07-22T12:45:00",
"hop_limit": 7,
"hop_start": 0,
"channel": "main",
"rx_snr": 5.0,
"rx_rssi": -90,
"topic": "msh/region/#",
"import_time_us": 1736370123456789
}
]
}
```
---
## 9. Traceroute API
### GET `/api/traceroute/{packet_id}`
Returns traceroute details and derived paths for a packet.
Path Parameters
- `packet_id` (required, int): Packet ID.
Response Example
```json
{
"packet": {
"id": 123,
"from": 111,
"to": 222,
"channel": "main"
},
"traceroute_packets": [
{
"index": 0,
"gateway_node_id": 333,
"done": true,
"forward_hops": [111, 444, 222],
"reverse_hops": [222, 444, 111]
}
],
"unique_forward_paths": [
{ "path": [111, 444, 222], "count": 2 }
],
"unique_reverse_paths": [
[222, 444, 111]
],
"winning_paths": [
[111, 444, 222]
]
}
```
---
## 10. Health API
### GET `/health`
Returns service health and database status.
Response Example
```json
{
"status": "healthy",
"timestamp": "2025-07-22T12:45:00+00:00",
"version": "3.0.3",
"git_revision": "abc1234",
"database": "connected",
"database_size": "12.34 MB",
"database_size_bytes": 12939444
}
```
---
## 11. Version API
### GET `/version`
Returns version metadata.
Response Example
```json
{
"version": "3.0.3",
"release_date": "2026-1-15",
"git_revision": "abc1234",
"git_revision_short": "abc1234"
}
```

View File

@@ -0,0 +1,146 @@
# Database Changes With Alembic
This guide explains how to make database schema changes in MeshView using Alembic migrations.
## Overview
When you need to add, modify, or remove columns from database tables, you must:
1. Update the SQLAlchemy model
2. Create an Alembic migration
3. Let the system automatically apply the migration
## Step-by-Step Process
### 1. Update the Model
Edit `meshview/models.py` to add/modify the column in the appropriate model class:
```python
class Traceroute(Base):
__tablename__ = "traceroute"
id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
# ... existing columns ...
route_return: Mapped[bytes] = mapped_column(nullable=True) # New column
```
### 2. Create an Alembic Migration
Generate a new migration file with a descriptive message:
```bash
./env/bin/alembic revision -m "add route_return to traceroute"
```
This creates a new file in `alembic/versions/` with a unique revision ID.
### 3. Fill in the Migration
Edit the generated migration file to implement the actual database changes:
```python
def upgrade() -> None:
# Add route_return column to traceroute table
with op.batch_alter_table('traceroute', schema=None) as batch_op:
batch_op.add_column(sa.Column('route_return', sa.LargeBinary(), nullable=True))
def downgrade() -> None:
# Remove route_return column from traceroute table
with op.batch_alter_table('traceroute', schema=None) as batch_op:
batch_op.drop_column('route_return')
```
### 4. Migration Runs Automatically
When you restart the application with `mvrun.py`:
1. The writer process (`startdb.py`) starts up
2. It checks if the database schema is up to date
3. If new migrations are pending, it runs them automatically
4. The reader process (web server) waits for migrations to complete before starting
**No manual migration command is needed** - the application handles this automatically on startup.
### 5. Commit Both Files
Add both files to git:
```bash
git add meshview/models.py
git add alembic/versions/ac311b3782a1_add_route_return_to_traceroute.py
git commit -m "Add route_return column to traceroute table"
```
## Important Notes
### SQLite Compatibility
Always use `batch_alter_table` for SQLite compatibility:
```python
with op.batch_alter_table('table_name', schema=None) as batch_op:
batch_op.add_column(...)
```
SQLite has limited ALTER TABLE support, and `batch_alter_table` works around these limitations.
### Migration Process
- **Writer process** (`startdb.py`): Runs migrations on startup
- **Reader process** (web server in `main.py`): Waits for migrations to complete
- Migrations are checked and applied every time the application starts
- The system uses a migration status table to coordinate between processes
### Common Column Types
```python
# Integer
column: Mapped[int] = mapped_column(BigInteger, nullable=True)
# String
column: Mapped[str] = mapped_column(nullable=True)
# Bytes/Binary
column: Mapped[bytes] = mapped_column(nullable=True)
# DateTime
column: Mapped[datetime] = mapped_column(nullable=True)
# Boolean
column: Mapped[bool] = mapped_column(nullable=True)
# Float
column: Mapped[float] = mapped_column(nullable=True)
```
### Migration File Location
Migrations are stored in: `alembic/versions/`
Each migration file includes:
- Revision ID (unique identifier)
- Down revision (previous migration in chain)
- Create date
- `upgrade()` function (applies changes)
- `downgrade()` function (reverts changes)
## Troubleshooting
### Migration Not Running
If migrations don't run automatically:
1. Check that the database is writable
2. Look for errors in the startup logs
3. Verify the migration chain is correct (each migration references the previous one)
### Manual Migration (Not Recommended)
If you need to manually run migrations for debugging:
```bash
./env/bin/alembic upgrade head
```
However, the application normally handles this automatically.

14
docs/README.md Normal file
View File

@@ -0,0 +1,14 @@
# Technical Documentation
This directory contains technical documentation for MeshView that goes beyond initial setup and basic usage.
These documents are intended for developers, contributors, and advanced users who need deeper insight into the system's architecture, database migrations, API endpoints, and internal workings.
## Contents
- [ALEMBIC_SETUP.md](ALEMBIC_SETUP.md) - Database migration setup and management
- [TIMESTAMP_MIGRATION.md](TIMESTAMP_MIGRATION.md) - Details on timestamp schema changes
- [API_Documentation.md](API_Documentation.md) - REST API endpoints and usage
- [CODE_IMPROVEMENTS.md](CODE_IMPROVEMENTS.md) - Suggested code improvements and refactoring ideas
For initial setup and basic usage instructions, please see the main [README.md](../README.md) in the root directory.

193
docs/TIMESTAMP_MIGRATION.md Normal file
View File

@@ -0,0 +1,193 @@
# High-Resolution Timestamp Migration
This document describes the implementation of GitHub issue #55: storing high-resolution timestamps as integers in the database for improved performance and query efficiency.
## Overview
The meshview database now stores timestamps in two formats:
1. **TEXT format** (`import_time`): Human-readable ISO8601 format with microseconds (e.g., `2025-03-12 04:15:56.058038`)
2. **INTEGER format** (`import_time_us`): Microseconds since Unix epoch (1970-01-01 00:00:00 UTC)
The dual format approach provides:
- **Backward compatibility**: Existing `import_time` TEXT columns remain unchanged
- **Performance**: Fast integer comparisons and math operations
- **Precision**: Microsecond resolution for accurate timing
- **Efficiency**: Compact storage and fast indexed lookups
## Database Changes
### New Columns Added
Three tables have new `import_time_us` columns:
1. **packet.import_time_us** (INTEGER)
- Stores when the packet was imported into the database
- Indexed for fast queries
2. **packet_seen.import_time_us** (INTEGER)
- Stores when the packet_seen record was imported
- Indexed for performance
3. **traceroute.import_time_us** (INTEGER)
- Stores when the traceroute was imported
- Indexed for fast lookups
### New Indexes
The following indexes were created for optimal query performance:
```sql
CREATE INDEX idx_packet_import_time_us ON packet(import_time_us DESC);
CREATE INDEX idx_packet_from_node_time_us ON packet(from_node_id, import_time_us DESC);
CREATE INDEX idx_packet_seen_import_time_us ON packet_seen(import_time_us);
CREATE INDEX idx_traceroute_import_time_us ON traceroute(import_time_us);
```
## Migration Process
### For Existing Databases
Run the migration script to add the new columns and populate them from existing data:
```bash
python migrate_add_timestamp_us.py [database_path]
```
If no path is provided, it defaults to `packets.db` in the current directory.
The migration script:
1. Checks if migration is needed (idempotent)
2. Adds `import_time_us` columns to the three tables
3. Populates the new columns from existing `import_time` values
4. Creates indexes for optimal performance
5. Verifies the migration completed successfully
### For New Databases
New databases created with the updated schema will automatically include the `import_time_us` columns. The MQTT store module populates both columns when inserting new records.
## Code Changes
### Models (meshview/models.py)
The ORM models now include the new `import_time_us` fields:
```python
class Packet(Base):
import_time: Mapped[datetime] = mapped_column(nullable=True)
import_time_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
```
### MQTT Store (meshview/mqtt_store.py)
The data ingestion logic now populates both timestamp columns using UTC time:
```python
now = datetime.datetime.now(datetime.timezone.utc)
now_us = int(now.timestamp() * 1_000_000)
# Both columns are populated
import_time=now,
import_time_us=now_us,
```
**Important**: All new timestamps use UTC (Coordinated Universal Time) for consistency across time zones.
## Using the New Timestamps
### Example Queries
**Query packets from the last 7 days:**
```sql
-- Old way (slower)
SELECT * FROM packet
WHERE import_time >= datetime('now', '-7 days');
-- New way (faster)
SELECT * FROM packet
WHERE import_time_us >= (strftime('%s', 'now', '-7 days') * 1000000);
```
**Query packets in a specific time range:**
```sql
SELECT * FROM packet
WHERE import_time_us BETWEEN 1759254380000000 AND 1759254390000000;
```
**Calculate time differences (in microseconds):**
```sql
SELECT
id,
(import_time_us - LAG(import_time_us) OVER (ORDER BY import_time_us)) / 1000000.0 as seconds_since_last
FROM packet
LIMIT 10;
```
### Converting Timestamps
**From datetime to microseconds (UTC):**
```python
import datetime
now = datetime.datetime.now(datetime.timezone.utc)
now_us = int(now.timestamp() * 1_000_000)
```
**From microseconds to datetime (UTC):**
```python
import datetime
timestamp_us = 1759254380813451
dt = datetime.datetime.fromtimestamp(timestamp_us / 1_000_000, tz=datetime.timezone.utc)
```
Note: omitting `tz=` would return local time, which is inconsistent with the UTC convention used throughout this project.
**In SQL queries:**
```sql
-- Datetime to microseconds
SELECT CAST((strftime('%s', import_time) || substr(import_time, 21, 6)) AS INTEGER);
-- Microseconds to datetime (integer division truncates to whole seconds)
SELECT datetime(import_time_us / 1000000, 'unixepoch');
```
## Performance Benefits
The integer timestamp columns provide significant performance improvements:
1. **Faster comparisons**: Integer comparisons are much faster than string/datetime comparisons
2. **Smaller index size**: Integer indexes are more compact than datetime indexes
3. **Range queries**: BETWEEN operations on integers are highly optimized
4. **Math operations**: Easy to calculate time differences, averages, etc.
5. **Sorting**: Integer sorting is faster than datetime sorting
## Backward Compatibility
The original `import_time` TEXT columns remain unchanged:
- Existing code continues to work
- Human-readable timestamps still available
- Gradual migration to new columns possible
- No breaking changes for existing queries
## Future Work
Future improvements could include:
- Migrating queries to use `import_time_us` columns
- Deprecating the TEXT `import_time` columns (after transition period)
- Adding helper functions for timestamp conversion
- Creating views that expose both formats
## Testing
The migration was tested on a production database with:
- 132,466 packet records
- 1,385,659 packet_seen records
- 28,414 traceroute records
All records were successfully migrated with microsecond precision preserved.
## References
- GitHub Issue: #55 - Storing High-Resolution Timestamps in SQLite
- SQLite datetime functions: https://www.sqlite.org/lang_datefunc.html
- Python datetime module: https://docs.python.org/3/library/datetime.html

View File

@@ -1,12 +1,12 @@
import asyncio
from meshview import web
async def main():
async def main():
async with asyncio.TaskGroup() as tg:
tg.create_task(
web.run_server()
)
tg.create_task(web.run_server())
if __name__ == '__main__':
asyncio.run(main())

File diff suppressed because one or more lines are too long

View File

@@ -770,6 +770,7 @@ class SharedContact(google.protobuf.message.Message):
NODE_NUM_FIELD_NUMBER: builtins.int
USER_FIELD_NUMBER: builtins.int
SHOULD_IGNORE_FIELD_NUMBER: builtins.int
MANUALLY_VERIFIED_FIELD_NUMBER: builtins.int
node_num: builtins.int
"""
The node number of the contact
@@ -778,6 +779,10 @@ class SharedContact(google.protobuf.message.Message):
"""
Add this contact to the blocked / ignored list
"""
manually_verified: builtins.bool
"""
Set the IS_KEY_MANUALLY_VERIFIED bit
"""
@property
def user(self) -> meshtastic.protobuf.mesh_pb2.User:
"""
@@ -790,9 +795,10 @@ class SharedContact(google.protobuf.message.Message):
node_num: builtins.int = ...,
user: meshtastic.protobuf.mesh_pb2.User | None = ...,
should_ignore: builtins.bool = ...,
manually_verified: builtins.bool = ...,
) -> None: ...
def HasField(self, field_name: typing.Literal["user", b"user"]) -> builtins.bool: ...
def ClearField(self, field_name: typing.Literal["node_num", b"node_num", "should_ignore", b"should_ignore", "user", b"user"]) -> None: ...
def ClearField(self, field_name: typing.Literal["manually_verified", b"manually_verified", "node_num", b"node_num", "should_ignore", b"should_ignore", "user", b"user"]) -> None: ...
global___SharedContact = SharedContact

View File

@@ -15,14 +15,14 @@ from meshtastic.protobuf import channel_pb2 as meshtastic_dot_protobuf_dot_chann
from meshtastic.protobuf import config_pb2 as meshtastic_dot_protobuf_dot_config__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!meshtastic/protobuf/apponly.proto\x12\x13meshtastic.protobuf\x1a!meshtastic/protobuf/channel.proto\x1a meshtastic/protobuf/config.proto\"\x81\x01\n\nChannelSet\x12\x36\n\x08settings\x18\x01 \x03(\x0b\x32$.meshtastic.protobuf.ChannelSettings\x12;\n\x0blora_config\x18\x02 \x01(\x0b\x32&.meshtastic.protobuf.Config.LoRaConfigBb\n\x13\x63om.geeksville.meshB\rAppOnlyProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!meshtastic/protobuf/apponly.proto\x12\x13meshtastic.protobuf\x1a!meshtastic/protobuf/channel.proto\x1a meshtastic/protobuf/config.proto\"\x81\x01\n\nChannelSet\x12\x36\n\x08settings\x18\x01 \x03(\x0b\x32$.meshtastic.protobuf.ChannelSettings\x12;\n\x0blora_config\x18\x02 \x01(\x0b\x32&.meshtastic.protobuf.Config.LoRaConfigBc\n\x14org.meshtastic.protoB\rAppOnlyProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.apponly_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\rAppOnlyProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\rAppOnlyProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_CHANNELSET']._serialized_start=128
_globals['_CHANNELSET']._serialized_end=257
# @@protoc_insertion_point(module_scope)

View File

@@ -13,14 +13,14 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1emeshtastic/protobuf/atak.proto\x12\x13meshtastic.protobuf\"\xa5\x02\n\tTAKPacket\x12\x15\n\ris_compressed\x18\x01 \x01(\x08\x12-\n\x07\x63ontact\x18\x02 \x01(\x0b\x32\x1c.meshtastic.protobuf.Contact\x12)\n\x05group\x18\x03 \x01(\x0b\x32\x1a.meshtastic.protobuf.Group\x12+\n\x06status\x18\x04 \x01(\x0b\x32\x1b.meshtastic.protobuf.Status\x12\'\n\x03pli\x18\x05 \x01(\x0b\x32\x18.meshtastic.protobuf.PLIH\x00\x12,\n\x04\x63hat\x18\x06 \x01(\x0b\x32\x1c.meshtastic.protobuf.GeoChatH\x00\x12\x10\n\x06\x64\x65tail\x18\x07 \x01(\x0cH\x00\x42\x11\n\x0fpayload_variant\"\\\n\x07GeoChat\x12\x0f\n\x07message\x18\x01 \x01(\t\x12\x0f\n\x02to\x18\x02 \x01(\tH\x00\x88\x01\x01\x12\x18\n\x0bto_callsign\x18\x03 \x01(\tH\x01\x88\x01\x01\x42\x05\n\x03_toB\x0e\n\x0c_to_callsign\"_\n\x05Group\x12-\n\x04role\x18\x01 \x01(\x0e\x32\x1f.meshtastic.protobuf.MemberRole\x12\'\n\x04team\x18\x02 \x01(\x0e\x32\x19.meshtastic.protobuf.Team\"\x19\n\x06Status\x12\x0f\n\x07\x62\x61ttery\x18\x01 \x01(\r\"4\n\x07\x43ontact\x12\x10\n\x08\x63\x61llsign\x18\x01 \x01(\t\x12\x17\n\x0f\x64\x65vice_callsign\x18\x02 \x01(\t\"_\n\x03PLI\x12\x12\n\nlatitude_i\x18\x01 \x01(\x0f\x12\x13\n\x0blongitude_i\x18\x02 \x01(\x0f\x12\x10\n\x08\x61ltitude\x18\x03 \x01(\x05\x12\r\n\x05speed\x18\x04 \x01(\r\x12\x0e\n\x06\x63ourse\x18\x05 
\x01(\r*\xc0\x01\n\x04Team\x12\x14\n\x10Unspecifed_Color\x10\x00\x12\t\n\x05White\x10\x01\x12\n\n\x06Yellow\x10\x02\x12\n\n\x06Orange\x10\x03\x12\x0b\n\x07Magenta\x10\x04\x12\x07\n\x03Red\x10\x05\x12\n\n\x06Maroon\x10\x06\x12\n\n\x06Purple\x10\x07\x12\r\n\tDark_Blue\x10\x08\x12\x08\n\x04\x42lue\x10\t\x12\x08\n\x04\x43yan\x10\n\x12\x08\n\x04Teal\x10\x0b\x12\t\n\x05Green\x10\x0c\x12\x0e\n\nDark_Green\x10\r\x12\t\n\x05\x42rown\x10\x0e*\x7f\n\nMemberRole\x12\x0e\n\nUnspecifed\x10\x00\x12\x0e\n\nTeamMember\x10\x01\x12\x0c\n\x08TeamLead\x10\x02\x12\x06\n\x02HQ\x10\x03\x12\n\n\x06Sniper\x10\x04\x12\t\n\x05Medic\x10\x05\x12\x13\n\x0f\x46orwardObserver\x10\x06\x12\x07\n\x03RTO\x10\x07\x12\x06\n\x02K9\x10\x08\x42_\n\x13\x63om.geeksville.meshB\nATAKProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1emeshtastic/protobuf/atak.proto\x12\x13meshtastic.protobuf\"\xa5\x02\n\tTAKPacket\x12\x15\n\ris_compressed\x18\x01 \x01(\x08\x12-\n\x07\x63ontact\x18\x02 \x01(\x0b\x32\x1c.meshtastic.protobuf.Contact\x12)\n\x05group\x18\x03 \x01(\x0b\x32\x1a.meshtastic.protobuf.Group\x12+\n\x06status\x18\x04 \x01(\x0b\x32\x1b.meshtastic.protobuf.Status\x12\'\n\x03pli\x18\x05 \x01(\x0b\x32\x18.meshtastic.protobuf.PLIH\x00\x12,\n\x04\x63hat\x18\x06 \x01(\x0b\x32\x1c.meshtastic.protobuf.GeoChatH\x00\x12\x10\n\x06\x64\x65tail\x18\x07 \x01(\x0cH\x00\x42\x11\n\x0fpayload_variant\"\\\n\x07GeoChat\x12\x0f\n\x07message\x18\x01 \x01(\t\x12\x0f\n\x02to\x18\x02 \x01(\tH\x00\x88\x01\x01\x12\x18\n\x0bto_callsign\x18\x03 \x01(\tH\x01\x88\x01\x01\x42\x05\n\x03_toB\x0e\n\x0c_to_callsign\"_\n\x05Group\x12-\n\x04role\x18\x01 \x01(\x0e\x32\x1f.meshtastic.protobuf.MemberRole\x12\'\n\x04team\x18\x02 \x01(\x0e\x32\x19.meshtastic.protobuf.Team\"\x19\n\x06Status\x12\x0f\n\x07\x62\x61ttery\x18\x01 \x01(\r\"4\n\x07\x43ontact\x12\x10\n\x08\x63\x61llsign\x18\x01 \x01(\t\x12\x17\n\x0f\x64\x65vice_callsign\x18\x02 \x01(\t\"_\n\x03PLI\x12\x12\n\nlatitude_i\x18\x01 \x01(\x0f\x12\x13\n\x0blongitude_i\x18\x02 \x01(\x0f\x12\x10\n\x08\x61ltitude\x18\x03 \x01(\x05\x12\r\n\x05speed\x18\x04 \x01(\r\x12\x0e\n\x06\x63ourse\x18\x05 
\x01(\r*\xc0\x01\n\x04Team\x12\x14\n\x10Unspecifed_Color\x10\x00\x12\t\n\x05White\x10\x01\x12\n\n\x06Yellow\x10\x02\x12\n\n\x06Orange\x10\x03\x12\x0b\n\x07Magenta\x10\x04\x12\x07\n\x03Red\x10\x05\x12\n\n\x06Maroon\x10\x06\x12\n\n\x06Purple\x10\x07\x12\r\n\tDark_Blue\x10\x08\x12\x08\n\x04\x42lue\x10\t\x12\x08\n\x04\x43yan\x10\n\x12\x08\n\x04Teal\x10\x0b\x12\t\n\x05Green\x10\x0c\x12\x0e\n\nDark_Green\x10\r\x12\t\n\x05\x42rown\x10\x0e*\x7f\n\nMemberRole\x12\x0e\n\nUnspecifed\x10\x00\x12\x0e\n\nTeamMember\x10\x01\x12\x0c\n\x08TeamLead\x10\x02\x12\x06\n\x02HQ\x10\x03\x12\n\n\x06Sniper\x10\x04\x12\t\n\x05Medic\x10\x05\x12\x13\n\x0f\x46orwardObserver\x10\x06\x12\x07\n\x03RTO\x10\x07\x12\x06\n\x02K9\x10\x08\x42`\n\x14org.meshtastic.protoB\nATAKProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.atak_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\nATAKProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\nATAKProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_TEAM']._serialized_start=721
_globals['_TEAM']._serialized_end=913
_globals['_MEMBERROLE']._serialized_start=915

View File

@@ -13,14 +13,14 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(meshtastic/protobuf/cannedmessages.proto\x12\x13meshtastic.protobuf\"-\n\x19\x43\x61nnedMessageModuleConfig\x12\x10\n\x08messages\x18\x01 \x01(\tBn\n\x13\x63om.geeksville.meshB\x19\x43\x61nnedMessageConfigProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(meshtastic/protobuf/cannedmessages.proto\x12\x13meshtastic.protobuf\"-\n\x19\x43\x61nnedMessageModuleConfig\x12\x10\n\x08messages\x18\x01 \x01(\tBo\n\x14org.meshtastic.protoB\x19\x43\x61nnedMessageConfigProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.cannedmessages_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\031CannedMessageConfigProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\031CannedMessageConfigProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_CANNEDMESSAGEMODULECONFIG']._serialized_start=65
_globals['_CANNEDMESSAGEMODULECONFIG']._serialized_end=110
# @@protoc_insertion_point(module_scope)

View File

@@ -13,22 +13,22 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!meshtastic/protobuf/channel.proto\x12\x13meshtastic.protobuf\"\xc1\x01\n\x0f\x43hannelSettings\x12\x17\n\x0b\x63hannel_num\x18\x01 \x01(\rB\x02\x18\x01\x12\x0b\n\x03psk\x18\x02 \x01(\x0c\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\n\n\x02id\x18\x04 \x01(\x07\x12\x16\n\x0euplink_enabled\x18\x05 \x01(\x08\x12\x18\n\x10\x64ownlink_enabled\x18\x06 \x01(\x08\x12<\n\x0fmodule_settings\x18\x07 \x01(\x0b\x32#.meshtastic.protobuf.ModuleSettings\"E\n\x0eModuleSettings\x12\x1a\n\x12position_precision\x18\x01 \x01(\r\x12\x17\n\x0fis_client_muted\x18\x02 \x01(\x08\"\xb3\x01\n\x07\x43hannel\x12\r\n\x05index\x18\x01 \x01(\x05\x12\x36\n\x08settings\x18\x02 \x01(\x0b\x32$.meshtastic.protobuf.ChannelSettings\x12/\n\x04role\x18\x03 \x01(\x0e\x32!.meshtastic.protobuf.Channel.Role\"0\n\x04Role\x12\x0c\n\x08\x44ISABLED\x10\x00\x12\x0b\n\x07PRIMARY\x10\x01\x12\r\n\tSECONDARY\x10\x02\x42\x62\n\x13\x63om.geeksville.meshB\rChannelProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!meshtastic/protobuf/channel.proto\x12\x13meshtastic.protobuf\"\xc1\x01\n\x0f\x43hannelSettings\x12\x17\n\x0b\x63hannel_num\x18\x01 \x01(\rB\x02\x18\x01\x12\x0b\n\x03psk\x18\x02 \x01(\x0c\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\n\n\x02id\x18\x04 \x01(\x07\x12\x16\n\x0euplink_enabled\x18\x05 \x01(\x08\x12\x18\n\x10\x64ownlink_enabled\x18\x06 \x01(\x08\x12<\n\x0fmodule_settings\x18\x07 \x01(\x0b\x32#.meshtastic.protobuf.ModuleSettings\">\n\x0eModuleSettings\x12\x1a\n\x12position_precision\x18\x01 \x01(\r\x12\x10\n\x08is_muted\x18\x02 \x01(\x08\"\xb3\x01\n\x07\x43hannel\x12\r\n\x05index\x18\x01 \x01(\x05\x12\x36\n\x08settings\x18\x02 \x01(\x0b\x32$.meshtastic.protobuf.ChannelSettings\x12/\n\x04role\x18\x03 \x01(\x0e\x32!.meshtastic.protobuf.Channel.Role\"0\n\x04Role\x12\x0c\n\x08\x44ISABLED\x10\x00\x12\x0b\n\x07PRIMARY\x10\x01\x12\r\n\tSECONDARY\x10\x02\x42\x63\n\x14org.meshtastic.protoB\rChannelProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.channel_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\rChannelProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\rChannelProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_CHANNELSETTINGS.fields_by_name['channel_num']._options = None
_CHANNELSETTINGS.fields_by_name['channel_num']._serialized_options = b'\030\001'
_globals['_CHANNELSETTINGS']._serialized_start=59
_globals['_CHANNELSETTINGS']._serialized_end=252
_globals['_MODULESETTINGS']._serialized_start=254
_globals['_MODULESETTINGS']._serialized_end=323
_globals['_CHANNEL']._serialized_start=326
_globals['_CHANNEL']._serialized_end=505
_globals['_CHANNEL_ROLE']._serialized_start=457
_globals['_CHANNEL_ROLE']._serialized_end=505
_globals['_MODULESETTINGS']._serialized_end=316
_globals['_CHANNEL']._serialized_start=319
_globals['_CHANNEL']._serialized_end=498
_globals['_CHANNEL_ROLE']._serialized_start=450
_globals['_CHANNEL_ROLE']._serialized_end=498
# @@protoc_insertion_point(module_scope)

View File

@@ -127,23 +127,23 @@ class ModuleSettings(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
POSITION_PRECISION_FIELD_NUMBER: builtins.int
IS_CLIENT_MUTED_FIELD_NUMBER: builtins.int
IS_MUTED_FIELD_NUMBER: builtins.int
position_precision: builtins.int
"""
Bits of precision for the location sent in position packets.
"""
is_client_muted: builtins.bool
is_muted: builtins.bool
"""
Controls whether or not the phone / clients should mute the current channel
Controls whether or not the client / device should mute the current channel
Useful for noisy public channels you don't necessarily want to disable
"""
def __init__(
self,
*,
position_precision: builtins.int = ...,
is_client_muted: builtins.bool = ...,
is_muted: builtins.bool = ...,
) -> None: ...
def ClearField(self, field_name: typing.Literal["is_client_muted", b"is_client_muted", "position_precision", b"position_precision"]) -> None: ...
def ClearField(self, field_name: typing.Literal["is_muted", b"is_muted", "position_precision", b"position_precision"]) -> None: ...
global___ModuleSettings = ModuleSettings

View File

@@ -15,14 +15,14 @@ from meshtastic.protobuf import localonly_pb2 as meshtastic_dot_protobuf_dot_loc
from meshtastic.protobuf import mesh_pb2 as meshtastic_dot_protobuf_dot_mesh__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$meshtastic/protobuf/clientonly.proto\x12\x13meshtastic.protobuf\x1a#meshtastic/protobuf/localonly.proto\x1a\x1emeshtastic/protobuf/mesh.proto\"\xc4\x03\n\rDeviceProfile\x12\x16\n\tlong_name\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x17\n\nshort_name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hannel_url\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x35\n\x06\x63onfig\x18\x04 \x01(\x0b\x32 .meshtastic.protobuf.LocalConfigH\x03\x88\x01\x01\x12\x42\n\rmodule_config\x18\x05 \x01(\x0b\x32&.meshtastic.protobuf.LocalModuleConfigH\x04\x88\x01\x01\x12:\n\x0e\x66ixed_position\x18\x06 \x01(\x0b\x32\x1d.meshtastic.protobuf.PositionH\x05\x88\x01\x01\x12\x15\n\x08ringtone\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\x1c\n\x0f\x63\x61nned_messages\x18\x08 \x01(\tH\x07\x88\x01\x01\x42\x0c\n\n_long_nameB\r\n\x0b_short_nameB\x0e\n\x0c_channel_urlB\t\n\x07_configB\x10\n\x0e_module_configB\x11\n\x0f_fixed_positionB\x0b\n\t_ringtoneB\x12\n\x10_canned_messagesBe\n\x13\x63om.geeksville.meshB\x10\x43lientOnlyProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$meshtastic/protobuf/clientonly.proto\x12\x13meshtastic.protobuf\x1a#meshtastic/protobuf/localonly.proto\x1a\x1emeshtastic/protobuf/mesh.proto\"\xc4\x03\n\rDeviceProfile\x12\x16\n\tlong_name\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x17\n\nshort_name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hannel_url\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x35\n\x06\x63onfig\x18\x04 \x01(\x0b\x32 .meshtastic.protobuf.LocalConfigH\x03\x88\x01\x01\x12\x42\n\rmodule_config\x18\x05 \x01(\x0b\x32&.meshtastic.protobuf.LocalModuleConfigH\x04\x88\x01\x01\x12:\n\x0e\x66ixed_position\x18\x06 \x01(\x0b\x32\x1d.meshtastic.protobuf.PositionH\x05\x88\x01\x01\x12\x15\n\x08ringtone\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\x1c\n\x0f\x63\x61nned_messages\x18\x08 \x01(\tH\x07\x88\x01\x01\x42\x0c\n\n_long_nameB\r\n\x0b_short_nameB\x0e\n\x0c_channel_urlB\t\n\x07_configB\x10\n\x0e_module_configB\x11\n\x0f_fixed_positionB\x0b\n\t_ringtoneB\x12\n\x10_canned_messagesBf\n\x14org.meshtastic.protoB\x10\x43lientOnlyProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.clientonly_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\020ClientOnlyProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\020ClientOnlyProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_DEVICEPROFILE']._serialized_start=131
_globals['_DEVICEPROFILE']._serialized_end=583
# @@protoc_insertion_point(module_scope)

File diff suppressed because one or more lines are too long

View File

@@ -64,6 +64,7 @@ class Config(google.protobuf.message.Message):
Description: Infrastructure node for extending network coverage by relaying messages with minimal overhead. Not visible in Nodes list.
Technical Details: Mesh packets will simply be rebroadcasted over this node. Nodes configured with this role will not originate NodeInfo, Position, Telemetry
or any other packet type. They will simply rebroadcast any mesh packets on the same frequency, channel num, spread factor, and coding rate.
Deprecated in v2.7.11 because it creates "holes" in the mesh rebroadcast chain.
"""
TRACKER: Config.DeviceConfig._Role.ValueType # 5
"""
@@ -116,6 +117,13 @@ class Config(google.protobuf.message.Message):
but should not be given priority over other routers in order to avoid unnecessaraily
consuming hops.
"""
CLIENT_BASE: Config.DeviceConfig._Role.ValueType # 12
"""
Description: Treats packets from or to favorited nodes as ROUTER, and all other packets as CLIENT.
Technical Details: Used for stronger attic/roof nodes to distribute messages more widely
from weaker, indoor, or less-well-positioned nodes. Recommended for users with multiple nodes
where one CLIENT_BASE acts as a more powerful base station, such as an attic/roof node.
"""
class Role(_Role, metaclass=_RoleEnumTypeWrapper):
"""
@@ -148,6 +156,7 @@ class Config(google.protobuf.message.Message):
Description: Infrastructure node for extending network coverage by relaying messages with minimal overhead. Not visible in Nodes list.
Technical Details: Mesh packets will simply be rebroadcasted over this node. Nodes configured with this role will not originate NodeInfo, Position, Telemetry
or any other packet type. They will simply rebroadcast any mesh packets on the same frequency, channel num, spread factor, and coding rate.
Deprecated in v2.7.11 because it creates "holes" in the mesh rebroadcast chain.
"""
TRACKER: Config.DeviceConfig.Role.ValueType # 5
"""
@@ -200,6 +209,13 @@ class Config(google.protobuf.message.Message):
but should not be given priority over other routers in order to avoid unnecessaraily
consuming hops.
"""
CLIENT_BASE: Config.DeviceConfig.Role.ValueType # 12
"""
Description: Treats packets from or to favorited nodes as ROUTER, and all other packets as CLIENT.
Technical Details: Used for stronger attic/roof nodes to distribute messages more widely
from weaker, indoor, or less-well-positioned nodes. Recommended for users with multiple nodes
where one CLIENT_BASE acts as a more powerful base station, such as an attic/roof node.
"""
class _RebroadcastMode:
ValueType = typing.NewType("ValueType", builtins.int)
@@ -924,80 +940,20 @@ class Config(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _GpsCoordinateFormat:
class _DeprecatedGpsCoordinateFormat:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _GpsCoordinateFormatEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Config.DisplayConfig._GpsCoordinateFormat.ValueType], builtins.type):
class _DeprecatedGpsCoordinateFormatEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Config.DisplayConfig._DeprecatedGpsCoordinateFormat.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
DEC: Config.DisplayConfig._GpsCoordinateFormat.ValueType # 0
UNUSED: Config.DisplayConfig._DeprecatedGpsCoordinateFormat.ValueType # 0
class DeprecatedGpsCoordinateFormat(_DeprecatedGpsCoordinateFormat, metaclass=_DeprecatedGpsCoordinateFormatEnumTypeWrapper):
"""
GPS coordinates are displayed in the normal decimal degrees format:
DD.DDDDDD DDD.DDDDDD
"""
DMS: Config.DisplayConfig._GpsCoordinateFormat.ValueType # 1
"""
GPS coordinates are displayed in the degrees minutes seconds format:
DD°MM'SS"C DDD°MM'SS"C, where C is the compass point representing the locations quadrant
"""
UTM: Config.DisplayConfig._GpsCoordinateFormat.ValueType # 2
"""
Universal Transverse Mercator format:
ZZB EEEEEE NNNNNNN, where Z is zone, B is band, E is easting, N is northing
"""
MGRS: Config.DisplayConfig._GpsCoordinateFormat.ValueType # 3
"""
Military Grid Reference System format:
ZZB CD EEEEE NNNNN, where Z is zone, B is band, C is the east 100k square, D is the north 100k square,
E is easting, N is northing
"""
OLC: Config.DisplayConfig._GpsCoordinateFormat.ValueType # 4
"""
Open Location Code (aka Plus Codes).
"""
OSGR: Config.DisplayConfig._GpsCoordinateFormat.ValueType # 5
"""
Ordnance Survey Grid Reference (the National Grid System of the UK).
Format: AB EEEEE NNNNN, where A is the east 100k square, B is the north 100k square,
E is the easting, N is the northing
Deprecated in 2.7.4: Unused
"""
class GpsCoordinateFormat(_GpsCoordinateFormat, metaclass=_GpsCoordinateFormatEnumTypeWrapper):
"""
How the GPS coordinates are displayed on the OLED screen.
"""
DEC: Config.DisplayConfig.GpsCoordinateFormat.ValueType # 0
"""
GPS coordinates are displayed in the normal decimal degrees format:
DD.DDDDDD DDD.DDDDDD
"""
DMS: Config.DisplayConfig.GpsCoordinateFormat.ValueType # 1
"""
GPS coordinates are displayed in the degrees minutes seconds format:
DD°MM'SS"C DDD°MM'SS"C, where C is the compass point representing the locations quadrant
"""
UTM: Config.DisplayConfig.GpsCoordinateFormat.ValueType # 2
"""
Universal Transverse Mercator format:
ZZB EEEEEE NNNNNNN, where Z is zone, B is band, E is easting, N is northing
"""
MGRS: Config.DisplayConfig.GpsCoordinateFormat.ValueType # 3
"""
Military Grid Reference System format:
ZZB CD EEEEE NNNNN, where Z is zone, B is band, C is the east 100k square, D is the north 100k square,
E is easting, N is northing
"""
OLC: Config.DisplayConfig.GpsCoordinateFormat.ValueType # 4
"""
Open Location Code (aka Plus Codes).
"""
OSGR: Config.DisplayConfig.GpsCoordinateFormat.ValueType # 5
"""
Ordnance Survey Grid Reference (the National Grid System of the UK).
Format: AB EEEEE NNNNN, where A is the east 100k square, B is the north 100k square,
E is the easting, N is the northing
"""
UNUSED: Config.DisplayConfig.DeprecatedGpsCoordinateFormat.ValueType # 0
class _DisplayUnits:
ValueType = typing.NewType("ValueType", builtins.int)
@@ -1048,12 +1004,12 @@ class Config(google.protobuf.message.Message):
"""
OLED_SH1107: Config.DisplayConfig._OledType.ValueType # 3
"""
Can not be auto detected but set by proto. Used for 128x128 screens
"""
OLED_SH1107_128_64: Config.DisplayConfig._OledType.ValueType # 4
"""
Can not be auto detected but set by proto. Used for 128x64 screens
"""
OLED_SH1107_128_128: Config.DisplayConfig._OledType.ValueType # 4
"""
Can not be auto detected but set by proto. Used for 128x128 screens
"""
class OledType(_OledType, metaclass=_OledTypeEnumTypeWrapper):
"""
@@ -1074,12 +1030,12 @@ class Config(google.protobuf.message.Message):
"""
OLED_SH1107: Config.DisplayConfig.OledType.ValueType # 3
"""
Can not be auto detected but set by proto. Used for 128x128 screens
"""
OLED_SH1107_128_64: Config.DisplayConfig.OledType.ValueType # 4
"""
Can not be auto detected but set by proto. Used for 128x64 screens
"""
OLED_SH1107_128_128: Config.DisplayConfig.OledType.ValueType # 4
"""
Can not be auto detected but set by proto. Used for 128x128 screens
"""
class _DisplayMode:
ValueType = typing.NewType("ValueType", builtins.int)
@@ -1207,12 +1163,13 @@ class Config(google.protobuf.message.Message):
WAKE_ON_TAP_OR_MOTION_FIELD_NUMBER: builtins.int
COMPASS_ORIENTATION_FIELD_NUMBER: builtins.int
USE_12H_CLOCK_FIELD_NUMBER: builtins.int
USE_LONG_NODE_NAME_FIELD_NUMBER: builtins.int
screen_on_secs: builtins.int
"""
Number of seconds the screen stays on after pressing the user button or receiving a message
0 for default of one minute MAXUINT for always on
"""
gps_format: global___Config.DisplayConfig.GpsCoordinateFormat.ValueType
gps_format: global___Config.DisplayConfig.DeprecatedGpsCoordinateFormat.ValueType
"""
Deprecated in 2.7.4: Unused
How the GPS coordinates are formatted on the OLED screen.
@@ -1260,11 +1217,16 @@ class Config(google.protobuf.message.Message):
If false (default), the device will display the time in 24-hour format on screen.
If true, the device will display the time in 12-hour format on screen.
"""
use_long_node_name: builtins.bool
"""
If false (default), the device will use short names for various display screens.
If true, node names will show in long format
"""
def __init__(
self,
*,
screen_on_secs: builtins.int = ...,
gps_format: global___Config.DisplayConfig.GpsCoordinateFormat.ValueType = ...,
gps_format: global___Config.DisplayConfig.DeprecatedGpsCoordinateFormat.ValueType = ...,
auto_screen_carousel_secs: builtins.int = ...,
compass_north_top: builtins.bool = ...,
flip_screen: builtins.bool = ...,
@@ -1275,8 +1237,9 @@ class Config(google.protobuf.message.Message):
wake_on_tap_or_motion: builtins.bool = ...,
compass_orientation: global___Config.DisplayConfig.CompassOrientation.ValueType = ...,
use_12h_clock: builtins.bool = ...,
use_long_node_name: builtins.bool = ...,
) -> None: ...
def ClearField(self, field_name: typing.Literal["auto_screen_carousel_secs", b"auto_screen_carousel_secs", "compass_north_top", b"compass_north_top", "compass_orientation", b"compass_orientation", "displaymode", b"displaymode", "flip_screen", b"flip_screen", "gps_format", b"gps_format", "heading_bold", b"heading_bold", "oled", b"oled", "screen_on_secs", b"screen_on_secs", "units", b"units", "use_12h_clock", b"use_12h_clock", "wake_on_tap_or_motion", b"wake_on_tap_or_motion"]) -> None: ...
def ClearField(self, field_name: typing.Literal["auto_screen_carousel_secs", b"auto_screen_carousel_secs", "compass_north_top", b"compass_north_top", "compass_orientation", b"compass_orientation", "displaymode", b"displaymode", "flip_screen", b"flip_screen", "gps_format", b"gps_format", "heading_bold", b"heading_bold", "oled", b"oled", "screen_on_secs", b"screen_on_secs", "units", b"units", "use_12h_clock", b"use_12h_clock", "use_long_node_name", b"use_long_node_name", "wake_on_tap_or_motion", b"wake_on_tap_or_motion"]) -> None: ...
@typing.final
class LoRaConfig(google.protobuf.message.Message):

View File

@@ -13,14 +13,14 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n+meshtastic/protobuf/connection_status.proto\x12\x13meshtastic.protobuf\"\xd5\x02\n\x16\x44\x65viceConnectionStatus\x12<\n\x04wifi\x18\x01 \x01(\x0b\x32).meshtastic.protobuf.WifiConnectionStatusH\x00\x88\x01\x01\x12\x44\n\x08\x65thernet\x18\x02 \x01(\x0b\x32-.meshtastic.protobuf.EthernetConnectionStatusH\x01\x88\x01\x01\x12\x46\n\tbluetooth\x18\x03 \x01(\x0b\x32..meshtastic.protobuf.BluetoothConnectionStatusH\x02\x88\x01\x01\x12@\n\x06serial\x18\x04 \x01(\x0b\x32+.meshtastic.protobuf.SerialConnectionStatusH\x03\x88\x01\x01\x42\x07\n\x05_wifiB\x0b\n\t_ethernetB\x0c\n\n_bluetoothB\t\n\x07_serial\"p\n\x14WifiConnectionStatus\x12<\n\x06status\x18\x01 \x01(\x0b\x32,.meshtastic.protobuf.NetworkConnectionStatus\x12\x0c\n\x04ssid\x18\x02 \x01(\t\x12\x0c\n\x04rssi\x18\x03 \x01(\x05\"X\n\x18\x45thernetConnectionStatus\x12<\n\x06status\x18\x01 \x01(\x0b\x32,.meshtastic.protobuf.NetworkConnectionStatus\"{\n\x17NetworkConnectionStatus\x12\x12\n\nip_address\x18\x01 \x01(\x07\x12\x14\n\x0cis_connected\x18\x02 \x01(\x08\x12\x19\n\x11is_mqtt_connected\x18\x03 \x01(\x08\x12\x1b\n\x13is_syslog_connected\x18\x04 \x01(\x08\"L\n\x19\x42luetoothConnectionStatus\x12\x0b\n\x03pin\x18\x01 \x01(\r\x12\x0c\n\x04rssi\x18\x02 \x01(\x05\x12\x14\n\x0cis_connected\x18\x03 \x01(\x08\"<\n\x16SerialConnectionStatus\x12\x0c\n\x04\x62\x61ud\x18\x01 \x01(\r\x12\x14\n\x0cis_connected\x18\x02 \x01(\x08\x42\x65\n\x13\x63om.geeksville.meshB\x10\x43onnStatusProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n+meshtastic/protobuf/connection_status.proto\x12\x13meshtastic.protobuf\"\xd5\x02\n\x16\x44\x65viceConnectionStatus\x12<\n\x04wifi\x18\x01 \x01(\x0b\x32).meshtastic.protobuf.WifiConnectionStatusH\x00\x88\x01\x01\x12\x44\n\x08\x65thernet\x18\x02 \x01(\x0b\x32-.meshtastic.protobuf.EthernetConnectionStatusH\x01\x88\x01\x01\x12\x46\n\tbluetooth\x18\x03 \x01(\x0b\x32..meshtastic.protobuf.BluetoothConnectionStatusH\x02\x88\x01\x01\x12@\n\x06serial\x18\x04 \x01(\x0b\x32+.meshtastic.protobuf.SerialConnectionStatusH\x03\x88\x01\x01\x42\x07\n\x05_wifiB\x0b\n\t_ethernetB\x0c\n\n_bluetoothB\t\n\x07_serial\"p\n\x14WifiConnectionStatus\x12<\n\x06status\x18\x01 \x01(\x0b\x32,.meshtastic.protobuf.NetworkConnectionStatus\x12\x0c\n\x04ssid\x18\x02 \x01(\t\x12\x0c\n\x04rssi\x18\x03 \x01(\x05\"X\n\x18\x45thernetConnectionStatus\x12<\n\x06status\x18\x01 \x01(\x0b\x32,.meshtastic.protobuf.NetworkConnectionStatus\"{\n\x17NetworkConnectionStatus\x12\x12\n\nip_address\x18\x01 \x01(\x07\x12\x14\n\x0cis_connected\x18\x02 \x01(\x08\x12\x19\n\x11is_mqtt_connected\x18\x03 \x01(\x08\x12\x1b\n\x13is_syslog_connected\x18\x04 \x01(\x08\"L\n\x19\x42luetoothConnectionStatus\x12\x0b\n\x03pin\x18\x01 \x01(\r\x12\x0c\n\x04rssi\x18\x02 \x01(\x05\x12\x14\n\x0cis_connected\x18\x03 \x01(\x08\"<\n\x16SerialConnectionStatus\x12\x0c\n\x04\x62\x61ud\x18\x01 \x01(\r\x12\x14\n\x0cis_connected\x18\x02 \x01(\x08\x42\x66\n\x14org.meshtastic.protoB\x10\x43onnStatusProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.connection_status_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\020ConnStatusProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\020ConnStatusProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_DEVICECONNECTIONSTATUS']._serialized_start=69
_globals['_DEVICECONNECTIONSTATUS']._serialized_end=410
_globals['_WIFICONNECTIONSTATUS']._serialized_start=412

View File

@@ -13,28 +13,30 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n#meshtastic/protobuf/device_ui.proto\x12\x13meshtastic.protobuf\"\xda\x04\n\x0e\x44\x65viceUIConfig\x12\x0f\n\x07version\x18\x01 \x01(\r\x12\x19\n\x11screen_brightness\x18\x02 \x01(\r\x12\x16\n\x0escreen_timeout\x18\x03 \x01(\r\x12\x13\n\x0bscreen_lock\x18\x04 \x01(\x08\x12\x15\n\rsettings_lock\x18\x05 \x01(\x08\x12\x10\n\x08pin_code\x18\x06 \x01(\r\x12)\n\x05theme\x18\x07 \x01(\x0e\x32\x1a.meshtastic.protobuf.Theme\x12\x15\n\ralert_enabled\x18\x08 \x01(\x08\x12\x16\n\x0e\x62\x61nner_enabled\x18\t \x01(\x08\x12\x14\n\x0cring_tone_id\x18\n \x01(\r\x12/\n\x08language\x18\x0b \x01(\x0e\x32\x1d.meshtastic.protobuf.Language\x12\x34\n\x0bnode_filter\x18\x0c \x01(\x0b\x32\x1f.meshtastic.protobuf.NodeFilter\x12:\n\x0enode_highlight\x18\r \x01(\x0b\x32\".meshtastic.protobuf.NodeHighlight\x12\x18\n\x10\x63\x61libration_data\x18\x0e \x01(\x0c\x12*\n\x08map_data\x18\x0f \x01(\x0b\x32\x18.meshtastic.protobuf.Map\x12\x36\n\x0c\x63ompass_mode\x18\x10 \x01(\x0e\x32 .meshtastic.protobuf.CompassMode\x12\x18\n\x10screen_rgb_color\x18\x11 \x01(\r\x12\x1b\n\x13is_clockface_analog\x18\x12 \x01(\x08\"\xa7\x01\n\nNodeFilter\x12\x16\n\x0eunknown_switch\x18\x01 \x01(\x08\x12\x16\n\x0eoffline_switch\x18\x02 \x01(\x08\x12\x19\n\x11public_key_switch\x18\x03 \x01(\x08\x12\x11\n\thops_away\x18\x04 \x01(\x05\x12\x17\n\x0fposition_switch\x18\x05 \x01(\x08\x12\x11\n\tnode_name\x18\x06 \x01(\t\x12\x0f\n\x07\x63hannel\x18\x07 \x01(\x05\"~\n\rNodeHighlight\x12\x13\n\x0b\x63hat_switch\x18\x01 \x01(\x08\x12\x17\n\x0fposition_switch\x18\x02 \x01(\x08\x12\x18\n\x10telemetry_switch\x18\x03 \x01(\x08\x12\x12\n\niaq_switch\x18\x04 \x01(\x08\x12\x11\n\tnode_name\x18\x05 \x01(\t\"=\n\x08GeoPoint\x12\x0c\n\x04zoom\x18\x01 \x01(\x05\x12\x10\n\x08latitude\x18\x02 \x01(\x05\x12\x11\n\tlongitude\x18\x03 \x01(\x05\"U\n\x03Map\x12+\n\x04home\x18\x01 \x01(\x0b\x32\x1d.meshtastic.protobuf.GeoPoint\x12\r\n\x05style\x18\x02 
\x01(\t\x12\x12\n\nfollow_gps\x18\x03 \x01(\x08*>\n\x0b\x43ompassMode\x12\x0b\n\x07\x44YNAMIC\x10\x00\x12\x0e\n\nFIXED_RING\x10\x01\x12\x12\n\x0e\x46REEZE_HEADING\x10\x02*%\n\x05Theme\x12\x08\n\x04\x44\x41RK\x10\x00\x12\t\n\x05LIGHT\x10\x01\x12\x07\n\x03RED\x10\x02*\xa9\x02\n\x08Language\x12\x0b\n\x07\x45NGLISH\x10\x00\x12\n\n\x06\x46RENCH\x10\x01\x12\n\n\x06GERMAN\x10\x02\x12\x0b\n\x07ITALIAN\x10\x03\x12\x0e\n\nPORTUGUESE\x10\x04\x12\x0b\n\x07SPANISH\x10\x05\x12\x0b\n\x07SWEDISH\x10\x06\x12\x0b\n\x07\x46INNISH\x10\x07\x12\n\n\x06POLISH\x10\x08\x12\x0b\n\x07TURKISH\x10\t\x12\x0b\n\x07SERBIAN\x10\n\x12\x0b\n\x07RUSSIAN\x10\x0b\x12\t\n\x05\x44UTCH\x10\x0c\x12\t\n\x05GREEK\x10\r\x12\r\n\tNORWEGIAN\x10\x0e\x12\r\n\tSLOVENIAN\x10\x0f\x12\r\n\tUKRAINIAN\x10\x10\x12\r\n\tBULGARIAN\x10\x11\x12\x16\n\x12SIMPLIFIED_CHINESE\x10\x1e\x12\x17\n\x13TRADITIONAL_CHINESE\x10\x1f\x42\x63\n\x13\x63om.geeksville.meshB\x0e\x44\x65viceUIProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n#meshtastic/protobuf/device_ui.proto\x12\x13meshtastic.protobuf\"\xff\x05\n\x0e\x44\x65viceUIConfig\x12\x0f\n\x07version\x18\x01 \x01(\r\x12\x19\n\x11screen_brightness\x18\x02 \x01(\r\x12\x16\n\x0escreen_timeout\x18\x03 \x01(\r\x12\x13\n\x0bscreen_lock\x18\x04 \x01(\x08\x12\x15\n\rsettings_lock\x18\x05 \x01(\x08\x12\x10\n\x08pin_code\x18\x06 \x01(\r\x12)\n\x05theme\x18\x07 \x01(\x0e\x32\x1a.meshtastic.protobuf.Theme\x12\x15\n\ralert_enabled\x18\x08 \x01(\x08\x12\x16\n\x0e\x62\x61nner_enabled\x18\t \x01(\x08\x12\x14\n\x0cring_tone_id\x18\n \x01(\r\x12/\n\x08language\x18\x0b \x01(\x0e\x32\x1d.meshtastic.protobuf.Language\x12\x34\n\x0bnode_filter\x18\x0c \x01(\x0b\x32\x1f.meshtastic.protobuf.NodeFilter\x12:\n\x0enode_highlight\x18\r \x01(\x0b\x32\".meshtastic.protobuf.NodeHighlight\x12\x18\n\x10\x63\x61libration_data\x18\x0e \x01(\x0c\x12*\n\x08map_data\x18\x0f \x01(\x0b\x32\x18.meshtastic.protobuf.Map\x12\x36\n\x0c\x63ompass_mode\x18\x10 \x01(\x0e\x32 .meshtastic.protobuf.CompassMode\x12\x18\n\x10screen_rgb_color\x18\x11 \x01(\r\x12\x1b\n\x13is_clockface_analog\x18\x12 \x01(\x08\x12K\n\ngps_format\x18\x13 \x01(\x0e\x32\x37.meshtastic.protobuf.DeviceUIConfig.GpsCoordinateFormat\"V\n\x13GpsCoordinateFormat\x12\x07\n\x03\x44\x45\x43\x10\x00\x12\x07\n\x03\x44MS\x10\x01\x12\x07\n\x03UTM\x10\x02\x12\x08\n\x04MGRS\x10\x03\x12\x07\n\x03OLC\x10\x04\x12\x08\n\x04OSGR\x10\x05\x12\x07\n\x03MLS\x10\x06\"\xa7\x01\n\nNodeFilter\x12\x16\n\x0eunknown_switch\x18\x01 \x01(\x08\x12\x16\n\x0eoffline_switch\x18\x02 \x01(\x08\x12\x19\n\x11public_key_switch\x18\x03 \x01(\x08\x12\x11\n\thops_away\x18\x04 \x01(\x05\x12\x17\n\x0fposition_switch\x18\x05 \x01(\x08\x12\x11\n\tnode_name\x18\x06 \x01(\t\x12\x0f\n\x07\x63hannel\x18\x07 \x01(\x05\"~\n\rNodeHighlight\x12\x13\n\x0b\x63hat_switch\x18\x01 \x01(\x08\x12\x17\n\x0fposition_switch\x18\x02 \x01(\x08\x12\x18\n\x10telemetry_switch\x18\x03 \x01(\x08\x12\x12\n\niaq_switch\x18\x04 
\x01(\x08\x12\x11\n\tnode_name\x18\x05 \x01(\t\"=\n\x08GeoPoint\x12\x0c\n\x04zoom\x18\x01 \x01(\x05\x12\x10\n\x08latitude\x18\x02 \x01(\x05\x12\x11\n\tlongitude\x18\x03 \x01(\x05\"U\n\x03Map\x12+\n\x04home\x18\x01 \x01(\x0b\x32\x1d.meshtastic.protobuf.GeoPoint\x12\r\n\x05style\x18\x02 \x01(\t\x12\x12\n\nfollow_gps\x18\x03 \x01(\x08*>\n\x0b\x43ompassMode\x12\x0b\n\x07\x44YNAMIC\x10\x00\x12\x0e\n\nFIXED_RING\x10\x01\x12\x12\n\x0e\x46REEZE_HEADING\x10\x02*%\n\x05Theme\x12\x08\n\x04\x44\x41RK\x10\x00\x12\t\n\x05LIGHT\x10\x01\x12\x07\n\x03RED\x10\x02*\xc0\x02\n\x08Language\x12\x0b\n\x07\x45NGLISH\x10\x00\x12\n\n\x06\x46RENCH\x10\x01\x12\n\n\x06GERMAN\x10\x02\x12\x0b\n\x07ITALIAN\x10\x03\x12\x0e\n\nPORTUGUESE\x10\x04\x12\x0b\n\x07SPANISH\x10\x05\x12\x0b\n\x07SWEDISH\x10\x06\x12\x0b\n\x07\x46INNISH\x10\x07\x12\n\n\x06POLISH\x10\x08\x12\x0b\n\x07TURKISH\x10\t\x12\x0b\n\x07SERBIAN\x10\n\x12\x0b\n\x07RUSSIAN\x10\x0b\x12\t\n\x05\x44UTCH\x10\x0c\x12\t\n\x05GREEK\x10\r\x12\r\n\tNORWEGIAN\x10\x0e\x12\r\n\tSLOVENIAN\x10\x0f\x12\r\n\tUKRAINIAN\x10\x10\x12\r\n\tBULGARIAN\x10\x11\x12\t\n\x05\x43ZECH\x10\x12\x12\n\n\x06\x44\x41NISH\x10\x13\x12\x16\n\x12SIMPLIFIED_CHINESE\x10\x1e\x12\x17\n\x13TRADITIONAL_CHINESE\x10\x1f\x42\x64\n\x14org.meshtastic.protoB\x0e\x44\x65viceUIProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.device_ui_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\016DeviceUIProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_COMPASSMODE']._serialized_start=1113
_globals['_COMPASSMODE']._serialized_end=1175
_globals['_THEME']._serialized_start=1177
_globals['_THEME']._serialized_end=1214
_globals['_LANGUAGE']._serialized_start=1217
_globals['_LANGUAGE']._serialized_end=1514
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\016DeviceUIProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_COMPASSMODE']._serialized_start=1278
_globals['_COMPASSMODE']._serialized_end=1340
_globals['_THEME']._serialized_start=1342
_globals['_THEME']._serialized_end=1379
_globals['_LANGUAGE']._serialized_start=1382
_globals['_LANGUAGE']._serialized_end=1702
_globals['_DEVICEUICONFIG']._serialized_start=61
_globals['_DEVICEUICONFIG']._serialized_end=663
_globals['_NODEFILTER']._serialized_start=666
_globals['_NODEFILTER']._serialized_end=833
_globals['_NODEHIGHLIGHT']._serialized_start=835
_globals['_NODEHIGHLIGHT']._serialized_end=961
_globals['_GEOPOINT']._serialized_start=963
_globals['_GEOPOINT']._serialized_end=1024
_globals['_MAP']._serialized_start=1026
_globals['_MAP']._serialized_end=1111
_globals['_DEVICEUICONFIG']._serialized_end=828
_globals['_DEVICEUICONFIG_GPSCOORDINATEFORMAT']._serialized_start=742
_globals['_DEVICEUICONFIG_GPSCOORDINATEFORMAT']._serialized_end=828
_globals['_NODEFILTER']._serialized_start=831
_globals['_NODEFILTER']._serialized_end=998
_globals['_NODEHIGHLIGHT']._serialized_start=1000
_globals['_NODEHIGHLIGHT']._serialized_end=1126
_globals['_GEOPOINT']._serialized_start=1128
_globals['_GEOPOINT']._serialized_end=1189
_globals['_MAP']._serialized_start=1191
_globals['_MAP']._serialized_end=1276
# @@protoc_insertion_point(module_scope)

View File

@@ -165,6 +165,14 @@ class _LanguageEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumT
"""
Bulgarian
"""
CZECH: _Language.ValueType # 18
"""
Czech
"""
DANISH: _Language.ValueType # 19
"""
Danish
"""
SIMPLIFIED_CHINESE: _Language.ValueType # 30
"""
Simplified Chinese (experimental)
@@ -251,6 +259,14 @@ BULGARIAN: Language.ValueType # 17
"""
Bulgarian
"""
CZECH: Language.ValueType # 18
"""
Czech
"""
DANISH: Language.ValueType # 19
"""
Danish
"""
SIMPLIFIED_CHINESE: Language.ValueType # 30
"""
Simplified Chinese (experimental)
@@ -269,6 +285,91 @@ class DeviceUIConfig(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _GpsCoordinateFormat:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _GpsCoordinateFormatEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DeviceUIConfig._GpsCoordinateFormat.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
DEC: DeviceUIConfig._GpsCoordinateFormat.ValueType # 0
"""
GPS coordinates are displayed in the normal decimal degrees format:
DD.DDDDDD DDD.DDDDDD
"""
DMS: DeviceUIConfig._GpsCoordinateFormat.ValueType # 1
"""
GPS coordinates are displayed in the degrees minutes seconds format:
DD°MM'SS"C DDD°MM'SS"C, where C is the compass point representing the locations quadrant
"""
UTM: DeviceUIConfig._GpsCoordinateFormat.ValueType # 2
"""
Universal Transverse Mercator format:
ZZB EEEEEE NNNNNNN, where Z is zone, B is band, E is easting, N is northing
"""
MGRS: DeviceUIConfig._GpsCoordinateFormat.ValueType # 3
"""
Military Grid Reference System format:
ZZB CD EEEEE NNNNN, where Z is zone, B is band, C is the east 100k square, D is the north 100k square,
E is easting, N is northing
"""
OLC: DeviceUIConfig._GpsCoordinateFormat.ValueType # 4
"""
Open Location Code (aka Plus Codes).
"""
OSGR: DeviceUIConfig._GpsCoordinateFormat.ValueType # 5
"""
Ordnance Survey Grid Reference (the National Grid System of the UK).
Format: AB EEEEE NNNNN, where A is the east 100k square, B is the north 100k square,
E is the easting, N is the northing
"""
MLS: DeviceUIConfig._GpsCoordinateFormat.ValueType # 6
"""
Maidenhead Locator System
Described here: https://en.wikipedia.org/wiki/Maidenhead_Locator_System
"""
class GpsCoordinateFormat(_GpsCoordinateFormat, metaclass=_GpsCoordinateFormatEnumTypeWrapper):
"""
How the GPS coordinates are displayed on the OLED screen.
"""
DEC: DeviceUIConfig.GpsCoordinateFormat.ValueType # 0
"""
GPS coordinates are displayed in the normal decimal degrees format:
DD.DDDDDD DDD.DDDDDD
"""
DMS: DeviceUIConfig.GpsCoordinateFormat.ValueType # 1
"""
GPS coordinates are displayed in the degrees minutes seconds format:
DD°MM'SS"C DDD°MM'SS"C, where C is the compass point representing the locations quadrant
"""
UTM: DeviceUIConfig.GpsCoordinateFormat.ValueType # 2
"""
Universal Transverse Mercator format:
ZZB EEEEEE NNNNNNN, where Z is zone, B is band, E is easting, N is northing
"""
MGRS: DeviceUIConfig.GpsCoordinateFormat.ValueType # 3
"""
Military Grid Reference System format:
ZZB CD EEEEE NNNNN, where Z is zone, B is band, C is the east 100k square, D is the north 100k square,
E is easting, N is northing
"""
OLC: DeviceUIConfig.GpsCoordinateFormat.ValueType # 4
"""
Open Location Code (aka Plus Codes).
"""
OSGR: DeviceUIConfig.GpsCoordinateFormat.ValueType # 5
"""
Ordnance Survey Grid Reference (the National Grid System of the UK).
Format: AB EEEEE NNNNN, where A is the east 100k square, B is the north 100k square,
E is the easting, N is the northing
"""
MLS: DeviceUIConfig.GpsCoordinateFormat.ValueType # 6
"""
Maidenhead Locator System
Described here: https://en.wikipedia.org/wiki/Maidenhead_Locator_System
"""
VERSION_FIELD_NUMBER: builtins.int
SCREEN_BRIGHTNESS_FIELD_NUMBER: builtins.int
SCREEN_TIMEOUT_FIELD_NUMBER: builtins.int
@@ -287,6 +388,7 @@ class DeviceUIConfig(google.protobuf.message.Message):
COMPASS_MODE_FIELD_NUMBER: builtins.int
SCREEN_RGB_COLOR_FIELD_NUMBER: builtins.int
IS_CLOCKFACE_ANALOG_FIELD_NUMBER: builtins.int
GPS_FORMAT_FIELD_NUMBER: builtins.int
version: builtins.int
"""
A version integer used to invalidate saved files when we make incompatible changes.
@@ -337,6 +439,10 @@ class DeviceUIConfig(google.protobuf.message.Message):
Clockface analog style
true for analog clockface, false for digital clockface
"""
gps_format: global___DeviceUIConfig.GpsCoordinateFormat.ValueType
"""
How the GPS coordinates are formatted on the OLED screen.
"""
@property
def node_filter(self) -> global___NodeFilter:
"""
@@ -376,9 +482,10 @@ class DeviceUIConfig(google.protobuf.message.Message):
compass_mode: global___CompassMode.ValueType = ...,
screen_rgb_color: builtins.int = ...,
is_clockface_analog: builtins.bool = ...,
gps_format: global___DeviceUIConfig.GpsCoordinateFormat.ValueType = ...,
) -> None: ...
def HasField(self, field_name: typing.Literal["map_data", b"map_data", "node_filter", b"node_filter", "node_highlight", b"node_highlight"]) -> builtins.bool: ...
def ClearField(self, field_name: typing.Literal["alert_enabled", b"alert_enabled", "banner_enabled", b"banner_enabled", "calibration_data", b"calibration_data", "compass_mode", b"compass_mode", "is_clockface_analog", b"is_clockface_analog", "language", b"language", "map_data", b"map_data", "node_filter", b"node_filter", "node_highlight", b"node_highlight", "pin_code", b"pin_code", "ring_tone_id", b"ring_tone_id", "screen_brightness", b"screen_brightness", "screen_lock", b"screen_lock", "screen_rgb_color", b"screen_rgb_color", "screen_timeout", b"screen_timeout", "settings_lock", b"settings_lock", "theme", b"theme", "version", b"version"]) -> None: ...
def ClearField(self, field_name: typing.Literal["alert_enabled", b"alert_enabled", "banner_enabled", b"banner_enabled", "calibration_data", b"calibration_data", "compass_mode", b"compass_mode", "gps_format", b"gps_format", "is_clockface_analog", b"is_clockface_analog", "language", b"language", "map_data", b"map_data", "node_filter", b"node_filter", "node_highlight", b"node_highlight", "pin_code", b"pin_code", "ring_tone_id", b"ring_tone_id", "screen_brightness", b"screen_brightness", "screen_lock", b"screen_lock", "screen_rgb_color", b"screen_rgb_color", "screen_timeout", b"screen_timeout", "settings_lock", b"settings_lock", "theme", b"theme", "version", b"version"]) -> None: ...
global___DeviceUIConfig = DeviceUIConfig

View File

@@ -19,14 +19,14 @@ from meshtastic.protobuf import telemetry_pb2 as meshtastic_dot_protobuf_dot_tel
from meshtastic.protobuf import nanopb_pb2 as meshtastic_dot_protobuf_dot_nanopb__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$meshtastic/protobuf/deviceonly.proto\x12\x13meshtastic.protobuf\x1a!meshtastic/protobuf/channel.proto\x1a meshtastic/protobuf/config.proto\x1a#meshtastic/protobuf/localonly.proto\x1a\x1emeshtastic/protobuf/mesh.proto\x1a#meshtastic/protobuf/telemetry.proto\x1a meshtastic/protobuf/nanopb.proto\"\x99\x01\n\x0cPositionLite\x12\x12\n\nlatitude_i\x18\x01 \x01(\x0f\x12\x13\n\x0blongitude_i\x18\x02 \x01(\x0f\x12\x10\n\x08\x61ltitude\x18\x03 \x01(\x05\x12\x0c\n\x04time\x18\x04 \x01(\x07\x12@\n\x0flocation_source\x18\x05 \x01(\x0e\x32\'.meshtastic.protobuf.Position.LocSource\"\x94\x02\n\x08UserLite\x12\x13\n\x07macaddr\x18\x01 \x01(\x0c\x42\x02\x18\x01\x12\x11\n\tlong_name\x18\x02 \x01(\t\x12\x12\n\nshort_name\x18\x03 \x01(\t\x12\x34\n\x08hw_model\x18\x04 \x01(\x0e\x32\".meshtastic.protobuf.HardwareModel\x12\x13\n\x0bis_licensed\x18\x05 \x01(\x08\x12;\n\x04role\x18\x06 \x01(\x0e\x32-.meshtastic.protobuf.Config.DeviceConfig.Role\x12\x12\n\npublic_key\x18\x07 \x01(\x0c\x12\x1c\n\x0fis_unmessagable\x18\t \x01(\x08H\x00\x88\x01\x01\x42\x12\n\x10_is_unmessagable\"\xf0\x02\n\x0cNodeInfoLite\x12\x0b\n\x03num\x18\x01 \x01(\r\x12+\n\x04user\x18\x02 \x01(\x0b\x32\x1d.meshtastic.protobuf.UserLite\x12\x33\n\x08position\x18\x03 \x01(\x0b\x32!.meshtastic.protobuf.PositionLite\x12\x0b\n\x03snr\x18\x04 \x01(\x02\x12\x12\n\nlast_heard\x18\x05 \x01(\x07\x12:\n\x0e\x64\x65vice_metrics\x18\x06 \x01(\x0b\x32\".meshtastic.protobuf.DeviceMetrics\x12\x0f\n\x07\x63hannel\x18\x07 \x01(\r\x12\x10\n\x08via_mqtt\x18\x08 \x01(\x08\x12\x16\n\thops_away\x18\t \x01(\rH\x00\x88\x01\x01\x12\x13\n\x0bis_favorite\x18\n \x01(\x08\x12\x12\n\nis_ignored\x18\x0b \x01(\x08\x12\x10\n\x08next_hop\x18\x0c \x01(\r\x12\x10\n\x08\x62itfield\x18\r \x01(\rB\x0c\n\n_hops_away\"\xa1\x03\n\x0b\x44\x65viceState\x12\x30\n\x07my_node\x18\x02 \x01(\x0b\x32\x1f.meshtastic.protobuf.MyNodeInfo\x12(\n\x05owner\x18\x03 
\x01(\x0b\x32\x19.meshtastic.protobuf.User\x12\x36\n\rreceive_queue\x18\x05 \x03(\x0b\x32\x1f.meshtastic.protobuf.MeshPacket\x12\x0f\n\x07version\x18\x08 \x01(\r\x12\x38\n\x0frx_text_message\x18\x07 \x01(\x0b\x32\x1f.meshtastic.protobuf.MeshPacket\x12\x13\n\x07no_save\x18\t \x01(\x08\x42\x02\x18\x01\x12\x19\n\rdid_gps_reset\x18\x0b \x01(\x08\x42\x02\x18\x01\x12\x34\n\x0brx_waypoint\x18\x0c \x01(\x0b\x32\x1f.meshtastic.protobuf.MeshPacket\x12M\n\x19node_remote_hardware_pins\x18\r \x03(\x0b\x32*.meshtastic.protobuf.NodeRemoteHardwarePin\"}\n\x0cNodeDatabase\x12\x0f\n\x07version\x18\x01 \x01(\r\x12\\\n\x05nodes\x18\x02 \x03(\x0b\x32!.meshtastic.protobuf.NodeInfoLiteB*\x92?\'\x92\x01$std::vector<meshtastic_NodeInfoLite>\"N\n\x0b\x43hannelFile\x12.\n\x08\x63hannels\x18\x01 \x03(\x0b\x32\x1c.meshtastic.protobuf.Channel\x12\x0f\n\x07version\x18\x02 \x01(\r\"\x86\x02\n\x11\x42\x61\x63kupPreferences\x12\x0f\n\x07version\x18\x01 \x01(\r\x12\x11\n\ttimestamp\x18\x02 \x01(\x07\x12\x30\n\x06\x63onfig\x18\x03 \x01(\x0b\x32 .meshtastic.protobuf.LocalConfig\x12=\n\rmodule_config\x18\x04 \x01(\x0b\x32&.meshtastic.protobuf.LocalModuleConfig\x12\x32\n\x08\x63hannels\x18\x05 \x01(\x0b\x32 .meshtastic.protobuf.ChannelFile\x12(\n\x05owner\x18\x06 \x01(\x0b\x32\x19.meshtastic.protobuf.UserBm\n\x13\x63om.geeksville.meshB\nDeviceOnlyZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x92?\x0b\xc2\x01\x08<vector>b\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$meshtastic/protobuf/deviceonly.proto\x12\x13meshtastic.protobuf\x1a!meshtastic/protobuf/channel.proto\x1a meshtastic/protobuf/config.proto\x1a#meshtastic/protobuf/localonly.proto\x1a\x1emeshtastic/protobuf/mesh.proto\x1a#meshtastic/protobuf/telemetry.proto\x1a meshtastic/protobuf/nanopb.proto\"\x99\x01\n\x0cPositionLite\x12\x12\n\nlatitude_i\x18\x01 \x01(\x0f\x12\x13\n\x0blongitude_i\x18\x02 \x01(\x0f\x12\x10\n\x08\x61ltitude\x18\x03 \x01(\x05\x12\x0c\n\x04time\x18\x04 \x01(\x07\x12@\n\x0flocation_source\x18\x05 \x01(\x0e\x32\'.meshtastic.protobuf.Position.LocSource\"\x94\x02\n\x08UserLite\x12\x13\n\x07macaddr\x18\x01 \x01(\x0c\x42\x02\x18\x01\x12\x11\n\tlong_name\x18\x02 \x01(\t\x12\x12\n\nshort_name\x18\x03 \x01(\t\x12\x34\n\x08hw_model\x18\x04 \x01(\x0e\x32\".meshtastic.protobuf.HardwareModel\x12\x13\n\x0bis_licensed\x18\x05 \x01(\x08\x12;\n\x04role\x18\x06 \x01(\x0e\x32-.meshtastic.protobuf.Config.DeviceConfig.Role\x12\x12\n\npublic_key\x18\x07 \x01(\x0c\x12\x1c\n\x0fis_unmessagable\x18\t \x01(\x08H\x00\x88\x01\x01\x42\x12\n\x10_is_unmessagable\"\xf0\x02\n\x0cNodeInfoLite\x12\x0b\n\x03num\x18\x01 \x01(\r\x12+\n\x04user\x18\x02 \x01(\x0b\x32\x1d.meshtastic.protobuf.UserLite\x12\x33\n\x08position\x18\x03 \x01(\x0b\x32!.meshtastic.protobuf.PositionLite\x12\x0b\n\x03snr\x18\x04 \x01(\x02\x12\x12\n\nlast_heard\x18\x05 \x01(\x07\x12:\n\x0e\x64\x65vice_metrics\x18\x06 \x01(\x0b\x32\".meshtastic.protobuf.DeviceMetrics\x12\x0f\n\x07\x63hannel\x18\x07 \x01(\r\x12\x10\n\x08via_mqtt\x18\x08 \x01(\x08\x12\x16\n\thops_away\x18\t \x01(\rH\x00\x88\x01\x01\x12\x13\n\x0bis_favorite\x18\n \x01(\x08\x12\x12\n\nis_ignored\x18\x0b \x01(\x08\x12\x10\n\x08next_hop\x18\x0c \x01(\r\x12\x10\n\x08\x62itfield\x18\r \x01(\rB\x0c\n\n_hops_away\"\xa1\x03\n\x0b\x44\x65viceState\x12\x30\n\x07my_node\x18\x02 \x01(\x0b\x32\x1f.meshtastic.protobuf.MyNodeInfo\x12(\n\x05owner\x18\x03 
\x01(\x0b\x32\x19.meshtastic.protobuf.User\x12\x36\n\rreceive_queue\x18\x05 \x03(\x0b\x32\x1f.meshtastic.protobuf.MeshPacket\x12\x0f\n\x07version\x18\x08 \x01(\r\x12\x38\n\x0frx_text_message\x18\x07 \x01(\x0b\x32\x1f.meshtastic.protobuf.MeshPacket\x12\x13\n\x07no_save\x18\t \x01(\x08\x42\x02\x18\x01\x12\x19\n\rdid_gps_reset\x18\x0b \x01(\x08\x42\x02\x18\x01\x12\x34\n\x0brx_waypoint\x18\x0c \x01(\x0b\x32\x1f.meshtastic.protobuf.MeshPacket\x12M\n\x19node_remote_hardware_pins\x18\r \x03(\x0b\x32*.meshtastic.protobuf.NodeRemoteHardwarePin\"}\n\x0cNodeDatabase\x12\x0f\n\x07version\x18\x01 \x01(\r\x12\\\n\x05nodes\x18\x02 \x03(\x0b\x32!.meshtastic.protobuf.NodeInfoLiteB*\x92?\'\x92\x01$std::vector<meshtastic_NodeInfoLite>\"N\n\x0b\x43hannelFile\x12.\n\x08\x63hannels\x18\x01 \x03(\x0b\x32\x1c.meshtastic.protobuf.Channel\x12\x0f\n\x07version\x18\x02 \x01(\r\"\x86\x02\n\x11\x42\x61\x63kupPreferences\x12\x0f\n\x07version\x18\x01 \x01(\r\x12\x11\n\ttimestamp\x18\x02 \x01(\x07\x12\x30\n\x06\x63onfig\x18\x03 \x01(\x0b\x32 .meshtastic.protobuf.LocalConfig\x12=\n\rmodule_config\x18\x04 \x01(\x0b\x32&.meshtastic.protobuf.LocalModuleConfig\x12\x32\n\x08\x63hannels\x18\x05 \x01(\x0b\x32 .meshtastic.protobuf.ChannelFile\x12(\n\x05owner\x18\x06 \x01(\x0b\x32\x19.meshtastic.protobuf.UserBn\n\x14org.meshtastic.protoB\nDeviceOnlyZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x92?\x0b\xc2\x01\x08<vector>b\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.deviceonly_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\nDeviceOnlyZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000\222?\013\302\001\010<vector>'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\nDeviceOnlyZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000\222?\013\302\001\010<vector>'
_USERLITE.fields_by_name['macaddr']._options = None
_USERLITE.fields_by_name['macaddr']._serialized_options = b'\030\001'
_DEVICESTATE.fields_by_name['no_save']._options = None

View File

@@ -13,14 +13,14 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%meshtastic/protobuf/interdevice.proto\x12\x13meshtastic.protobuf\"s\n\nSensorData\x12.\n\x04type\x18\x01 \x01(\x0e\x32 .meshtastic.protobuf.MessageType\x12\x15\n\x0b\x66loat_value\x18\x02 \x01(\x02H\x00\x12\x16\n\x0cuint32_value\x18\x03 \x01(\rH\x00\x42\x06\n\x04\x64\x61ta\"_\n\x12InterdeviceMessage\x12\x0e\n\x04nmea\x18\x01 \x01(\tH\x00\x12\x31\n\x06sensor\x18\x02 \x01(\x0b\x32\x1f.meshtastic.protobuf.SensorDataH\x00\x42\x06\n\x04\x64\x61ta*\xd5\x01\n\x0bMessageType\x12\x07\n\x03\x41\x43K\x10\x00\x12\x15\n\x10\x43OLLECT_INTERVAL\x10\xa0\x01\x12\x0c\n\x07\x42\x45\x45P_ON\x10\xa1\x01\x12\r\n\x08\x42\x45\x45P_OFF\x10\xa2\x01\x12\r\n\x08SHUTDOWN\x10\xa3\x01\x12\r\n\x08POWER_ON\x10\xa4\x01\x12\x0f\n\nSCD41_TEMP\x10\xb0\x01\x12\x13\n\x0eSCD41_HUMIDITY\x10\xb1\x01\x12\x0e\n\tSCD41_CO2\x10\xb2\x01\x12\x0f\n\nAHT20_TEMP\x10\xb3\x01\x12\x13\n\x0e\x41HT20_HUMIDITY\x10\xb4\x01\x12\x0f\n\nTVOC_INDEX\x10\xb5\x01\x42\x66\n\x13\x63om.geeksville.meshB\x11InterdeviceProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%meshtastic/protobuf/interdevice.proto\x12\x13meshtastic.protobuf\"s\n\nSensorData\x12.\n\x04type\x18\x01 \x01(\x0e\x32 .meshtastic.protobuf.MessageType\x12\x15\n\x0b\x66loat_value\x18\x02 \x01(\x02H\x00\x12\x16\n\x0cuint32_value\x18\x03 \x01(\rH\x00\x42\x06\n\x04\x64\x61ta\"_\n\x12InterdeviceMessage\x12\x0e\n\x04nmea\x18\x01 \x01(\tH\x00\x12\x31\n\x06sensor\x18\x02 \x01(\x0b\x32\x1f.meshtastic.protobuf.SensorDataH\x00\x42\x06\n\x04\x64\x61ta*\xd5\x01\n\x0bMessageType\x12\x07\n\x03\x41\x43K\x10\x00\x12\x15\n\x10\x43OLLECT_INTERVAL\x10\xa0\x01\x12\x0c\n\x07\x42\x45\x45P_ON\x10\xa1\x01\x12\r\n\x08\x42\x45\x45P_OFF\x10\xa2\x01\x12\r\n\x08SHUTDOWN\x10\xa3\x01\x12\r\n\x08POWER_ON\x10\xa4\x01\x12\x0f\n\nSCD41_TEMP\x10\xb0\x01\x12\x13\n\x0eSCD41_HUMIDITY\x10\xb1\x01\x12\x0e\n\tSCD41_CO2\x10\xb2\x01\x12\x0f\n\nAHT20_TEMP\x10\xb3\x01\x12\x13\n\x0e\x41HT20_HUMIDITY\x10\xb4\x01\x12\x0f\n\nTVOC_INDEX\x10\xb5\x01\x42g\n\x14org.meshtastic.protoB\x11InterdeviceProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.interdevice_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\021InterdeviceProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\021InterdeviceProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_MESSAGETYPE']._serialized_start=277
_globals['_MESSAGETYPE']._serialized_end=490
_globals['_SENSORDATA']._serialized_start=62

View File

@@ -15,14 +15,14 @@ from meshtastic.protobuf import config_pb2 as meshtastic_dot_protobuf_dot_config
from meshtastic.protobuf import module_config_pb2 as meshtastic_dot_protobuf_dot_module__config__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n#meshtastic/protobuf/localonly.proto\x12\x13meshtastic.protobuf\x1a meshtastic/protobuf/config.proto\x1a\'meshtastic/protobuf/module_config.proto\"\xfa\x03\n\x0bLocalConfig\x12\x38\n\x06\x64\x65vice\x18\x01 \x01(\x0b\x32(.meshtastic.protobuf.Config.DeviceConfig\x12<\n\x08position\x18\x02 \x01(\x0b\x32*.meshtastic.protobuf.Config.PositionConfig\x12\x36\n\x05power\x18\x03 \x01(\x0b\x32\'.meshtastic.protobuf.Config.PowerConfig\x12:\n\x07network\x18\x04 \x01(\x0b\x32).meshtastic.protobuf.Config.NetworkConfig\x12:\n\x07\x64isplay\x18\x05 \x01(\x0b\x32).meshtastic.protobuf.Config.DisplayConfig\x12\x34\n\x04lora\x18\x06 \x01(\x0b\x32&.meshtastic.protobuf.Config.LoRaConfig\x12>\n\tbluetooth\x18\x07 \x01(\x0b\x32+.meshtastic.protobuf.Config.BluetoothConfig\x12\x0f\n\x07version\x18\x08 \x01(\r\x12<\n\x08security\x18\t \x01(\x0b\x32*.meshtastic.protobuf.Config.SecurityConfig\"\xf0\x07\n\x11LocalModuleConfig\x12:\n\x04mqtt\x18\x01 \x01(\x0b\x32,.meshtastic.protobuf.ModuleConfig.MQTTConfig\x12>\n\x06serial\x18\x02 \x01(\x0b\x32..meshtastic.protobuf.ModuleConfig.SerialConfig\x12[\n\x15\x65xternal_notification\x18\x03 \x01(\x0b\x32<.meshtastic.protobuf.ModuleConfig.ExternalNotificationConfig\x12K\n\rstore_forward\x18\x04 \x01(\x0b\x32\x34.meshtastic.protobuf.ModuleConfig.StoreForwardConfig\x12\x45\n\nrange_test\x18\x05 \x01(\x0b\x32\x31.meshtastic.protobuf.ModuleConfig.RangeTestConfig\x12\x44\n\ttelemetry\x18\x06 \x01(\x0b\x32\x31.meshtastic.protobuf.ModuleConfig.TelemetryConfig\x12M\n\x0e\x63\x61nned_message\x18\x07 \x01(\x0b\x32\x35.meshtastic.protobuf.ModuleConfig.CannedMessageConfig\x12<\n\x05\x61udio\x18\t \x01(\x0b\x32-.meshtastic.protobuf.ModuleConfig.AudioConfig\x12O\n\x0fremote_hardware\x18\n \x01(\x0b\x32\x36.meshtastic.protobuf.ModuleConfig.RemoteHardwareConfig\x12K\n\rneighbor_info\x18\x0b \x01(\x0b\x32\x34.meshtastic.protobuf.ModuleConfig.NeighborInfoConfig\x12Q\n\x10\x61mbient_lighting\x18\x0c 
\x01(\x0b\x32\x37.meshtastic.protobuf.ModuleConfig.AmbientLightingConfig\x12Q\n\x10\x64\x65tection_sensor\x18\r \x01(\x0b\x32\x37.meshtastic.protobuf.ModuleConfig.DetectionSensorConfig\x12\x46\n\npaxcounter\x18\x0e \x01(\x0b\x32\x32.meshtastic.protobuf.ModuleConfig.PaxcounterConfig\x12\x0f\n\x07version\x18\x08 \x01(\rBd\n\x13\x63om.geeksville.meshB\x0fLocalOnlyProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n#meshtastic/protobuf/localonly.proto\x12\x13meshtastic.protobuf\x1a meshtastic/protobuf/config.proto\x1a\'meshtastic/protobuf/module_config.proto\"\xfa\x03\n\x0bLocalConfig\x12\x38\n\x06\x64\x65vice\x18\x01 \x01(\x0b\x32(.meshtastic.protobuf.Config.DeviceConfig\x12<\n\x08position\x18\x02 \x01(\x0b\x32*.meshtastic.protobuf.Config.PositionConfig\x12\x36\n\x05power\x18\x03 \x01(\x0b\x32\'.meshtastic.protobuf.Config.PowerConfig\x12:\n\x07network\x18\x04 \x01(\x0b\x32).meshtastic.protobuf.Config.NetworkConfig\x12:\n\x07\x64isplay\x18\x05 \x01(\x0b\x32).meshtastic.protobuf.Config.DisplayConfig\x12\x34\n\x04lora\x18\x06 \x01(\x0b\x32&.meshtastic.protobuf.Config.LoRaConfig\x12>\n\tbluetooth\x18\x07 \x01(\x0b\x32+.meshtastic.protobuf.Config.BluetoothConfig\x12\x0f\n\x07version\x18\x08 \x01(\r\x12<\n\x08security\x18\t \x01(\x0b\x32*.meshtastic.protobuf.Config.SecurityConfig\"\xf0\x07\n\x11LocalModuleConfig\x12:\n\x04mqtt\x18\x01 \x01(\x0b\x32,.meshtastic.protobuf.ModuleConfig.MQTTConfig\x12>\n\x06serial\x18\x02 \x01(\x0b\x32..meshtastic.protobuf.ModuleConfig.SerialConfig\x12[\n\x15\x65xternal_notification\x18\x03 \x01(\x0b\x32<.meshtastic.protobuf.ModuleConfig.ExternalNotificationConfig\x12K\n\rstore_forward\x18\x04 \x01(\x0b\x32\x34.meshtastic.protobuf.ModuleConfig.StoreForwardConfig\x12\x45\n\nrange_test\x18\x05 \x01(\x0b\x32\x31.meshtastic.protobuf.ModuleConfig.RangeTestConfig\x12\x44\n\ttelemetry\x18\x06 \x01(\x0b\x32\x31.meshtastic.protobuf.ModuleConfig.TelemetryConfig\x12M\n\x0e\x63\x61nned_message\x18\x07 \x01(\x0b\x32\x35.meshtastic.protobuf.ModuleConfig.CannedMessageConfig\x12<\n\x05\x61udio\x18\t \x01(\x0b\x32-.meshtastic.protobuf.ModuleConfig.AudioConfig\x12O\n\x0fremote_hardware\x18\n \x01(\x0b\x32\x36.meshtastic.protobuf.ModuleConfig.RemoteHardwareConfig\x12K\n\rneighbor_info\x18\x0b \x01(\x0b\x32\x34.meshtastic.protobuf.ModuleConfig.NeighborInfoConfig\x12Q\n\x10\x61mbient_lighting\x18\x0c 
\x01(\x0b\x32\x37.meshtastic.protobuf.ModuleConfig.AmbientLightingConfig\x12Q\n\x10\x64\x65tection_sensor\x18\r \x01(\x0b\x32\x37.meshtastic.protobuf.ModuleConfig.DetectionSensorConfig\x12\x46\n\npaxcounter\x18\x0e \x01(\x0b\x32\x32.meshtastic.protobuf.ModuleConfig.PaxcounterConfig\x12\x0f\n\x07version\x18\x08 \x01(\rBe\n\x14org.meshtastic.protoB\x0fLocalOnlyProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.localonly_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\017LocalOnlyProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\017LocalOnlyProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_LOCALCONFIG']._serialized_start=136
_globals['_LOCALCONFIG']._serialized_end=642
_globals['_LOCALMODULECONFIG']._serialized_start=645

File diff suppressed because one or more lines are too long

View File

@@ -453,9 +453,9 @@ class _HardwareModelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._
"""
Seeed Tracker L1 EINK driver
"""
QWANTZ_TINY_ARMS: _HardwareModel.ValueType # 101
MUZI_R1_NEO: _HardwareModel.ValueType # 101
"""
Reserved ID for future and past use
Muzi Works R1 Neo
"""
T_DECK_PRO: _HardwareModel.ValueType # 102
"""
@@ -465,9 +465,10 @@ class _HardwareModelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._
"""
Lilygo TLora Pager
"""
GAT562_MESH_TRIAL_TRACKER: _HardwareModel.ValueType # 104
M5STACK_RESERVED: _HardwareModel.ValueType # 104
"""
GAT562 Mesh Trial Tracker
M5Stack Reserved
0x68
"""
WISMESH_TAG: _HardwareModel.ValueType # 105
"""
@@ -486,6 +487,42 @@ class _HardwareModelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._
MeshSolar is an integrated power management and communication solution designed for outdoor low-power devices.
https://heltec.org/project/meshsolar/
"""
T_ECHO_LITE: _HardwareModel.ValueType # 109
"""
Lilygo T-Echo Lite
"""
HELTEC_V4: _HardwareModel.ValueType # 110
"""
New Heltec LoRA32 with ESP32-S3 CPU
"""
M5STACK_C6L: _HardwareModel.ValueType # 111
"""
M5Stack C6L
"""
M5STACK_CARDPUTER_ADV: _HardwareModel.ValueType # 112
"""
M5Stack Cardputer Adv
"""
HELTEC_WIRELESS_TRACKER_V2: _HardwareModel.ValueType # 113
"""
ESP32S3 main controller with GPS and TFT screen.
"""
T_WATCH_ULTRA: _HardwareModel.ValueType # 114
"""
LilyGo T-Watch Ultra
"""
THINKNODE_M3: _HardwareModel.ValueType # 115
"""
Elecrow ThinkNode M3
"""
WISMESH_TAP_V2: _HardwareModel.ValueType # 116
"""
RAK WISMESH_TAP_V2 with ESP32-S3 CPU
"""
RAK3401: _HardwareModel.ValueType # 117
"""
RAK3401
"""
PRIVATE_HW: _HardwareModel.ValueType # 255
"""
------------------------------------------------------------------------------------------------------------------------------------------
@@ -922,9 +959,9 @@ SEEED_WIO_TRACKER_L1_EINK: HardwareModel.ValueType # 100
"""
Seeed Tracker L1 EINK driver
"""
QWANTZ_TINY_ARMS: HardwareModel.ValueType # 101
MUZI_R1_NEO: HardwareModel.ValueType # 101
"""
Reserved ID for future and past use
Muzi Works R1 Neo
"""
T_DECK_PRO: HardwareModel.ValueType # 102
"""
@@ -934,9 +971,10 @@ T_LORA_PAGER: HardwareModel.ValueType # 103
"""
Lilygo TLora Pager
"""
GAT562_MESH_TRIAL_TRACKER: HardwareModel.ValueType # 104
M5STACK_RESERVED: HardwareModel.ValueType # 104
"""
GAT562 Mesh Trial Tracker
M5Stack Reserved
0x68
"""
WISMESH_TAG: HardwareModel.ValueType # 105
"""
@@ -955,6 +993,42 @@ HELTEC_MESH_SOLAR: HardwareModel.ValueType # 108
MeshSolar is an integrated power management and communication solution designed for outdoor low-power devices.
https://heltec.org/project/meshsolar/
"""
T_ECHO_LITE: HardwareModel.ValueType # 109
"""
Lilygo T-Echo Lite
"""
HELTEC_V4: HardwareModel.ValueType # 110
"""
New Heltec LoRA32 with ESP32-S3 CPU
"""
M5STACK_C6L: HardwareModel.ValueType # 111
"""
M5Stack C6L
"""
M5STACK_CARDPUTER_ADV: HardwareModel.ValueType # 112
"""
M5Stack Cardputer Adv
"""
HELTEC_WIRELESS_TRACKER_V2: HardwareModel.ValueType # 113
"""
ESP32S3 main controller with GPS and TFT screen.
"""
T_WATCH_ULTRA: HardwareModel.ValueType # 114
"""
LilyGo T-Watch Ultra
"""
THINKNODE_M3: HardwareModel.ValueType # 115
"""
Elecrow ThinkNode M3
"""
WISMESH_TAP_V2: HardwareModel.ValueType # 116
"""
RAK WISMESH_TAP_V2 with ESP32-S3 CPU
"""
RAK3401: HardwareModel.ValueType # 117
"""
RAK3401
"""
PRIVATE_HW: HardwareModel.ValueType # 255
"""
------------------------------------------------------------------------------------------------------------------------------------------

File diff suppressed because one or more lines are too long

View File

@@ -824,6 +824,7 @@ class ModuleConfig(google.protobuf.message.Message):
ENABLED_FIELD_NUMBER: builtins.int
SENDER_FIELD_NUMBER: builtins.int
SAVE_FIELD_NUMBER: builtins.int
CLEAR_ON_REBOOT_FIELD_NUMBER: builtins.int
enabled: builtins.bool
"""
Enable the Range Test Module
@@ -837,14 +838,20 @@ class ModuleConfig(google.protobuf.message.Message):
Bool value indicating that this node should save a RangeTest.csv file.
ESP32 Only
"""
clear_on_reboot: builtins.bool
"""
Bool indicating that the node should cleanup / destroy it's RangeTest.csv file.
ESP32 Only
"""
def __init__(
self,
*,
enabled: builtins.bool = ...,
sender: builtins.int = ...,
save: builtins.bool = ...,
clear_on_reboot: builtins.bool = ...,
) -> None: ...
def ClearField(self, field_name: typing.Literal["enabled", b"enabled", "save", b"save", "sender", b"sender"]) -> None: ...
def ClearField(self, field_name: typing.Literal["clear_on_reboot", b"clear_on_reboot", "enabled", b"enabled", "save", b"save", "sender", b"sender"]) -> None: ...
@typing.final
class TelemetryConfig(google.protobuf.message.Message):
@@ -867,6 +874,7 @@ class ModuleConfig(google.protobuf.message.Message):
HEALTH_MEASUREMENT_ENABLED_FIELD_NUMBER: builtins.int
HEALTH_UPDATE_INTERVAL_FIELD_NUMBER: builtins.int
HEALTH_SCREEN_ENABLED_FIELD_NUMBER: builtins.int
DEVICE_TELEMETRY_ENABLED_FIELD_NUMBER: builtins.int
device_update_interval: builtins.int
"""
Interval in seconds of how often we should try to send our
@@ -927,6 +935,11 @@ class ModuleConfig(google.protobuf.message.Message):
"""
Enable/Disable the health telemetry module on-device display
"""
device_telemetry_enabled: builtins.bool
"""
Enable/Disable the device telemetry module to send metrics to the mesh
Note: We will still send telemtry to the connected phone / client every minute over the API
"""
def __init__(
self,
*,
@@ -943,8 +956,9 @@ class ModuleConfig(google.protobuf.message.Message):
health_measurement_enabled: builtins.bool = ...,
health_update_interval: builtins.int = ...,
health_screen_enabled: builtins.bool = ...,
device_telemetry_enabled: builtins.bool = ...,
) -> None: ...
def ClearField(self, field_name: typing.Literal["air_quality_enabled", b"air_quality_enabled", "air_quality_interval", b"air_quality_interval", "device_update_interval", b"device_update_interval", "environment_display_fahrenheit", b"environment_display_fahrenheit", "environment_measurement_enabled", b"environment_measurement_enabled", "environment_screen_enabled", b"environment_screen_enabled", "environment_update_interval", b"environment_update_interval", "health_measurement_enabled", b"health_measurement_enabled", "health_screen_enabled", b"health_screen_enabled", "health_update_interval", b"health_update_interval", "power_measurement_enabled", b"power_measurement_enabled", "power_screen_enabled", b"power_screen_enabled", "power_update_interval", b"power_update_interval"]) -> None: ...
def ClearField(self, field_name: typing.Literal["air_quality_enabled", b"air_quality_enabled", "air_quality_interval", b"air_quality_interval", "device_telemetry_enabled", b"device_telemetry_enabled", "device_update_interval", b"device_update_interval", "environment_display_fahrenheit", b"environment_display_fahrenheit", "environment_measurement_enabled", b"environment_measurement_enabled", "environment_screen_enabled", b"environment_screen_enabled", "environment_update_interval", b"environment_update_interval", "health_measurement_enabled", b"health_measurement_enabled", "health_screen_enabled", b"health_screen_enabled", "health_update_interval", b"health_update_interval", "power_measurement_enabled", b"power_measurement_enabled", "power_screen_enabled", b"power_screen_enabled", "power_update_interval", b"power_update_interval"]) -> None: ...
@typing.final
class CannedMessageConfig(google.protobuf.message.Message):

View File

@@ -15,14 +15,14 @@ from meshtastic.protobuf import config_pb2 as meshtastic_dot_protobuf_dot_config
from meshtastic.protobuf import mesh_pb2 as meshtastic_dot_protobuf_dot_mesh__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1emeshtastic/protobuf/mqtt.proto\x12\x13meshtastic.protobuf\x1a meshtastic/protobuf/config.proto\x1a\x1emeshtastic/protobuf/mesh.proto\"j\n\x0fServiceEnvelope\x12/\n\x06packet\x18\x01 \x01(\x0b\x32\x1f.meshtastic.protobuf.MeshPacket\x12\x12\n\nchannel_id\x18\x02 \x01(\t\x12\x12\n\ngateway_id\x18\x03 \x01(\t\"\x83\x04\n\tMapReport\x12\x11\n\tlong_name\x18\x01 \x01(\t\x12\x12\n\nshort_name\x18\x02 \x01(\t\x12;\n\x04role\x18\x03 \x01(\x0e\x32-.meshtastic.protobuf.Config.DeviceConfig.Role\x12\x34\n\x08hw_model\x18\x04 \x01(\x0e\x32\".meshtastic.protobuf.HardwareModel\x12\x18\n\x10\x66irmware_version\x18\x05 \x01(\t\x12\x41\n\x06region\x18\x06 \x01(\x0e\x32\x31.meshtastic.protobuf.Config.LoRaConfig.RegionCode\x12H\n\x0cmodem_preset\x18\x07 \x01(\x0e\x32\x32.meshtastic.protobuf.Config.LoRaConfig.ModemPreset\x12\x1b\n\x13has_default_channel\x18\x08 \x01(\x08\x12\x12\n\nlatitude_i\x18\t \x01(\x0f\x12\x13\n\x0blongitude_i\x18\n \x01(\x0f\x12\x10\n\x08\x61ltitude\x18\x0b \x01(\x05\x12\x1a\n\x12position_precision\x18\x0c \x01(\r\x12\x1e\n\x16num_online_local_nodes\x18\r \x01(\r\x12!\n\x19has_opted_report_location\x18\x0e \x01(\x08\x42_\n\x13\x63om.geeksville.meshB\nMQTTProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1emeshtastic/protobuf/mqtt.proto\x12\x13meshtastic.protobuf\x1a meshtastic/protobuf/config.proto\x1a\x1emeshtastic/protobuf/mesh.proto\"j\n\x0fServiceEnvelope\x12/\n\x06packet\x18\x01 \x01(\x0b\x32\x1f.meshtastic.protobuf.MeshPacket\x12\x12\n\nchannel_id\x18\x02 \x01(\t\x12\x12\n\ngateway_id\x18\x03 \x01(\t\"\x83\x04\n\tMapReport\x12\x11\n\tlong_name\x18\x01 \x01(\t\x12\x12\n\nshort_name\x18\x02 \x01(\t\x12;\n\x04role\x18\x03 \x01(\x0e\x32-.meshtastic.protobuf.Config.DeviceConfig.Role\x12\x34\n\x08hw_model\x18\x04 \x01(\x0e\x32\".meshtastic.protobuf.HardwareModel\x12\x18\n\x10\x66irmware_version\x18\x05 \x01(\t\x12\x41\n\x06region\x18\x06 \x01(\x0e\x32\x31.meshtastic.protobuf.Config.LoRaConfig.RegionCode\x12H\n\x0cmodem_preset\x18\x07 \x01(\x0e\x32\x32.meshtastic.protobuf.Config.LoRaConfig.ModemPreset\x12\x1b\n\x13has_default_channel\x18\x08 \x01(\x08\x12\x12\n\nlatitude_i\x18\t \x01(\x0f\x12\x13\n\x0blongitude_i\x18\n \x01(\x0f\x12\x10\n\x08\x61ltitude\x18\x0b \x01(\x05\x12\x1a\n\x12position_precision\x18\x0c \x01(\r\x12\x1e\n\x16num_online_local_nodes\x18\r \x01(\r\x12!\n\x19has_opted_report_location\x18\x0e \x01(\x08\x42`\n\x14org.meshtastic.protoB\nMQTTProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.mqtt_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\nMQTTProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\nMQTTProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_SERVICEENVELOPE']._serialized_start=121
_globals['_SERVICEENVELOPE']._serialized_end=227
_globals['_MAPREPORT']._serialized_start=230

View File

@@ -13,14 +13,14 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"meshtastic/protobuf/paxcount.proto\x12\x13meshtastic.protobuf\"5\n\x08Paxcount\x12\x0c\n\x04wifi\x18\x01 \x01(\r\x12\x0b\n\x03\x62le\x18\x02 \x01(\r\x12\x0e\n\x06uptime\x18\x03 \x01(\rBc\n\x13\x63om.geeksville.meshB\x0ePaxcountProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"meshtastic/protobuf/paxcount.proto\x12\x13meshtastic.protobuf\"5\n\x08Paxcount\x12\x0c\n\x04wifi\x18\x01 \x01(\r\x12\x0b\n\x03\x62le\x18\x02 \x01(\r\x12\x0e\n\x06uptime\x18\x03 \x01(\rBd\n\x14org.meshtastic.protoB\x0ePaxcountProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.paxcount_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\016PaxcountProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\016PaxcountProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_PAXCOUNT']._serialized_start=59
_globals['_PAXCOUNT']._serialized_end=112
# @@protoc_insertion_point(module_scope)

View File

@@ -13,14 +13,14 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"meshtastic/protobuf/portnums.proto\x12\x13meshtastic.protobuf*\xf6\x04\n\x07PortNum\x12\x0f\n\x0bUNKNOWN_APP\x10\x00\x12\x14\n\x10TEXT_MESSAGE_APP\x10\x01\x12\x17\n\x13REMOTE_HARDWARE_APP\x10\x02\x12\x10\n\x0cPOSITION_APP\x10\x03\x12\x10\n\x0cNODEINFO_APP\x10\x04\x12\x0f\n\x0bROUTING_APP\x10\x05\x12\r\n\tADMIN_APP\x10\x06\x12\x1f\n\x1bTEXT_MESSAGE_COMPRESSED_APP\x10\x07\x12\x10\n\x0cWAYPOINT_APP\x10\x08\x12\r\n\tAUDIO_APP\x10\t\x12\x18\n\x14\x44\x45TECTION_SENSOR_APP\x10\n\x12\r\n\tALERT_APP\x10\x0b\x12\x18\n\x14KEY_VERIFICATION_APP\x10\x0c\x12\r\n\tREPLY_APP\x10 \x12\x11\n\rIP_TUNNEL_APP\x10!\x12\x12\n\x0ePAXCOUNTER_APP\x10\"\x12\x0e\n\nSERIAL_APP\x10@\x12\x15\n\x11STORE_FORWARD_APP\x10\x41\x12\x12\n\x0eRANGE_TEST_APP\x10\x42\x12\x11\n\rTELEMETRY_APP\x10\x43\x12\x0b\n\x07ZPS_APP\x10\x44\x12\x11\n\rSIMULATOR_APP\x10\x45\x12\x12\n\x0eTRACEROUTE_APP\x10\x46\x12\x14\n\x10NEIGHBORINFO_APP\x10G\x12\x0f\n\x0b\x41TAK_PLUGIN\x10H\x12\x12\n\x0eMAP_REPORT_APP\x10I\x12\x13\n\x0fPOWERSTRESS_APP\x10J\x12\x18\n\x14RETICULUM_TUNNEL_APP\x10L\x12\x0f\n\x0b\x43\x41YENNE_APP\x10M\x12\x10\n\x0bPRIVATE_APP\x10\x80\x02\x12\x13\n\x0e\x41TAK_FORWARDER\x10\x81\x02\x12\x08\n\x03MAX\x10\xff\x03\x42]\n\x13\x63om.geeksville.meshB\x08PortnumsZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"meshtastic/protobuf/portnums.proto\x12\x13meshtastic.protobuf*\xf6\x04\n\x07PortNum\x12\x0f\n\x0bUNKNOWN_APP\x10\x00\x12\x14\n\x10TEXT_MESSAGE_APP\x10\x01\x12\x17\n\x13REMOTE_HARDWARE_APP\x10\x02\x12\x10\n\x0cPOSITION_APP\x10\x03\x12\x10\n\x0cNODEINFO_APP\x10\x04\x12\x0f\n\x0bROUTING_APP\x10\x05\x12\r\n\tADMIN_APP\x10\x06\x12\x1f\n\x1bTEXT_MESSAGE_COMPRESSED_APP\x10\x07\x12\x10\n\x0cWAYPOINT_APP\x10\x08\x12\r\n\tAUDIO_APP\x10\t\x12\x18\n\x14\x44\x45TECTION_SENSOR_APP\x10\n\x12\r\n\tALERT_APP\x10\x0b\x12\x18\n\x14KEY_VERIFICATION_APP\x10\x0c\x12\r\n\tREPLY_APP\x10 \x12\x11\n\rIP_TUNNEL_APP\x10!\x12\x12\n\x0ePAXCOUNTER_APP\x10\"\x12\x0e\n\nSERIAL_APP\x10@\x12\x15\n\x11STORE_FORWARD_APP\x10\x41\x12\x12\n\x0eRANGE_TEST_APP\x10\x42\x12\x11\n\rTELEMETRY_APP\x10\x43\x12\x0b\n\x07ZPS_APP\x10\x44\x12\x11\n\rSIMULATOR_APP\x10\x45\x12\x12\n\x0eTRACEROUTE_APP\x10\x46\x12\x14\n\x10NEIGHBORINFO_APP\x10G\x12\x0f\n\x0b\x41TAK_PLUGIN\x10H\x12\x12\n\x0eMAP_REPORT_APP\x10I\x12\x13\n\x0fPOWERSTRESS_APP\x10J\x12\x18\n\x14RETICULUM_TUNNEL_APP\x10L\x12\x0f\n\x0b\x43\x41YENNE_APP\x10M\x12\x10\n\x0bPRIVATE_APP\x10\x80\x02\x12\x13\n\x0e\x41TAK_FORWARDER\x10\x81\x02\x12\x08\n\x03MAX\x10\xff\x03\x42^\n\x14org.meshtastic.protoB\x08PortnumsZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.portnums_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\010PortnumsZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\010PortnumsZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_PORTNUM']._serialized_start=60
_globals['_PORTNUM']._serialized_end=690
# @@protoc_insertion_point(module_scope)

View File

@@ -13,14 +13,14 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"meshtastic/protobuf/powermon.proto\x12\x13meshtastic.protobuf\"\xe0\x01\n\x08PowerMon\"\xd3\x01\n\x05State\x12\x08\n\x04None\x10\x00\x12\x11\n\rCPU_DeepSleep\x10\x01\x12\x12\n\x0e\x43PU_LightSleep\x10\x02\x12\x0c\n\x08Vext1_On\x10\x04\x12\r\n\tLora_RXOn\x10\x08\x12\r\n\tLora_TXOn\x10\x10\x12\x11\n\rLora_RXActive\x10 \x12\t\n\x05\x42T_On\x10@\x12\x0b\n\x06LED_On\x10\x80\x01\x12\x0e\n\tScreen_On\x10\x80\x02\x12\x13\n\x0eScreen_Drawing\x10\x80\x04\x12\x0c\n\x07Wifi_On\x10\x80\x08\x12\x0f\n\nGPS_Active\x10\x80\x10\"\x88\x03\n\x12PowerStressMessage\x12;\n\x03\x63md\x18\x01 \x01(\x0e\x32..meshtastic.protobuf.PowerStressMessage.Opcode\x12\x13\n\x0bnum_seconds\x18\x02 \x01(\x02\"\x9f\x02\n\x06Opcode\x12\t\n\x05UNSET\x10\x00\x12\x0e\n\nPRINT_INFO\x10\x01\x12\x0f\n\x0b\x46ORCE_QUIET\x10\x02\x12\r\n\tEND_QUIET\x10\x03\x12\r\n\tSCREEN_ON\x10\x10\x12\x0e\n\nSCREEN_OFF\x10\x11\x12\x0c\n\x08\x43PU_IDLE\x10 \x12\x11\n\rCPU_DEEPSLEEP\x10!\x12\x0e\n\nCPU_FULLON\x10\"\x12\n\n\x06LED_ON\x10\x30\x12\x0b\n\x07LED_OFF\x10\x31\x12\x0c\n\x08LORA_OFF\x10@\x12\x0b\n\x07LORA_TX\x10\x41\x12\x0b\n\x07LORA_RX\x10\x42\x12\n\n\x06\x42T_OFF\x10P\x12\t\n\x05\x42T_ON\x10Q\x12\x0c\n\x08WIFI_OFF\x10`\x12\x0b\n\x07WIFI_ON\x10\x61\x12\x0b\n\x07GPS_OFF\x10p\x12\n\n\x06GPS_ON\x10qBc\n\x13\x63om.geeksville.meshB\x0ePowerMonProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"meshtastic/protobuf/powermon.proto\x12\x13meshtastic.protobuf\"\xe0\x01\n\x08PowerMon\"\xd3\x01\n\x05State\x12\x08\n\x04None\x10\x00\x12\x11\n\rCPU_DeepSleep\x10\x01\x12\x12\n\x0e\x43PU_LightSleep\x10\x02\x12\x0c\n\x08Vext1_On\x10\x04\x12\r\n\tLora_RXOn\x10\x08\x12\r\n\tLora_TXOn\x10\x10\x12\x11\n\rLora_RXActive\x10 \x12\t\n\x05\x42T_On\x10@\x12\x0b\n\x06LED_On\x10\x80\x01\x12\x0e\n\tScreen_On\x10\x80\x02\x12\x13\n\x0eScreen_Drawing\x10\x80\x04\x12\x0c\n\x07Wifi_On\x10\x80\x08\x12\x0f\n\nGPS_Active\x10\x80\x10\"\x88\x03\n\x12PowerStressMessage\x12;\n\x03\x63md\x18\x01 \x01(\x0e\x32..meshtastic.protobuf.PowerStressMessage.Opcode\x12\x13\n\x0bnum_seconds\x18\x02 \x01(\x02\"\x9f\x02\n\x06Opcode\x12\t\n\x05UNSET\x10\x00\x12\x0e\n\nPRINT_INFO\x10\x01\x12\x0f\n\x0b\x46ORCE_QUIET\x10\x02\x12\r\n\tEND_QUIET\x10\x03\x12\r\n\tSCREEN_ON\x10\x10\x12\x0e\n\nSCREEN_OFF\x10\x11\x12\x0c\n\x08\x43PU_IDLE\x10 \x12\x11\n\rCPU_DEEPSLEEP\x10!\x12\x0e\n\nCPU_FULLON\x10\"\x12\n\n\x06LED_ON\x10\x30\x12\x0b\n\x07LED_OFF\x10\x31\x12\x0c\n\x08LORA_OFF\x10@\x12\x0b\n\x07LORA_TX\x10\x41\x12\x0b\n\x07LORA_RX\x10\x42\x12\n\n\x06\x42T_OFF\x10P\x12\t\n\x05\x42T_ON\x10Q\x12\x0c\n\x08WIFI_OFF\x10`\x12\x0b\n\x07WIFI_ON\x10\x61\x12\x0b\n\x07GPS_OFF\x10p\x12\n\n\x06GPS_ON\x10qBd\n\x14org.meshtastic.protoB\x0ePowerMonProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.powermon_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\016PowerMonProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\016PowerMonProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_POWERMON']._serialized_start=60
_globals['_POWERMON']._serialized_end=284
_globals['_POWERMON_STATE']._serialized_start=73

View File

@@ -13,14 +13,14 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)meshtastic/protobuf/remote_hardware.proto\x12\x13meshtastic.protobuf\"\xdf\x01\n\x0fHardwareMessage\x12\x37\n\x04type\x18\x01 \x01(\x0e\x32).meshtastic.protobuf.HardwareMessage.Type\x12\x11\n\tgpio_mask\x18\x02 \x01(\x04\x12\x12\n\ngpio_value\x18\x03 \x01(\x04\"l\n\x04Type\x12\t\n\x05UNSET\x10\x00\x12\x0f\n\x0bWRITE_GPIOS\x10\x01\x12\x0f\n\x0bWATCH_GPIOS\x10\x02\x12\x11\n\rGPIOS_CHANGED\x10\x03\x12\x0e\n\nREAD_GPIOS\x10\x04\x12\x14\n\x10READ_GPIOS_REPLY\x10\x05\x42\x63\n\x13\x63om.geeksville.meshB\x0eRemoteHardwareZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)meshtastic/protobuf/remote_hardware.proto\x12\x13meshtastic.protobuf\"\xdf\x01\n\x0fHardwareMessage\x12\x37\n\x04type\x18\x01 \x01(\x0e\x32).meshtastic.protobuf.HardwareMessage.Type\x12\x11\n\tgpio_mask\x18\x02 \x01(\x04\x12\x12\n\ngpio_value\x18\x03 \x01(\x04\"l\n\x04Type\x12\t\n\x05UNSET\x10\x00\x12\x0f\n\x0bWRITE_GPIOS\x10\x01\x12\x0f\n\x0bWATCH_GPIOS\x10\x02\x12\x11\n\rGPIOS_CHANGED\x10\x03\x12\x0e\n\nREAD_GPIOS\x10\x04\x12\x14\n\x10READ_GPIOS_REPLY\x10\x05\x42\x64\n\x14org.meshtastic.protoB\x0eRemoteHardwareZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.remote_hardware_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\016RemoteHardwareZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\016RemoteHardwareZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_HARDWAREMESSAGE']._serialized_start=67
_globals['_HARDWAREMESSAGE']._serialized_end=290
_globals['_HARDWAREMESSAGE_TYPE']._serialized_start=182

View File

@@ -13,14 +13,14 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fmeshtastic/protobuf/rtttl.proto\x12\x13meshtastic.protobuf\"\x1f\n\x0bRTTTLConfig\x12\x10\n\x08ringtone\x18\x01 \x01(\tBf\n\x13\x63om.geeksville.meshB\x11RTTTLConfigProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fmeshtastic/protobuf/rtttl.proto\x12\x13meshtastic.protobuf\"\x1f\n\x0bRTTTLConfig\x12\x10\n\x08ringtone\x18\x01 \x01(\tBg\n\x14org.meshtastic.protoB\x11RTTTLConfigProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.rtttl_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\021RTTTLConfigProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\021RTTTLConfigProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_RTTTLCONFIG']._serialized_start=56
_globals['_RTTTLCONFIG']._serialized_end=87
# @@protoc_insertion_point(module_scope)

View File

@@ -13,14 +13,14 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&meshtastic/protobuf/storeforward.proto\x12\x13meshtastic.protobuf\"\xc0\x07\n\x0fStoreAndForward\x12@\n\x02rr\x18\x01 \x01(\x0e\x32\x34.meshtastic.protobuf.StoreAndForward.RequestResponse\x12@\n\x05stats\x18\x02 \x01(\x0b\x32/.meshtastic.protobuf.StoreAndForward.StatisticsH\x00\x12?\n\x07history\x18\x03 \x01(\x0b\x32,.meshtastic.protobuf.StoreAndForward.HistoryH\x00\x12\x43\n\theartbeat\x18\x04 \x01(\x0b\x32..meshtastic.protobuf.StoreAndForward.HeartbeatH\x00\x12\x0e\n\x04text\x18\x05 \x01(\x0cH\x00\x1a\xcd\x01\n\nStatistics\x12\x16\n\x0emessages_total\x18\x01 \x01(\r\x12\x16\n\x0emessages_saved\x18\x02 \x01(\r\x12\x14\n\x0cmessages_max\x18\x03 \x01(\r\x12\x0f\n\x07up_time\x18\x04 \x01(\r\x12\x10\n\x08requests\x18\x05 \x01(\r\x12\x18\n\x10requests_history\x18\x06 \x01(\r\x12\x11\n\theartbeat\x18\x07 \x01(\x08\x12\x12\n\nreturn_max\x18\x08 \x01(\r\x12\x15\n\rreturn_window\x18\t \x01(\r\x1aI\n\x07History\x12\x18\n\x10history_messages\x18\x01 \x01(\r\x12\x0e\n\x06window\x18\x02 \x01(\r\x12\x14\n\x0clast_request\x18\x03 \x01(\r\x1a.\n\tHeartbeat\x12\x0e\n\x06period\x18\x01 \x01(\r\x12\x11\n\tsecondary\x18\x02 \x01(\r\"\xbc\x02\n\x0fRequestResponse\x12\t\n\x05UNSET\x10\x00\x12\x10\n\x0cROUTER_ERROR\x10\x01\x12\x14\n\x10ROUTER_HEARTBEAT\x10\x02\x12\x0f\n\x0bROUTER_PING\x10\x03\x12\x0f\n\x0bROUTER_PONG\x10\x04\x12\x0f\n\x0bROUTER_BUSY\x10\x05\x12\x12\n\x0eROUTER_HISTORY\x10\x06\x12\x10\n\x0cROUTER_STATS\x10\x07\x12\x16\n\x12ROUTER_TEXT_DIRECT\x10\x08\x12\x19\n\x15ROUTER_TEXT_BROADCAST\x10\t\x12\x10\n\x0c\x43LIENT_ERROR\x10@\x12\x12\n\x0e\x43LIENT_HISTORY\x10\x41\x12\x10\n\x0c\x43LIENT_STATS\x10\x42\x12\x0f\n\x0b\x43LIENT_PING\x10\x43\x12\x0f\n\x0b\x43LIENT_PONG\x10\x44\x12\x10\n\x0c\x43LIENT_ABORT\x10jB\t\n\x07variantBj\n\x13\x63om.geeksville.meshB\x15StoreAndForwardProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&meshtastic/protobuf/storeforward.proto\x12\x13meshtastic.protobuf\"\xc0\x07\n\x0fStoreAndForward\x12@\n\x02rr\x18\x01 \x01(\x0e\x32\x34.meshtastic.protobuf.StoreAndForward.RequestResponse\x12@\n\x05stats\x18\x02 \x01(\x0b\x32/.meshtastic.protobuf.StoreAndForward.StatisticsH\x00\x12?\n\x07history\x18\x03 \x01(\x0b\x32,.meshtastic.protobuf.StoreAndForward.HistoryH\x00\x12\x43\n\theartbeat\x18\x04 \x01(\x0b\x32..meshtastic.protobuf.StoreAndForward.HeartbeatH\x00\x12\x0e\n\x04text\x18\x05 \x01(\x0cH\x00\x1a\xcd\x01\n\nStatistics\x12\x16\n\x0emessages_total\x18\x01 \x01(\r\x12\x16\n\x0emessages_saved\x18\x02 \x01(\r\x12\x14\n\x0cmessages_max\x18\x03 \x01(\r\x12\x0f\n\x07up_time\x18\x04 \x01(\r\x12\x10\n\x08requests\x18\x05 \x01(\r\x12\x18\n\x10requests_history\x18\x06 \x01(\r\x12\x11\n\theartbeat\x18\x07 \x01(\x08\x12\x12\n\nreturn_max\x18\x08 \x01(\r\x12\x15\n\rreturn_window\x18\t \x01(\r\x1aI\n\x07History\x12\x18\n\x10history_messages\x18\x01 \x01(\r\x12\x0e\n\x06window\x18\x02 \x01(\r\x12\x14\n\x0clast_request\x18\x03 \x01(\r\x1a.\n\tHeartbeat\x12\x0e\n\x06period\x18\x01 \x01(\r\x12\x11\n\tsecondary\x18\x02 \x01(\r\"\xbc\x02\n\x0fRequestResponse\x12\t\n\x05UNSET\x10\x00\x12\x10\n\x0cROUTER_ERROR\x10\x01\x12\x14\n\x10ROUTER_HEARTBEAT\x10\x02\x12\x0f\n\x0bROUTER_PING\x10\x03\x12\x0f\n\x0bROUTER_PONG\x10\x04\x12\x0f\n\x0bROUTER_BUSY\x10\x05\x12\x12\n\x0eROUTER_HISTORY\x10\x06\x12\x10\n\x0cROUTER_STATS\x10\x07\x12\x16\n\x12ROUTER_TEXT_DIRECT\x10\x08\x12\x19\n\x15ROUTER_TEXT_BROADCAST\x10\t\x12\x10\n\x0c\x43LIENT_ERROR\x10@\x12\x12\n\x0e\x43LIENT_HISTORY\x10\x41\x12\x10\n\x0c\x43LIENT_STATS\x10\x42\x12\x0f\n\x0b\x43LIENT_PING\x10\x43\x12\x0f\n\x0b\x43LIENT_PONG\x10\x44\x12\x10\n\x0c\x43LIENT_ABORT\x10jB\t\n\x07variantBk\n\x14org.meshtastic.protoB\x15StoreAndForwardProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.storeforward_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\025StoreAndForwardProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\025StoreAndForwardProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_STOREANDFORWARD']._serialized_start=64
_globals['_STOREANDFORWARD']._serialized_end=1024
_globals['_STOREANDFORWARD_STATISTICS']._serialized_start=366

File diff suppressed because one or more lines are too long

View File

@@ -199,6 +199,14 @@ class _TelemetrySensorTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wra
"""
SEN5X PM SENSORS
"""
TSL2561: _TelemetrySensorType.ValueType # 44
"""
TSL2561 light sensor
"""
BH1750: _TelemetrySensorType.ValueType # 45
"""
BH1750 light sensor
"""
class TelemetrySensorType(_TelemetrySensorType, metaclass=_TelemetrySensorTypeEnumTypeWrapper):
"""
@@ -381,6 +389,14 @@ SEN5X: TelemetrySensorType.ValueType # 43
"""
SEN5X PM SENSORS
"""
TSL2561: TelemetrySensorType.ValueType # 44
"""
TSL2561 light sensor
"""
BH1750: TelemetrySensorType.ValueType # 45
"""
BH1750 light sensor
"""
global___TelemetrySensorType = TelemetrySensorType
@typing.final
@@ -1018,6 +1034,7 @@ class LocalStats(google.protobuf.message.Message):
NUM_TX_RELAY_CANCELED_FIELD_NUMBER: builtins.int
HEAP_TOTAL_BYTES_FIELD_NUMBER: builtins.int
HEAP_FREE_BYTES_FIELD_NUMBER: builtins.int
NUM_TX_DROPPED_FIELD_NUMBER: builtins.int
uptime_seconds: builtins.int
"""
How long the device has been running since the last reboot (in seconds)
@@ -1072,6 +1089,10 @@ class LocalStats(google.protobuf.message.Message):
"""
Number of bytes free in the heap
"""
num_tx_dropped: builtins.int
"""
Number of packets that were dropped because the transmit queue was full.
"""
def __init__(
self,
*,
@@ -1088,8 +1109,9 @@ class LocalStats(google.protobuf.message.Message):
num_tx_relay_canceled: builtins.int = ...,
heap_total_bytes: builtins.int = ...,
heap_free_bytes: builtins.int = ...,
num_tx_dropped: builtins.int = ...,
) -> None: ...
def ClearField(self, field_name: typing.Literal["air_util_tx", b"air_util_tx", "channel_utilization", b"channel_utilization", "heap_free_bytes", b"heap_free_bytes", "heap_total_bytes", b"heap_total_bytes", "num_online_nodes", b"num_online_nodes", "num_packets_rx", b"num_packets_rx", "num_packets_rx_bad", b"num_packets_rx_bad", "num_packets_tx", b"num_packets_tx", "num_rx_dupe", b"num_rx_dupe", "num_total_nodes", b"num_total_nodes", "num_tx_relay", b"num_tx_relay", "num_tx_relay_canceled", b"num_tx_relay_canceled", "uptime_seconds", b"uptime_seconds"]) -> None: ...
def ClearField(self, field_name: typing.Literal["air_util_tx", b"air_util_tx", "channel_utilization", b"channel_utilization", "heap_free_bytes", b"heap_free_bytes", "heap_total_bytes", b"heap_total_bytes", "num_online_nodes", b"num_online_nodes", "num_packets_rx", b"num_packets_rx", "num_packets_rx_bad", b"num_packets_rx_bad", "num_packets_tx", b"num_packets_tx", "num_rx_dupe", b"num_rx_dupe", "num_total_nodes", b"num_total_nodes", "num_tx_dropped", b"num_tx_dropped", "num_tx_relay", b"num_tx_relay", "num_tx_relay_canceled", b"num_tx_relay_canceled", "uptime_seconds", b"uptime_seconds"]) -> None: ...
global___LocalStats = LocalStats

View File

@@ -13,14 +13,14 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n meshtastic/protobuf/xmodem.proto\x12\x13meshtastic.protobuf\"\xbf\x01\n\x06XModem\x12\x34\n\x07\x63ontrol\x18\x01 \x01(\x0e\x32#.meshtastic.protobuf.XModem.Control\x12\x0b\n\x03seq\x18\x02 \x01(\r\x12\r\n\x05\x63rc16\x18\x03 \x01(\r\x12\x0e\n\x06\x62uffer\x18\x04 \x01(\x0c\"S\n\x07\x43ontrol\x12\x07\n\x03NUL\x10\x00\x12\x07\n\x03SOH\x10\x01\x12\x07\n\x03STX\x10\x02\x12\x07\n\x03\x45OT\x10\x04\x12\x07\n\x03\x41\x43K\x10\x06\x12\x07\n\x03NAK\x10\x15\x12\x07\n\x03\x43\x41N\x10\x18\x12\t\n\x05\x43TRLZ\x10\x1a\x42\x61\n\x13\x63om.geeksville.meshB\x0cXmodemProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n meshtastic/protobuf/xmodem.proto\x12\x13meshtastic.protobuf\"\xbf\x01\n\x06XModem\x12\x34\n\x07\x63ontrol\x18\x01 \x01(\x0e\x32#.meshtastic.protobuf.XModem.Control\x12\x0b\n\x03seq\x18\x02 \x01(\r\x12\r\n\x05\x63rc16\x18\x03 \x01(\r\x12\x0e\n\x06\x62uffer\x18\x04 \x01(\x0c\"S\n\x07\x43ontrol\x12\x07\n\x03NUL\x10\x00\x12\x07\n\x03SOH\x10\x01\x12\x07\n\x03STX\x10\x02\x12\x07\n\x03\x45OT\x10\x04\x12\x07\n\x03\x41\x43K\x10\x06\x12\x07\n\x03NAK\x10\x15\x12\x07\n\x03\x43\x41N\x10\x18\x12\t\n\x05\x43TRLZ\x10\x1a\x42\x62\n\x14org.meshtastic.protoB\x0cXmodemProtosZ\"github.com/meshtastic/go/generated\xaa\x02\x14Meshtastic.Protobufs\xba\x02\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'meshtastic.protobuf.xmodem_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023com.geeksville.meshB\014XmodemProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
DESCRIPTOR._serialized_options = b'\n\024org.meshtastic.protoB\014XmodemProtosZ\"github.com/meshtastic/go/generated\252\002\024Meshtastic.Protobufs\272\002\000'
_globals['_XMODEM']._serialized_start=58
_globals['_XMODEM']._serialized_end=249
_globals['_XMODEM_CONTROL']._serialized_start=166

57
meshview/__version__.py Normal file
View File

@@ -0,0 +1,57 @@
"""Version information for MeshView."""
import subprocess
from pathlib import Path
__version__ = "3.0.4"
__release_date__ = "2026-1-24"
def get_git_revision():
    """Return the current git commit hash, or "unknown" if unavailable.

    Runs ``git rev-parse HEAD`` with the repository root (the parent of
    this package directory) as the working directory. Any failure -- git
    not installed, not a git checkout, permission problems, or a stalled
    subprocess -- yields "unknown" instead of raising, so importing this
    module can never fail because of git.
    """
    try:
        repo_dir = Path(__file__).parent.parent
        result = subprocess.run(
            ["git", "rev-parse", "HEAD"],
            capture_output=True,
            text=True,
            check=True,
            cwd=repo_dir,
            timeout=5,  # don't hang module import if git stalls (e.g. slow FS)
        )
        return result.stdout.strip()
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired, OSError):
        # OSError covers FileNotFoundError (git missing) and PermissionError;
        # all of these mean "no revision available".
        return "unknown"
def get_git_revision_short():
    """Return the abbreviated git commit hash, or "unknown" if unavailable.

    Same contract as ``get_git_revision`` but uses
    ``git rev-parse --short HEAD``. Never raises: any subprocess failure,
    missing git binary, or timeout results in "unknown".
    """
    try:
        repo_dir = Path(__file__).parent.parent
        result = subprocess.run(
            ["git", "rev-parse", "--short", "HEAD"],
            capture_output=True,
            text=True,
            check=True,
            cwd=repo_dir,
            timeout=5,  # don't hang module import if git stalls
        )
        return result.stdout.strip()
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired, OSError):
        # OSError covers FileNotFoundError (git missing) and PermissionError.
        return "unknown"
def get_version_info():
    """Return complete version information as a dict.

    Returns:
        dict with keys ``version``, ``release_date``, ``git_revision``
        and ``git_revision_short``.

    Uses the git revision values cached at import time (``_git_revision``
    and ``_git_revision_short``) instead of spawning git twice on every
    call; the revision cannot change within the lifetime of the process,
    and the module already computes the cache precisely for this purpose.
    """
    return {
        "version": __version__,
        "release_date": __release_date__,
        "git_revision": _git_revision,
        "git_revision_short": _git_revision_short,
    }
# Cache git info at import time for performance: spawning git is slow and
# the revision is fixed for the lifetime of the process.
_git_revision = get_git_revision()
_git_revision_short = get_git_revision_short()
# Full version string for display, e.g. "3.0.4 ~ 2026-1-24".
__version_string__ = f"{__version__} ~ {__release_date__}"

View File

@@ -1,10 +1,12 @@
import configparser
import argparse
import configparser
# Parse command-line arguments
parser = argparse.ArgumentParser(description="MeshView Configuration Loader")
parser.add_argument("--config", type=str, default="config.ini", help="Path to config.ini file (default: config.ini)")
args = parser.parse_args()
parser.add_argument(
"--config", type=str, default="config.ini", help="Path to config.ini file (default: config.ini)"
)
args, _ = parser.parse_known_args()
# Initialize config parser
config_parser = configparser.ConfigParser()
@@ -12,4 +14,3 @@ if not config_parser.read(args.config):
raise FileNotFoundError(f"Config file '{args.config}' not found! Ensure the file exists.")
CONFIG = {section: dict(config_parser.items(section)) for section in config_parser.sections()}

View File

@@ -1,32 +1,34 @@
from sqlalchemy.engine.url import make_url
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from meshview import models
from sqlalchemy.ext.asyncio import async_sessionmaker
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
engine = None
async_session = None
def init_database(database_connection_string, read_only=False):
def init_database(database_connection_string):
global engine, async_session
kwargs = {"echo": False}
url = make_url(database_connection_string)
connect_args = {}
if database_connection_string.startswith("sqlite"):
if read_only:
# Ensure SQLite is opened in read-only mode
database_connection_string += "?mode=ro"
kwargs["connect_args"] = {"uri": True}
else:
kwargs["connect_args"] = {"timeout": 60}
else:
kwargs["pool_size"] = 20
kwargs["max_overflow"] = 50
if url.drivername.startswith("sqlite"):
query = dict(url.query)
query.setdefault("mode", "ro")
url = url.set(query=query)
connect_args["uri"] = True
if connect_args:
kwargs["connect_args"] = connect_args
engine = create_async_engine(url, **kwargs)
async_session = async_sessionmaker(
bind=engine,
class_=AsyncSession,
expire_on_commit=False,
)
engine = create_async_engine(database_connection_string, **kwargs)
async_session = async_sessionmaker( bind=engine,
class_=AsyncSession,
expire_on_commit=False,
)
async def create_tables():
async with engine.begin() as conn:

View File

@@ -1,16 +1,16 @@
from meshtastic.protobuf.mqtt_pb2 import MapReport
from meshtastic.protobuf.portnums_pb2 import PortNum
from google.protobuf.message import DecodeError
from meshtastic.protobuf.mesh_pb2 import (
Position,
MeshPacket,
NeighborInfo,
NodeInfo,
User,
Position,
RouteDiscovery,
Routing,
MeshPacket,
User,
)
from meshtastic.protobuf.mqtt_pb2 import MapReport
from meshtastic.protobuf.portnums_pb2 import PortNum
from meshtastic.protobuf.telemetry_pb2 import Telemetry
from google.protobuf.message import DecodeError
def text_message(payload):
@@ -25,7 +25,7 @@ DECODE_MAP = {
PortNum.TRACEROUTE_APP: RouteDiscovery.FromString,
PortNum.ROUTING_APP: Routing.FromString,
PortNum.TEXT_MESSAGE_APP: text_message,
PortNum.MAP_REPORT_APP: MapReport.FromString
PortNum.MAP_REPORT_APP: MapReport.FromString,
}

212
meshview/lang/en.json Normal file
View File

@@ -0,0 +1,212 @@
{
"base": {
"chat": "Chat",
"nodes": "Nodes",
"everything": "See Everything",
"graphs": "Mesh Graphs",
"net": "Weekly Net",
"map": "Live Map",
"stats": "Stats",
"top": "Top Traffic Nodes",
"footer": "Visit <strong><a href=\"https://github.com/pablorevilla-meshtastic/meshview\">Meshview</a></strong> on GitHub",
"node_id": "Node ID",
"go_to_node": "Go to Node",
"all": "All",
"portnum_options": {
"1": "Text Message",
"3": "Position",
"4": "Node Info",
"67": "Telemetry",
"70": "Traceroute",
"71": "Neighbor Info"
}
},
"chat": {
"chat_title": "Chats:",
"replying_to": "Replying to:",
"view_packet_details": "View packet details"
},
"nodelist": {
"search_placeholder": "Search by name or ID...",
"all_roles": "All Roles",
"all_channels": "All Channels",
"all_hw": "All HW Models",
"all_firmware": "All Firmware",
"show_favorites": "⭐ Show Favorites",
"show_all": "⭐ Show All",
"export_csv": "Export CSV",
"clear_filters": "Clear Filters",
"showing_nodes": "Showing",
"nodes_suffix": "nodes",
"loading_nodes": "Loading nodes...",
"error_loading_nodes": "Error loading nodes",
"no_nodes_found": "No nodes found",
"short_name": "Short",
"long_name": "Long Name",
"hw_model": "HW Model",
"firmware": "Firmware",
"role": "Role",
"last_lat": "Last Latitude",
"last_long": "Last Longitude",
"channel": "Channel",
"last_seen": "Last Seen",
"favorite": "Favorite",
"time_just_now": "just now",
"time_min_ago": "min ago",
"time_hr_ago": "hr ago",
"time_day_ago": "day ago",
"time_days_ago": "days ago"
},
"net": {
"net_title": "Weekly Net:",
"total_messages": "Number of messages:",
"view_packet_details": "More details"
},
"map": {
"show_routers_only": "Show Routers Only",
"share_view": "Share This View",
"reset_filters": "Reset Filters To Defaults",
"channel_label": "Channel:",
"model_label": "Model:",
"role_label": "Role:",
"last_seen": "Last seen:",
"firmware": "Firmware:",
"link_copied": "Link Copied!",
"legend_traceroute": "Traceroute (with arrows)",
"legend_neighbor": "Neighbor"
},
"stats":
{
"mesh_stats_summary": "Mesh Statistics - Summary (all available in Database)",
"total_nodes": "Total Nodes",
"total_packets": "Total Packets",
"total_packets_seen": "Total Packets Seen",
"packets_per_day_all": "Packets per Day - All Ports (Last 14 Days)",
"packets_per_day_text": "Packets per Day - Text Messages (Port 1, Last 14 Days)",
"packets_per_hour_all": "Packets per Hour - All Ports",
"packets_per_hour_text": "Packets per Hour - Text Messages (Port 1)",
"packet_types_last_24h": "Packet Types - Last 24 Hours",
"hardware_breakdown": "Hardware Breakdown",
"role_breakdown": "Role Breakdown",
"channel_breakdown": "Channel Breakdown",
"expand_chart": "Expand Chart",
"export_csv": "Export CSV",
"all_channels": "All Channels",
"node_id": "Node ID"
},
"top": {
"top_traffic_nodes": "Top Nodes Traffic",
"channel": "Channel",
"search": "Search",
"search_placeholder": "Search nodes...",
"long_name": "Long Name",
"short_name": "Short Name",
"packets_sent": "Sent (24h)",
"times_seen": "Seen (24h)",
"avg_gateways": "Avg Gateways",
"showing_nodes": "Showing",
"nodes_suffix": "nodes"
},
"nodegraph":
{
"channel_label": "Channel:",
"search_node_placeholder": "Search node...",
"search_button": "Search",
"long_name_label": "Long Name:",
"short_name_label": "Short Name:",
"role_label": "Role:",
"hw_model_label": "Hardware Model:",
"node_not_found": "Node not found in current channel!"
},
"firehose":
{
"live_feed": "📡 Live Feed",
"pause": "Pause",
"resume": "Resume",
"time": "Time",
"packet_id": "Packet ID",
"from": "From",
"to": "To",
"port": "Port",
"links": "Links",
"unknown_app": "UNKNOWN APP",
"text_message": "Text Message",
"position": "Position",
"node_info": "Node Info",
"routing": "Routing",
"administration": "Administration",
"waypoint": "Waypoint",
"store_forward": "Store Forward",
"telemetry": "Telemetry",
"trace_route": "Trace Route",
"neighbor_info": "Neighbor Info",
"direct_to_mqtt": "direct to MQTT",
"all": "All",
"map": "Map",
"graph": "Graph"
},
"node": {
"specifications": "Specifications",
"node_id": "Node ID",
"long_name": "Long Name",
"short_name": "Short Name",
"hw_model": "Hardware Model",
"firmware": "Firmware",
"role": "Role",
"channel": "Channel",
"latitude": "Latitude",
"longitude": "Longitude",
"last_update": "Last Update",
"battery_voltage": "Battery & Voltage",
"air_channel": "Air & Channel Utilization",
"environment": "Environment Metrics",
"neighbors_chart": "Neighbors (Signal-to-Noise)",
"expand": "Expand",
"export_csv": "Export CSV",
"time": "Time",
"packet_id": "Packet ID",
"from": "From",
"to": "To",
"port": "Port",
"direct_to_mqtt": "Direct to MQTT",
"all_broadcast": "All",
"statistics": "Statistics",
"last_24h": "24h",
"packets_sent": "Packets sent",
"times_seen": "Times seen"
},
"packet": {
"loading": "Loading packet information...",
"packet_id_label": "Packet ID",
"from_node": "From Node",
"to_node": "To Node",
"channel": "Channel",
"port": "Port",
"raw_payload": "Raw Payload",
"decoded_telemetry": "Decoded Telemetry",
"location": "Location",
"seen_by": "Seen By",
"gateway": "Gateway",
"rssi": "RSSI",
"snr": "SNR",
"hops": "Hop",
"time": "Time",
"packet_source": "Packet Source",
"distance": "Distance",
"node_id_short": "Node ID",
"all_broadcast": "All",
"direct_to_mqtt": "Direct to MQTT"
}
}

197
meshview/lang/es.json Normal file
View File

@@ -0,0 +1,197 @@
{
"base": {
"chat": "Conversaciones",
"nodes": "Nodos",
"everything": "Mostrar todo",
"graphs": "Gráficos de la Malla",
"net": "Red Semanal",
"map": "Mapa en Vivo",
"stats": "Estadísticas",
"top": "Nodos con Mayor Tráfico",
"footer": "Visita <strong><a href=\"https://github.com/pablorevilla-meshtastic/meshview\">Meshview</a></strong> en Github.",
"node_id": "ID de Nodo",
"go_to_node": "Ir al nodo",
"all": "Todos",
"portnum_options": {
"1": "Mensaje de Texto",
"3": "Ubicación",
"4": "Información del Nodo",
"67": "Telemetría",
"70": "Traceroute",
"71": "Información de Vecinos"
}
},
"chat": {
"chat_title": "Conversaciones:",
"replying_to": "Respondiendo a:",
"view_packet_details": "Ver detalles del paquete"
},
"nodelist": {
"search_placeholder": "Buscar por nombre o ID...",
"all_roles": "Todos los roles",
"all_channels": "Todos los canales",
"all_hw": "Todos los modelos",
"all_firmware": "Todo el firmware",
"show_favorites": "⭐ Mostrar favoritos",
"show_all": "⭐ Mostrar todos",
"export_csv": "Exportar CSV",
"clear_filters": "Limpiar filtros",
"showing_nodes": "Mostrando",
"nodes_suffix": "nodos",
"loading_nodes": "Cargando nodos...",
"error_loading_nodes": "Error al cargar nodos",
"no_nodes_found": "No se encontraron nodos",
"short_name": "Corto",
"long_name": "Nombre largo",
"hw_model": "Modelo HW",
"firmware": "Firmware",
"role": "Rol",
"last_lat": "Última latitud",
"last_long": "Última longitud",
"channel": "Canal",
"last_seen": "Última vez visto",
"favorite": "Favorito",
"time_just_now": "justo ahora",
"time_min_ago": "min atrás",
"time_hr_ago": "h atrás",
"time_day_ago": "día atrás",
"time_days_ago": "días atrás"
},
"net": {
"net_title": "Red Semanal:",
"total_messages": "Número de mensajes:",
"view_packet_details": "Más Detalles"
},
"map": {
"filter_routers_only": "Mostrar solo enrutadores",
"share_view": "Compartir esta vista",
"reset_filters": "Restablecer filtros",
"channel_label": "Canal:",
"model_label": "Modelo:",
"role_label": "Rol:",
"last_seen": "Visto por última vez:",
"firmware": "Firmware:",
"link_copied": "¡Enlace copiado!",
"legend_traceroute": "Ruta de traceroute (flechas de dirección)",
"legend_neighbor": "Vínculo de vecinos"
},
"stats": {
"mesh_stats_summary": "Estadísticas de la Malla - Resumen (completas en la base de datos)",
"total_nodes": "Nodos Totales",
"total_packets": "Paquetes Totales",
"total_packets_seen": "Paquetes Totales Vistos",
"packets_per_day_all": "Paquetes por Día - Todos los Puertos (Últimos 14 Días)",
"packets_per_day_text": "Paquetes por Día - Mensajes de Texto (Puerto 1, Últimos 14 Días)",
"packets_per_hour_all": "Paquetes por Hora - Todos los Puertos",
"packets_per_hour_text": "Paquetes por Hora - Mensajes de Texto (Puerto 1)",
"packet_types_last_24h": "Tipos de Paquetes - Últimas 24 Horas",
"hardware_breakdown": "Distribución de Hardware",
"role_breakdown": "Distribución de Roles",
"channel_breakdown": "Distribución de Canales",
"expand_chart": "Ampliar Gráfico",
"export_csv": "Exportar CSV",
"all_channels": "Todos los Canales"
},
"top": {
"top_traffic_nodes": "Tráfico de Nodos (24h)",
"channel": "Canal",
"search": "Buscar",
"search_placeholder": "Buscar nodos...",
"long_name": "Nombre Largo",
"short_name": "Nombre Corto",
"packets_sent": "Enviados (24h)",
"times_seen": "Visto (24h)",
"avg_gateways": "Promedio de Gateways",
"showing_nodes": "Mostrando",
"nodes_suffix": "nodos"
},
"nodegraph": {
"channel_label": "Canal:",
"search_placeholder": "Buscar nodo...",
"search_button": "Buscar",
"long_name_label": "Nombre completo:",
"short_name_label": "Nombre corto:",
"role_label": "Rol:",
"hw_model_label": "Modelo de hardware:",
"traceroute": "Traceroute",
"neighbor": "Vecino",
"other": "Otro",
"unknown": "Desconocido",
"node_not_found": "¡Nodo no encontrado en el canal actual!"
},
"firehose": {
"live_feed": "📡 Flujo en vivo",
"pause": "Pausar",
"resume": "Reanudar",
"time": "Hora",
"packet_id": "ID de paquete",
"from": "De",
"to": "A",
"port": "Puerto",
"direct_to_mqtt": "Directo a MQTT",
"all_broadcast": "Todos"
},
"node": {
"specifications": "Especificaciones",
"node_id": "ID de Nodo",
"long_name": "Nombre Largo",
"short_name": "Nombre Corto",
"hw_model": "Modelo de Hardware",
"firmware": "Firmware",
"role": "Rol",
"channel": "Canal",
"latitude": "Latitud",
"longitude": "Longitud",
"last_update": "Última Actualización",
"battery_voltage": "Batería y voltaje",
"air_channel": "Utilización del aire y del canal",
"environment": "Métricas Ambientales",
"neighbors_chart": "Vecinos (Relación Señal/Ruido)",
"expand": "Ampliar",
"export_csv": "Exportar CSV",
"time": "Hora",
"packet_id": "ID del Paquete",
"from": "De",
"to": "A",
"port": "Puerto",
"direct_to_mqtt": "Directo a MQTT",
"all_broadcast": "Todos",
"statistics": "Estadísticas",
"last_24h": "24h",
"packets_sent": "Paquetes enviados",
"times_seen": "Veces visto"
},
"packet": {
"loading": "Cargando información del paquete...",
"packet_id_label": "ID del Paquete",
"from_node": "De",
"to_node": "A",
"channel": "Canal",
"port": "Puerto",
"raw_payload": "Payload sin procesar",
"decoded_telemetry": "Telemetría Decodificada",
"location": "Ubicación",
"seen_by": "Visto por",
"gateway": "Gateway",
"rssi": "RSSI",
"snr": "SNR",
"hops": "Saltos",
"time": "Hora",
"packet_source": "Origen del Paquete",
"distance": "Distancia",
"node_id_short": "ID de Nodo",
"all_broadcast": "Todos",
"direct_to_mqtt": "Directo a MQTT",
"signal": "Señal"
}
}

248
meshview/migrations.py Normal file
View File

@@ -0,0 +1,248 @@
"""
Database migration management for MeshView.
This module provides utilities for:
- Running Alembic migrations programmatically
- Checking database schema versions
- Coordinating migrations between writer and reader apps
"""
import asyncio
import logging
from pathlib import Path
from alembic.config import Config
from alembic.runtime.migration import MigrationContext
from alembic.script import ScriptDirectory
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncEngine
from alembic import command
logger = logging.getLogger(__name__)
def get_alembic_config(database_url: str) -> Config:
    """Build an Alembic ``Config`` pointed at the given database.

    Args:
        database_url: SQLAlchemy database connection string.

    Returns:
        Configured Alembic Config object.
    """
    # alembic.ini lives in the project root, one level above this package.
    project_root = Path(__file__).parent.parent
    cfg = Config(str(project_root / "alembic.ini"))
    cfg.set_main_option("sqlalchemy.url", database_url)
    return cfg
async def get_current_revision(engine: AsyncEngine) -> str | None:
    """Read the schema revision currently stamped on the database.

    Args:
        engine: Async SQLAlchemy engine.

    Returns:
        Current revision string, or None if no migrations have been applied.
    """
    def _read(sync_conn):
        # MigrationContext only works with a synchronous connection,
        # hence the run_sync() bridge below.
        return MigrationContext.configure(sync_conn).get_current_revision()

    async with engine.connect() as connection:
        return await connection.run_sync(_read)
async def get_head_revision(database_url: str) -> str | None:
    """Return the newest revision defined by the migration scripts.

    NOTE(review): this performs synchronous filesystem work despite the
    ``async`` declaration; it stays async only so existing callers can
    uniformly ``await`` it.

    Args:
        database_url: Database connection string.

    Returns:
        Head revision string, or None if no migration scripts exist.
    """
    scripts = ScriptDirectory.from_config(get_alembic_config(database_url))
    return scripts.get_current_head()
async def is_database_up_to_date(engine: AsyncEngine, database_url: str) -> bool:
    """Check whether the database is at the latest schema version.

    Args:
        engine: Async SQLAlchemy engine.
        database_url: Database connection string.

    Returns:
        True if the stamped revision matches the head of the migration
        scripts (or no scripts exist at all), False otherwise.
    """
    applied = await get_current_revision(engine)
    latest = await get_head_revision(database_url)
    # With no migration scripts at all there is nothing to be behind on.
    return True if latest is None else applied == latest
def run_migrations(database_url: str) -> None:
    """Run all pending migrations to bring the database up to date.

    This is a synchronous, blocking operation that runs Alembic's
    ``upgrade head``. It should be called by the writer app on startup,
    before serving traffic.

    Args:
        database_url: Database connection string.

    Raises:
        Exception: re-raises whatever Alembic raises if the upgrade fails.
    """
    # Function-scoped import kept, but hoisted above the first executable
    # statement instead of sitting mid-function.
    import sys

    logger.info("Running database migrations...")
    sys.stdout.flush()
    config = get_alembic_config(database_url)
    try:
        logger.info("Calling alembic upgrade command...")
        sys.stdout.flush()
        command.upgrade(config, "head")
        logger.info("Database migrations completed successfully")
        sys.stdout.flush()
    except Exception as e:
        # NOTE(review): flushing sys.stdout only helps if logging writes to
        # stdout -- confirm the logging handler configuration.
        logger.error(f"Error running migrations: {e}")
        raise
async def wait_for_migrations(
    engine: AsyncEngine, database_url: str, max_retries: int = 30, retry_delay: int = 2
) -> bool:
    """Wait for database migrations to complete.

    This should be called by the reader app to wait until the database
    schema is up to date (i.e. the writer app has finished migrating)
    before proceeding.

    Args:
        engine: Async SQLAlchemy engine.
        database_url: Database connection string.
        max_retries: Maximum number of retry attempts.
        retry_delay: Seconds to wait between retries.

    Returns:
        True if the database is up to date, False if max retries exceeded.
    """
    for attempt in range(max_retries):
        try:
            if await is_database_up_to_date(engine, database_url):
                logger.info("Database schema is up to date")
                return True
            current = await get_current_revision(engine)
            head = await get_head_revision(database_url)
            logger.info(
                f"Database schema not up to date (current: {current}, head: {head}). "
                f"Waiting... (attempt {attempt + 1}/{max_retries})"
            )
        except Exception as e:
            logger.warning(
                f"Error checking database version (attempt {attempt + 1}/{max_retries}): {e}"
            )
        # Fix: don't sleep after the final attempt -- no retry follows it,
        # so the old unconditional sleep just delayed the failure return.
        if attempt + 1 < max_retries:
            await asyncio.sleep(retry_delay)
    logger.error(f"Database schema not up to date after {max_retries} attempts")
    return False
async def create_migration_status_table(engine: AsyncEngine) -> None:
    """
    Create a simple status table for migration coordination.
    This table can be used to signal when migrations are in progress.

    Idempotent: safe to call on every startup (CREATE TABLE IF NOT EXISTS
    plus insert-only-if-missing).

    Args:
        engine: Async SQLAlchemy engine
    """
    # engine.begin() wraps everything below in a single transaction.
    async with engine.begin() as conn:
        # CHECK (id = 1) constrains this to a single-row table: exactly
        # one global status record.
        await conn.execute(
            text("""
            CREATE TABLE IF NOT EXISTS migration_status (
                id INTEGER PRIMARY KEY CHECK (id = 1),
                in_progress BOOLEAN NOT NULL DEFAULT FALSE,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
            """)
        )
        result = await conn.execute(
            text("""
            SELECT 1 FROM migration_status WHERE id = 1
            """)
        )
        # Seed the singleton row on first run only; never overwrite an
        # existing flag value.
        if result.first() is None:
            await conn.execute(
                text("""
                INSERT INTO migration_status (id, in_progress)
                VALUES (1, FALSE)
                """)
            )
async def set_migration_in_progress(engine: AsyncEngine, in_progress: bool) -> None:
    """Persist the migration in-progress flag to the status table.

    Args:
        engine: Async SQLAlchemy engine.
        in_progress: True while a migration is running, False otherwise.
    """
    stmt = text("""
            UPDATE migration_status
            SET in_progress = :in_progress,
                updated_at = CURRENT_TIMESTAMP
            WHERE id = 1
            """)
    # engine.begin() commits the update on successful exit.
    async with engine.begin() as connection:
        await connection.execute(stmt, {"in_progress": in_progress})
async def is_migration_in_progress(engine: AsyncEngine) -> bool:
    """Report whether a migration is currently flagged as in progress.

    Args:
        engine: Async SQLAlchemy engine.

    Returns:
        True if the status row says a migration is running. Any failure
        (e.g. the status table does not exist yet) is deliberately treated
        as "no migration in progress" rather than raised.
    """
    try:
        async with engine.connect() as connection:
            result = await connection.execute(
                text("SELECT in_progress FROM migration_status WHERE id = 1")
            )
            row = result.fetchone()
            # Best-effort read: a missing row also means "not in progress".
            return bool(row[0]) if row else False
    except Exception:
        # Intentional swallow: if the table doesn't exist or the query
        # fails, assume no migration is in progress.
        return False

View File

@@ -1,8 +1,6 @@
from datetime import datetime
from sqlalchemy.orm import DeclarativeBase, foreign
from sqlalchemy import BigInteger, ForeignKey, Index, desc
from sqlalchemy.ext.asyncio import AsyncAttrs
from sqlalchemy.orm import mapped_column, relationship, Mapped
from sqlalchemy import ForeignKey, BigInteger, Index, desc
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
class Base(AsyncAttrs, DeclarativeBase):
@@ -22,18 +20,17 @@ class Node(Base):
last_lat: Mapped[int] = mapped_column(BigInteger, nullable=True)
last_long: Mapped[int] = mapped_column(BigInteger, nullable=True)
channel: Mapped[str] = mapped_column(nullable=True)
last_update: Mapped[datetime] = mapped_column(nullable=True)
first_seen_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
last_seen_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
__table_args__ = (
Index("idx_node_node_id", "node_id"),
Index("idx_node_first_seen_us", "first_seen_us"),
Index("idx_node_last_seen_us", "last_seen_us"),
)
def to_dict(self):
return {
column.name: getattr(self, column.name)
for column in self.__table__.columns
if column.name != "last_update"
}
return {column.name: getattr(self, column.name) for column in self.__table__.columns}
class Packet(Base):
@@ -51,13 +48,14 @@ class Packet(Base):
overlaps="from_node",
)
payload: Mapped[bytes] = mapped_column(nullable=True)
import_time: Mapped[datetime] = mapped_column(nullable=True)
import_time_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
channel: Mapped[str] = mapped_column(nullable=True)
__table_args__ = (
Index("idx_packet_from_node_id", "from_node_id"),
Index("idx_packet_to_node_id", "to_node_id"),
Index("idx_packet_import_time", desc("import_time")),
Index("idx_packet_import_time_us", desc("import_time_us")),
Index("idx_packet_from_node_time_us", "from_node_id", desc("import_time_us")),
)
@@ -77,15 +75,19 @@ class PacketSeen(Base):
rx_snr: Mapped[float] = mapped_column(nullable=True)
rx_rssi: Mapped[int] = mapped_column(nullable=True)
topic: Mapped[str] = mapped_column(nullable=True)
import_time: Mapped[datetime] = mapped_column(nullable=True)
import_time_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
__table_args__ = (
Index("idx_packet_seen_node_id", "node_id"),
# Index for /top endpoint performance - JOIN on packet_id
Index("idx_packet_seen_packet_id", "packet_id"),
Index("idx_packet_seen_import_time_us", "import_time_us"),
)
class Traceroute(Base):
__tablename__ = "traceroute"
id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
packet_id = mapped_column(ForeignKey("packet.id"))
packet: Mapped["Packet"] = relationship(
@@ -94,4 +96,11 @@ class Traceroute(Base):
gateway_node_id: Mapped[int] = mapped_column(BigInteger, nullable=True)
done: Mapped[bool] = mapped_column(nullable=True)
route: Mapped[bytes] = mapped_column(nullable=True)
import_time: Mapped[datetime] = mapped_column(nullable=True)
route_return: Mapped[bytes] = mapped_column(nullable=True)
import_time_us: Mapped[int] = mapped_column(BigInteger, nullable=True)
__table_args__ = (
Index("idx_traceroute_packet_id", "packet_id"),
Index("idx_traceroute_import_time_us", "import_time_us"),
)

View File

@@ -1,15 +1,35 @@
from sqlalchemy.engine.url import make_url
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
from sqlalchemy import event
from meshview import models
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
def init_database(database_connection_string):
    """Create the module-global async engine and session factory.

    Args:
        database_connection_string: SQLAlchemy URL, e.g.
            ``sqlite+aiosqlite:///db.sqlite`` or a PostgreSQL async URL.

    Side effects:
        Sets the module globals ``engine`` and ``async_session``.
    """
    # NOTE: stale pre-refactor lines (pool_size/max_overflow kwargs and an
    # earlier create_async_engine call with connect_args={"timeout": 60})
    # were left interleaved here by a bad merge; the engine was being built
    # twice. Only the URL-based construction below is kept.
    global engine, async_session
    url = make_url(database_connection_string)
    kwargs = {"echo": False}
    if url.drivername.startswith("sqlite"):
        # Long driver-level timeout so concurrent writers wait on the lock
        # instead of failing with "database is locked".
        kwargs["connect_args"] = {"timeout": 900}  # seconds
    engine = create_async_engine(url, **kwargs)
    # Enforce SQLite pragmas on every new DB connection.
    if url.drivername.startswith("sqlite"):
        @event.listens_for(engine.sync_engine, "connect")
        def _set_sqlite_pragmas(dbapi_conn, _):
            cursor = dbapi_conn.cursor()
            cursor.execute("PRAGMA journal_mode=WAL;")
            cursor.execute("PRAGMA busy_timeout=900000;")  # ms
            cursor.execute("PRAGMA synchronous=NORMAL;")
            cursor.close()
    async_session = async_sessionmaker(engine, expire_on_commit=False)
async def create_tables():
    # Create any tables missing from the ORM metadata.
    # create_all is idempotent: existing tables are left untouched.
    async with engine.begin() as conn:
        await conn.run_sync(models.Base.metadata.create_all)

View File

@@ -1,32 +1,106 @@
import base64
import asyncio
import base64
import logging
import random
import time
import aiomqtt
from google.protobuf.message import DecodeError
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from google.protobuf.message import DecodeError
from meshtastic.protobuf.mqtt_pb2 import ServiceEnvelope
from meshview.config import CONFIG
# NOTE(review): KEY and PRIMARY_KEY decode the identical base64 value;
# KEY looks like pre-rename residue (a stale line inside _try_decrypt
# still references it) — confirm nothing else uses it before removing.
KEY = base64.b64decode("1PG7OiApB1nwvP+rz05pAQ==")
# Default primary channel key tried first when decrypting packets.
PRIMARY_KEY = base64.b64decode("1PG7OiApB1nwvP+rz05pAQ==")
# Process-wide logging: timestamp, source location and pid on every record.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(filename)s:%(lineno)d [pid:%(process)d] %(levelname)s - %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
)
logger = logging.getLogger(__name__)
def decrypt(packet):
if packet.HasField("decoded"):
return
def _parse_skip_node_ids():
    """Read ``mqtt.skip_node_ids`` from CONFIG as a set of node ids.

    Accepts a comma-separated string (decimal or 0x-prefixed hex via
    ``int(..., 0)``), a single number, or a list of either — config
    loaders such as JSON/YAML may hand us real ints or lists. Invalid
    entries are logged and skipped rather than aborting startup.

    Returns:
        set[int]: node ids whose packets should be dropped.
    """
    mqtt_config = CONFIG.get("mqtt", {})
    raw_value = mqtt_config.get("skip_node_ids", "")
    if not raw_value:
        return set()
    if isinstance(raw_value, str):
        raw_value = raw_value.strip()
        if not raw_value:
            return set()
        values = [v.strip() for v in raw_value.split(",") if v.strip()]
    elif isinstance(raw_value, (list, tuple, set)):
        # A structured config loader already produced a sequence.
        values = list(raw_value)
    else:
        values = [raw_value]
    skip_ids = set()
    for value in values:
        if isinstance(value, int):
            # Already numeric: int(value, 0) would raise TypeError here,
            # which the old code mis-reported as an invalid id.
            skip_ids.add(value)
            continue
        try:
            skip_ids.add(int(str(value).strip(), 0))
        except (TypeError, ValueError):
            logger.warning("Invalid node id in mqtt.skip_node_ids: %s", value)
    return skip_ids
def _parse_secondary_keys():
    """Read ``mqtt.secondary_keys`` from CONFIG as decoded AES keys.

    Accepts a comma-separated string of base64 values or a list of them.
    Entries that are not strict base64, or that do not decode to a legal
    AES key length (16/24/32 bytes — what ``_try_decrypt`` feeds to
    ``algorithms.AES``), are logged and skipped so one bad key cannot
    break startup or fail on every packet later.

    Returns:
        list[bytes]: keys tried after PRIMARY_KEY when decrypting.
    """
    mqtt_config = CONFIG.get("mqtt", {})
    raw_value = mqtt_config.get("secondary_keys", "")
    if not raw_value:
        return []
    if isinstance(raw_value, str):
        raw_value = raw_value.strip()
        if not raw_value:
            return []
        values = [v.strip() for v in raw_value.split(",") if v.strip()]
    elif isinstance(raw_value, (list, tuple)):
        values = list(raw_value)
    else:
        values = [raw_value]
    keys = []
    for value in values:
        try:
            # validate=True rejects non-base64 characters instead of
            # silently dropping them (binascii.Error is a ValueError).
            key = base64.b64decode(value, validate=True)
        except (TypeError, ValueError):
            logger.warning("Invalid base64 key in mqtt.secondary_keys: %s", value)
            continue
        if len(key) not in (16, 24, 32):
            logger.warning(
                "Key in mqtt.secondary_keys decodes to invalid AES length %d: %s",
                len(key),
                value,
            )
            continue
        keys.append(key)
    return keys
SKIP_NODE_IDS = _parse_skip_node_ids()
SECONDARY_KEYS = _parse_secondary_keys()
def _try_decrypt(packet, key):
    """Attempt to decrypt ``packet.encrypted`` in place with ``key``.

    Builds the AES-CTR nonce from the packet id and sender id (each as
    8 little-endian bytes), decrypts the payload, and tries to parse the
    plaintext into the packet's ``decoded`` sub-message.

    Returns:
        True when the plaintext parsed successfully (``packet.decoded``
        is now populated), False on a protobuf DecodeError.
    """
    # Removed stale merge residue: a throwaway cipher built with the old
    # module constant KEY, and a dead `pass` before `return False`.
    packet_id = packet.id.to_bytes(8, "little")
    from_node_id = getattr(packet, "from").to_bytes(8, "little")
    nonce = packet_id + from_node_id
    cipher = Cipher(algorithms.AES(key), modes.CTR(nonce))
    decryptor = cipher.decryptor()
    raw_proto = decryptor.update(packet.encrypted) + decryptor.finalize()
    try:
        packet.decoded.ParseFromString(raw_proto)
    except DecodeError:
        return False
    return True
def decrypt(packet):
    """Populate ``packet.decoded`` by trying the primary key first, then
    each configured secondary key. Does nothing when the packet already
    carries a decoded payload; leaves it encrypted when no key works."""
    if packet.HasField("decoded"):
        return
    for candidate in (PRIMARY_KEY, *SECONDARY_KEYS):
        if _try_decrypt(packet, candidate):
            return
async def get_topic_envelopes(mqtt_server, mqtt_port, topics, mqtt_user, mqtt_passwd):
identifier = str(random.getrandbits(16))
msg_count = 0
start_time = None
while True:
try:
async with aiomqtt.Client(
@@ -36,10 +110,15 @@ async def get_topic_envelopes(mqtt_server, mqtt_port, topics, mqtt_user, mqtt_pa
password=mqtt_passwd,
identifier=identifier,
) as client:
logger.info(f"Connected to MQTT broker at {mqtt_server}:{mqtt_port}")
for topic in topics:
print(f"Subscribing to: {topic}")
logger.info(f"Subscribing to: {topic}")
await client.subscribe(topic)
# Reset start time when connected
if start_time is None:
start_time = time.time()
async for msg in client.messages:
try:
envelope = ServiceEnvelope.FromString(msg.payload)
@@ -51,12 +130,23 @@ async def get_topic_envelopes(mqtt_server, mqtt_port, topics, mqtt_user, mqtt_pa
if not envelope.packet.decoded:
continue
# Skip packets from specific node
if getattr(envelope.packet, "from", None) == 2144342101:
# Skip packets from configured node IDs
if getattr(envelope.packet, "from", None) in SKIP_NODE_IDS:
continue
msg_count += 1
# FIXME: make this interval configurable or time based
if (
msg_count % 10000 == 0
): # Log notice every 10000 messages (approx every hour at 3/sec)
elapsed_time = time.time() - start_time
msg_rate = msg_count / elapsed_time if elapsed_time > 0 else 0
logger.info(
f"Processed {msg_count} messages so far... ({msg_rate:.2f} msg/sec)"
)
yield msg.topic.value, envelope
except aiomqtt.MqttError as e:
print(f"MQTT error: {e}, reconnecting in 1s...")
logger.error(f"MQTT error: {e}, reconnecting in 1s...")
await asyncio.sleep(1)

View File

@@ -1,38 +1,50 @@
import datetime
import logging
import re
import time
from sqlalchemy import select
from sqlalchemy import update
from sqlalchemy.dialects.postgresql import insert as pg_insert
from sqlalchemy.dialects.sqlite import insert as sqlite_insert
from sqlalchemy.exc import IntegrityError
from meshtastic.protobuf.config_pb2 import Config
from meshtastic.protobuf.mesh_pb2 import HardwareModel
from meshtastic.protobuf.portnums_pb2 import PortNum
from meshtastic.protobuf.mesh_pb2 import User, HardwareModel
from meshview import mqtt_database
from meshview import decode_payload
from meshview.models import Packet, PacketSeen, Node, Traceroute
from meshview import decode_payload, mqtt_database
from meshview.models import Node, Packet, PacketSeen, Traceroute
logger = logging.getLogger(__name__)
async def process_envelope(topic, env):
# Checking if the received packet is a MAP_REPORT
# Update the node table with the firmware version
# MAP_REPORT_APP
if env.packet.decoded.portnum == PortNum.MAP_REPORT_APP:
# Extract the node ID from the packet and format the user ID
node_id = getattr(env.packet, "from")
user_id = f"!{node_id:0{8}x}"
# Decode the MAP report payload
map_report = decode_payload.decode_payload(PortNum.MAP_REPORT_APP, env.packet.decoded.payload)
map_report = decode_payload.decode_payload(
PortNum.MAP_REPORT_APP, env.packet.decoded.payload
)
# Establish an asynchronous database session
async with mqtt_database.async_session() as session:
try:
hw_model = HardwareModel.Name(map_report.hw_model) if hasattr(HardwareModel, 'Name') else "unknown"
role = Config.DeviceConfig.Role.Name(map_report.role) if hasattr(Config.DeviceConfig.Role,
'Name') else "unknown"
node = (await session.execute(select(Node).where(Node.node_id == node_id))).scalar_one_or_none()
hw_model = (
HardwareModel.Name(map_report.hw_model)
if hasattr(HardwareModel, "Name")
else "unknown"
)
role = (
Config.DeviceConfig.Role.Name(map_report.role)
if hasattr(Config.DeviceConfig.Role, "Name")
else "unknown"
)
node = (
await session.execute(select(Node).where(Node.node_id == node_id))
).scalar_one_or_none()
now_us = int(time.time() * 1_000_000)
# Some nodes might have uplink disabled for the default channel
# and only be sending map reports, so check if it exists yet
if node:
node.node_id = node_id
node.long_name = map_report.long_name
@@ -43,53 +55,92 @@ async def process_envelope(topic, env):
node.last_lat = map_report.latitude_i
node.last_long = map_report.longitude_i
node.firmware = map_report.firmware_version
node.last_update = datetime.datetime.now()
node.last_seen_us = now_us
if node.first_seen_us is None:
node.first_seen_us = now_us
else:
node = Node(
id=user_id, node_id=node_id,
long_name=map_report.long_name, short_name=map_report.short_name,
hw_model=hw_model, role=role, channel=env.channel_id,
id=user_id,
node_id=node_id,
long_name=map_report.long_name,
short_name=map_report.short_name,
hw_model=hw_model,
role=role,
channel=env.channel_id,
firmware=map_report.firmware_version,
last_lat=map_report.latitude_i, last_long=map_report.longitude_i,
last_update=datetime.datetime.now(),
last_lat=map_report.latitude_i,
last_long=map_report.longitude_i,
first_seen_us=now_us,
last_seen_us=now_us,
)
session.add(node)
except Exception as e:
print(f"Error processing MAP_REPORT_APP: {e}")
# Commit the changes to the database
await session.commit()
# This ignores any packet that does not have a ID
if not env.packet.id:
return
async with mqtt_database.async_session() as session:
# --- Packet insert with ON CONFLICT DO NOTHING
result = await session.execute(select(Packet).where(Packet.id == env.packet.id))
new_packet = False
packet = result.scalar_one_or_none()
if not packet:
new_packet = True
packet = Packet(
id=env.packet.id,
portnum=env.packet.decoded.portnum,
from_node_id=getattr(env.packet, "from"),
to_node_id=env.packet.to,
payload=env.packet.SerializeToString(),
import_time=datetime.datetime.now(),
channel=env.channel_id,
)
session.add(packet)
now_us = int(time.time() * 1_000_000)
packet_values = {
"id": env.packet.id,
"portnum": env.packet.decoded.portnum,
"from_node_id": getattr(env.packet, "from"),
"to_node_id": env.packet.to,
"payload": env.packet.SerializeToString(),
"import_time_us": now_us,
"channel": env.channel_id,
}
utc_time = datetime.datetime.fromtimestamp(now_us / 1_000_000, datetime.UTC)
dialect = session.get_bind().dialect.name
stmt = None
if dialect == "sqlite":
stmt = (
sqlite_insert(Packet)
.values(**packet_values)
.on_conflict_do_nothing(index_elements=["id"])
)
elif dialect == "postgresql":
stmt = (
pg_insert(Packet)
.values(**packet_values)
.on_conflict_do_nothing(index_elements=["id"])
)
if stmt is not None:
await session.execute(stmt)
else:
try:
async with session.begin_nested():
session.add(Packet(**packet_values))
await session.flush()
except IntegrityError:
pass
# --- PacketSeen (no conflict handling here, normal insert)
if not env.gateway_id:
print("WARNING: Missing gateway_id, skipping PacketSeen entry")
# Most likely a misconfiguration of a mqtt publisher?
return
else:
node_id = int(env.gateway_id[1:], 16)
result = await session.execute(
select(PacketSeen).where(
PacketSeen.packet_id == env.packet.id,
PacketSeen.node_id == int(env.gateway_id[1:], 16),
PacketSeen.node_id == node_id,
PacketSeen.rx_time == env.packet.rx_time,
)
)
seen = None
if not result.scalar_one_or_none():
now_us = int(time.time() * 1_000_000)
seen = PacketSeen(
packet_id=env.packet.id,
node_id=int(env.gateway_id[1:], 16),
@@ -100,20 +151,38 @@ async def process_envelope(topic, env):
hop_limit=env.packet.hop_limit,
hop_start=env.packet.hop_start,
topic=topic,
import_time=datetime.datetime.now(),
import_time_us=now_us,
)
session.add(seen)
# --- NODEINFO_APP handling
if env.packet.decoded.portnum == PortNum.NODEINFO_APP:
try:
user = decode_payload.decode_payload(PortNum.NODEINFO_APP, env.packet.decoded.payload)
user = decode_payload.decode_payload(
PortNum.NODEINFO_APP, env.packet.decoded.payload
)
if user and user.id:
node_id = int(user.id[1:], 16) if user.id[0] == "!" else None
hw_model = HardwareModel.Name(user.hw_model) if user.hw_model in HardwareModel.values() else f"unknown({user.hw_model})"
role = Config.DeviceConfig.Role.Name(user.role) if hasattr(Config.DeviceConfig.Role,
'Name') else "unknown"
if user.id[0] == "!" and re.fullmatch(r"[0-9a-fA-F]+", user.id[1:]):
node_id = int(user.id[1:], 16)
else:
node_id = None
node = (await session.execute(select(Node).where(Node.id == user.id))).scalar_one_or_none()
hw_model = (
HardwareModel.Name(user.hw_model)
if user.hw_model in HardwareModel.values()
else f"unknown({user.hw_model})"
)
role = (
Config.DeviceConfig.Role.Name(user.role)
if hasattr(Config.DeviceConfig.Role, "Name")
else "unknown"
)
node = (
await session.execute(select(Node).where(Node.id == user.id))
).scalar_one_or_none()
now_us = int(time.time() * 1_000_000)
if node:
node.node_id = node_id
@@ -122,48 +191,57 @@ async def process_envelope(topic, env):
node.hw_model = hw_model
node.role = role
node.channel = env.channel_id
node.last_update = datetime.datetime.now()
node.last_seen_us = now_us
if node.first_seen_us is None:
node.first_seen_us = now_us
else:
node = Node(
id=user.id, node_id=node_id,
long_name=user.long_name, short_name=user.short_name,
hw_model=hw_model, role=role, channel=env.channel_id,
last_update=datetime.datetime.now(),
id=user.id,
node_id=node_id,
long_name=user.long_name,
short_name=user.short_name,
hw_model=hw_model,
role=role,
channel=env.channel_id,
first_seen_us=now_us,
last_seen_us=now_us,
)
session.add(node)
except Exception as e:
print(f"Error processing NODEINFO_APP: {e}")
# --- POSITION_APP handling
if env.packet.decoded.portnum == PortNum.POSITION_APP:
position = decode_payload.decode_payload(
PortNum.POSITION_APP, env.packet.decoded.payload
)
if position and position.latitude_i and position.longitude_i:
from_node_id = getattr(env.packet, 'from')
node = (await session.execute(select(Node).where(Node.node_id == from_node_id))).scalar_one_or_none()
from_node_id = getattr(env.packet, "from")
node = (
await session.execute(select(Node).where(Node.node_id == from_node_id))
).scalar_one_or_none()
if node:
now_us = int(time.time() * 1_000_000)
node.last_lat = position.latitude_i
node.last_long = position.longitude_i
node.last_seen_us = now_us
if node.first_seen_us is None:
node.first_seen_us = now_us
session.add(node)
# --- TRACEROUTE_APP (no conflict handling, normal insert)
if env.packet.decoded.portnum == PortNum.TRACEROUTE_APP:
packet_id = None
if env.packet.decoded.want_response:
packet_id = env.packet.id
else:
result = await session.execute(select(Packet).where(Packet.id == env.packet.decoded.request_id))
if result.scalar_one_or_none():
packet_id = env.packet.decoded.request_id
packet_id = env.packet.id
if packet_id is not None:
session.add(Traceroute(
packet_id=packet_id,
route=env.packet.decoded.payload,
done=not env.packet.decoded.want_response,
gateway_node_id=int(env.gateway_id[1:], 16),
import_time=datetime.datetime.now(),
))
now_us = int(time.time() * 1_000_000)
session.add(
Traceroute(
packet_id=packet_id,
route=env.packet.decoded.payload,
done=not env.packet.decoded.want_response,
gateway_node_id=int(env.gateway_id[1:], 16),
import_time_us=now_us,
)
)
await session.commit()
if new_packet:
await packet.awaitable_attrs.to_node
await packet.awaitable_attrs.from_node

View File

@@ -1,6 +1,6 @@
import asyncio
import contextlib
from collections import defaultdict
import asyncio
waiting_node_ids_events = defaultdict(set)
@@ -36,11 +36,13 @@ def create_event(node_id):
def remove_event(node_event):
    # Detach a waiter from the set registered for its node id.
    # Raises KeyError if it was never registered (same as set.remove).
    events_for_node = waiting_node_ids_events[node_event.node_id]
    events_for_node.remove(node_event)
def notify_packet(node_id, packet):
    # Deliver the packet to every waiter registered for this node id
    # and wake each one.
    for waiter in waiting_node_ids_events[node_id]:
        waiter.packets.append(packet)
        waiter.set()
def notify_uplinked(node_id, packet):
    # Record an uplinked packet on every waiter for this node id.
    # NOTE(review): unlike notify_packet this does not call set() —
    # presumably waiters are woken by a packet notification; confirm.
    for waiter in waiting_node_ids_events[node_id]:
        waiter.uplinked.append(packet)

View File

@@ -1,100 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>API Documentation - Config</title>
<link rel="stylesheet" href="https://unpkg.com/swagger-ui-dist/swagger-ui.css" />
<style>
body { margin: 0; background: #ffffff; color: #000; }
#swagger-ui { background: #ffffff; color: #000; }
.swagger-ui { background-color: #ffffff !important; color: #000 !important; }
.swagger-ui .topbar,
.swagger-ui .info,
.swagger-ui .opblock-summary-description,
.swagger-ui .parameters-col_description,
.swagger-ui .response-col_description { color: #000 !important; }
.swagger-ui .opblock { background-color: #f9f9f9 !important; border-color: #ddd !important; }
.swagger-ui .opblock-summary { background-color: #eaeaea !important; color: #000 !important; }
.swagger-ui .opblock-section-header { color: #000 !important; }
.swagger-ui .parameters,
.swagger-ui .response { background-color: #fafafa !important; color: #000 !important; }
.swagger-ui table { color: #000 !important; }
.swagger-ui a { color: #1a0dab !important; }
.swagger-ui input,
.swagger-ui select,
.swagger-ui textarea { background-color: #fff !important; color: #000 !important; border: 1px solid #ccc !important; }
</style>
</head>
<body>
<div id="swagger-ui"></div>
<script src="https://unpkg.com/swagger-ui-dist/swagger-ui-bundle.js"></script>
<script>
const spec = {
openapi: "3.0.0",
info: {
title: "Site Config API",
version: "1.0.0",
description: "API for retrieving the site configuration. This endpoint does not take any parameters."
},
paths: {
"/api/config": {
get: {
summary: "Get site configuration",
description: "Returns the current site configuration object.",
responses: {
"200": {
description: "Successful response",
content: {
"application/json": {
schema: {
type: "object",
properties: {
site_config: {
type: "object",
additionalProperties: true,
example: {
site_name: "MeshView",
firehose_interval: 1000,
starting: "/nodes",
theme: "dark"
}
}
}
}
}
}
},
"500": {
description: "Server error",
content: {
"application/json": {
schema: {
type: "object",
properties: {
error: { type: "string", example: "Internal server error" }
}
}
}
}
}
}
}
}
}
};
window.onload = () => {
SwaggerUIBundle({
spec,
dom_id: '#swagger-ui',
presets: [
SwaggerUIBundle.presets.apis,
SwaggerUIBundle.SwaggerUIStandalonePreset
],
layout: "BaseLayout"
});
};
</script>
</body>
</html>

View File

@@ -1,109 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>API Documentation - Edges</title>
<link rel="stylesheet" href="https://unpkg.com/swagger-ui-dist/swagger-ui.css" />
<style>
body {
margin: 0;
background: #ffffff;
color: #000000;
}
#swagger-ui { background: #ffffff; color: #000; }
.swagger-ui { background-color: #ffffff !important; color: #000 !important; }
.swagger-ui .topbar,
.swagger-ui .info,
.swagger-ui .opblock-summary-description,
.swagger-ui .parameters-col_description,
.swagger-ui .response-col_description { color: #000 !important; }
.swagger-ui .opblock { background-color: #f9f9f9 !important; border-color: #ddd !important; }
.swagger-ui .opblock-summary { background-color: #eaeaea !important; color: #000 !important; }
.swagger-ui .opblock-section-header { color: #000 !important; }
.swagger-ui .parameters,
.swagger-ui .response { background-color: #fafafa !important; color: #000 !important; }
.swagger-ui table { color: #000 !important; }
.swagger-ui a { color: #1a0dab !important; }
.swagger-ui input,
.swagger-ui select,
.swagger-ui textarea { background-color: #fff !important; color: #000 !important; border: 1px solid #ccc !important; }
</style>
</head>
<body>
<div id="swagger-ui"></div>
<script src="https://unpkg.com/swagger-ui-dist/swagger-ui-bundle.js"></script>
<script>
const spec = {
openapi: "3.0.0",
info: {
title: "Network Edges API",
version: "1.0.0",
description: "API for retrieving network edges derived from traceroutes and neighbor info packets, with optional type filtering."
},
paths: {
"/api/edges": {
get: {
summary: "Get network edges",
description: "Returns edges between nodes in the network. Optionally filter by type (`traceroute` or `neighbor`).",
parameters: [
{
name: "type",
in: "query",
required: false,
description: "Optional filter to only return edges of this type (`traceroute` or `neighbor`).",
schema: { type: "string", enum: ["traceroute", "neighbor"] }
}
],
responses: {
"200": {
description: "Successful response",
content: {
"application/json": {
schema: {
type: "object",
properties: {
edges: {
type: "array",
items: {
type: "object",
properties: {
from: { type: "integer", example: 101 },
to: { type: "integer", example: 102 },
type: { type: "string", example: "traceroute" }
}
}
}
}
}
}
}
},
"400": {
description: "Invalid request parameters",
content: {
"application/json": {
schema: { type: "object", properties: { error: { type: "string" } } }
}
}
}
}
}
}
}
};
window.onload = () => {
SwaggerUIBundle({
spec,
dom_id: '#swagger-ui',
presets: [
SwaggerUIBundle.presets.apis,
SwaggerUIBundle.SwaggerUIStandalonePreset
],
layout: "BaseLayout"
});
};
</script>
</body>
</html>

View File

@@ -1,134 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>API Documentation - Nodes</title>
<link rel="stylesheet" href="https://unpkg.com/swagger-ui-dist/swagger-ui.css" />
<style>
body { margin: 0; background: #ffffff; color: #000; }
#swagger-ui { background: #ffffff; color: #000; }
.swagger-ui { background-color: #ffffff !important; color: #000 !important; }
.swagger-ui .topbar,
.swagger-ui .info,
.swagger-ui .opblock-summary-description,
.swagger-ui .parameters-col_description,
.swagger-ui .response-col_description { color: #000 !important; }
.swagger-ui .opblock { background-color: #f9f9f9 !important; border-color: #ddd !important; }
.swagger-ui .opblock-summary { background-color: #eaeaea !important; color: #000 !important; }
.swagger-ui .opblock-section-header { color: #000 !important; }
.swagger-ui .parameters,
.swagger-ui .response { background-color: #fafafa !important; color: #000 !important; }
.swagger-ui table { color: #000 !important; }
.swagger-ui a { color: #1a0dab !important; }
.swagger-ui input,
.swagger-ui select,
.swagger-ui textarea { background-color: #fff !important; color: #000 !important; border: 1px solid #ccc !important; }
</style>
</head>
<body>
<div id="swagger-ui"></div>
<script src="https://unpkg.com/swagger-ui-dist/swagger-ui-bundle.js"></script>
<script>
const spec = {
openapi: "3.0.0",
info: {
title: "Network Nodes API",
version: "1.0.0",
description: "API for retrieving nodes in the network with optional filters by last seen time."
},
paths: {
"/api/nodes": {
get: {
summary: "Get network nodes",
description: "Returns a list of nodes with optional filtering by recent activity.",
parameters: [
{
name: "hours",
in: "query",
required: false,
description: "Return nodes seen in the last X hours.",
schema: { type: "integer", example: 24 }
},
{
name: "days",
in: "query",
required: false,
description: "Return nodes seen in the last X days.",
schema: { type: "integer", example: 7 }
},
{
name: "last_seen_after",
in: "query",
required: false,
description: "Return nodes last seen after this ISO8601 timestamp.",
schema: { type: "string", format: "date-time", example: "2025-08-25T14:00:00" }
}
],
responses: {
"200": {
description: "Successful response",
content: {
"application/json": {
schema: {
type: "object",
properties: {
nodes: {
type: "array",
items: {
type: "object",
properties: {
node_id: { type: "integer", example: 101 },
long_name: { type: "string", example: "Node Alpha" },
short_name: { type: "string", example: "A" },
channel: { type: "string", example: "2" },
last_seen: { type: "string", format: "date-time", example: "2025-08-25T12:00:00" },
last_lat: { type: "number", format: "float", example: 37.7749 },
last_long: { type: "number", format: "float", example: -122.4194 },
hardware: { type: "string", example: "Heltec V3" },
firmware: { type: "string", example: "1.0.5" },
role: { type: "string", example: "router" }
}
}
}
}
}
}
}
},
"400": {
description: "Invalid request parameters",
content: {
"application/json": {
schema: { type: "object", properties: { error: { type: "string" } } }
}
}
},
"500": {
description: "Server error",
content: {
"application/json": {
schema: { type: "object", properties: { error: { type: "string" } } }
}
}
}
}
}
}
}
};
window.onload = () => {
SwaggerUIBundle({
spec,
dom_id: '#swagger-ui',
presets: [
SwaggerUIBundle.presets.apis,
SwaggerUIBundle.SwaggerUIStandalonePreset
],
layout: "BaseLayout"
});
};
</script>
</body>
</html>

View File

@@ -1,167 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>API Documentation - Packets</title>
<link rel="stylesheet" href="https://unpkg.com/swagger-ui-dist/swagger-ui.css" />
<style>
body {
margin: 0;
background: #ffffff;
color: #000000;
}
#swagger-ui {
background: #ffffff;
color: #000000;
}
/* Override Swagger UI colors for white background */
.swagger-ui {
background-color: #ffffff !important;
color: #000000 !important;
}
.swagger-ui .topbar,
.swagger-ui .info,
.swagger-ui .opblock-summary-description,
.swagger-ui .parameters-col_description,
.swagger-ui .response-col_description {
color: #000000 !important;
}
.swagger-ui .opblock {
background-color: #f9f9f9 !important;
border-color: #ddd !important;
}
.swagger-ui .opblock-summary {
background-color: #eaeaea !important;
color: #000 !important;
}
.swagger-ui .opblock-section-header {
color: #000 !important;
}
.swagger-ui .parameters,
.swagger-ui .response {
background-color: #fafafa !important;
color: #000 !important;
}
.swagger-ui table {
color: #000 !important;
}
.swagger-ui a {
color: #1a0dab !important; /* classic link blue */
}
.swagger-ui input,
.swagger-ui select,
.swagger-ui textarea {
background-color: #fff !important;
color: #000 !important;
border: 1px solid #ccc !important;
}
</style>
</head>
<body>
<div id="swagger-ui"></div>
<script src="https://unpkg.com/swagger-ui-dist/swagger-ui-bundle.js"></script>
<script>
const spec = {
openapi: "3.0.0",
info: {
title: "Packets API",
version: "1.0.0",
description: "API for retrieving packet records with optional filters."
},
paths: {
"/api/packets": {
get: {
summary: "Get packets",
description: "Returns a list of recent packets, optionally filtered by a timestamp and limited by count.",
parameters: [
{
name: "limit",
in: "query",
required: false,
description: "Maximum number of packets to return. Default is 200.",
schema: {
type: "integer",
default: 200
}
},
{
name: "since",
in: "query",
required: false,
description: "Only return packets imported after this ISO8601 timestamp (e.g., `2025-08-12T14:15:20`).",
schema: {
type: "string",
format: "date-time"
}
}
],
responses: {
"200": {
description: "Successful response",
content: {
"application/json": {
schema: {
type: "object",
properties: {
packets: {
type: "array",
items: {
type: "object",
properties: {
id: { type: "integer", example: 196988973 },
from_node_id: { type: "integer", example: 2381019191 },
to_node_id: { type: "integer", example: 1234567890 },
portnum: { type: "integer", example: 1 },
import_time: { type: "string", format: "date-time", example: "2025-08-12T14:15:20.503827" },
payload: { type: "string", example: "Hello Mesh" }
}
}
}
}
}
}
}
},
"500": {
description: "Internal server error",
content: {
"application/json": {
schema: {
type: "object",
properties: {
error: { type: "string", example: "Failed to fetch packets" }
}
}
}
}
}
}
}
}
}
};
window.onload = () => {
SwaggerUIBundle({
spec,
dom_id: '#swagger-ui',
presets: [
SwaggerUIBundle.presets.apis,
SwaggerUIBundle.SwaggerUIStandalonePreset
],
layout: "BaseLayout"
});
};
</script>
</body>
</html>

View File

@@ -1,210 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>API Documentation - Packet Stats</title>
<link rel="stylesheet" href="https://unpkg.com/swagger-ui-dist/swagger-ui.css" />
<style>
body {
margin: 0;
background: #ffffff;
color: #000000;
}
#swagger-ui {
background: #ffffff;
color: #000000;
}
/* Override Swagger UI colors for white background */
.swagger-ui {
background-color: #ffffff !important;
color: #000000 !important;
}
.swagger-ui .topbar,
.swagger-ui .info,
.swagger-ui .opblock-summary-description,
.swagger-ui .parameters-col_description,
.swagger-ui .response-col_description {
color: #000000 !important;
}
.swagger-ui .opblock {
background-color: #f9f9f9 !important;
border-color: #ddd !important;
}
.swagger-ui .opblock-summary {
background-color: #eaeaea !important;
color: #000 !important;
}
.swagger-ui .opblock-section-header {
color: #000 !important;
}
.swagger-ui .parameters,
.swagger-ui .response {
background-color: #fafafa !important;
color: #000 !important;
}
.swagger-ui table {
color: #000 !important;
}
.swagger-ui a {
color: #1a0dab !important; /* classic link blue */
}
.swagger-ui input,
.swagger-ui select,
.swagger-ui textarea {
background-color: #fff !important;
color: #000 !important;
border: 1px solid #ccc !important;
}
</style>
</head>
<body>
<div id="swagger-ui"></div>
<script src="https://unpkg.com/swagger-ui-dist/swagger-ui-bundle.js"></script>
<script>
const spec = {
openapi: "3.0.0",
info: {
title: "Packet Statistics API",
version: "1.0.0",
description: "API for retrieving packet statistics over a given period with optional filters."
},
paths: {
"/api/stats": {
get: {
summary: "Get packet statistics",
description: "Returns packet statistics for a given period type and length, with optional filters.",
parameters: [
{
name: "period_type",
in: "query",
required: false,
description: "Type of period to group by (`hour` or `day`). Default is `hour`.",
schema: {
type: "string",
enum: ["hour", "day"]
}
},
{
name: "length",
in: "query",
required: false,
description: "Number of periods to include. Default is 24.",
schema: {
type: "integer",
default: 24
}
},
{
name: "channel",
in: "query",
required: false,
description: "Filter by channel name.",
schema: {
type: "string"
}
},
{
name: "portnum",
in: "query",
required: false,
description: "Filter by port number.",
schema: {
type: "integer"
}
},
{
name: "to_node",
in: "query",
required: false,
description: "Filter by destination node ID.",
schema: {
type: "integer"
}
},
{
name: "from_node",
in: "query",
required: false,
description: "Filter by source node ID.",
schema: {
type: "integer"
}
}
],
responses: {
"200": {
description: "Successful response",
content: {
"application/json": {
schema: {
type: "object",
properties: {
hourly: {
type: "object",
properties: {
period_type: { type: "string" },
length: { type: "integer" },
filters: { type: "object" },
data: {
type: "array",
items: {
type: "object",
properties: {
period: { type: "string", example: "2025-08-06 19:00" },
node_id: { type: "integer" },
long_name: { type: "string" },
short_name: { type: "string" },
packets: { type: "integer" }
}
}
}
}
}
}
}
}
}
},
"400": {
description: "Invalid request parameters",
content: {
"application/json": {
schema: {
type: "object",
properties: {
error: { type: "string" }
}
}
}
}
}
}
}
}
}
};
window.onload = () => {
SwaggerUIBundle({
spec,
dom_id: '#swagger-ui',
presets: [
SwaggerUIBundle.presets.apis,
SwaggerUIBundle.SwaggerUIStandalonePreset
],
layout: "BaseLayout"
});
};
</script>
</body>
</html>

View File

@@ -1,91 +1,273 @@
<!DOCTYPE html>
<html lang="en">
<html>
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>API Index</title>
<style>
<meta charset="utf-8">
<title>Meshview API Documentation</title>
<style>
body {
font-family: Arial, sans-serif;
background-color: #1e1e1e;
color: #eaeaea;
margin: 0;
padding: 0;
background: #121212;
color: #eee;
font-family: monospace;
margin: 20px;
line-height: 1.5;
}
header {
background: #2a2a2a;
padding: 20px;
text-align: center;
font-size: 1.6em;
font-weight: bold;
h1, h2, h3 { color: #79c0ff; }
code {
background: #1e1e1e;
padding: 3px 6px;
border-radius: 4px;
color: #ffd479;
font-size: 0.95rem;
}
.container {
max-width: 800px;
margin: 30px auto;
padding: 20px;
.endpoint {
border: 1px solid #333;
padding: 12px;
margin-bottom: 18px;
border-radius: 8px;
background: #1a1a1a;
}
ul {
list-style: none;
padding: 0;
.method {
display: inline-block;
padding: 2px 8px;
border-radius: 4px;
margin-right: 6px;
font-weight: bold;
}
li {
background: #272b2f;
border: 1px solid #474b4e;
padding: 15px 20px;
margin-bottom: 15px;
border-radius: 10px;
transition: background 0.2s;
.get { background: #0066cc; }
.path { font-weight: bold; color: #fff; }
table {
width: 100%;
border-collapse: collapse;
margin-top: 8px;
}
li:hover {
background: #33383d;
th, td {
border: 1px solid #444;
padding: 6px 10px;
}
a {
color: #4cafef;
text-decoration: none;
font-weight: bold;
font-size: 1.1em;
th {
background: #222;
color: #9ddcff;
}
p {
margin: 8px 0 0 0;
font-size: 0.9em;
color: #bbbbbb;
.example {
margin-top: 10px;
padding: 10px;
background: #161616;
border-radius: 6px;
border: 1px solid #333;
}
</style>
</style>
</head>
<body>
<header>
API Index
</header>
<h1>Meshview API Documentation</h1>
<p>This page describes all REST endpoints provided by Meshview.</p>
<div class="container">
<ul>
<li>
<a href="/api-chat">Chat API</a>
<p> View chat messages.</p>
</li>
<li>
<a href="/api-nodes">Node API</a>
<p>Retrieve node information.</p>
</li>
<li>
<a href="/api-packets">Packet API</a>
<p>Access raw packet data.</p>
</li>
<li>
<a href="/api-stats">Statistics API</a>
<p>View system and traffic statistics.</p>
</li>
<li>
<a href="/api-edges">Edges API</a>
<p>Get edges details.</p>
</li>
<li>
<a href="/api-config">Configuration API</a>
<p>Get and update configuration details.</p>
</li>
</ul>
<!------------------------------ NODES ------------------------------>
<h2>/api/nodes</h2>
<div class="endpoint">
<span class="method get">GET</span>
<span class="path">/api/nodes</span>
<p>Returns a list of mesh nodes.</p>
<h3>Query Parameters</h3>
<table>
<tr><th>Parameter</th><th>Description</th></tr>
<tr><td>role</td><td>Filter by node role</td></tr>
<tr><td>channel</td><td>Filter by channel</td></tr>
<tr><td>hw_model</td><td>Hardware model filter</td></tr>
<tr><td>days_active</td><td>Only nodes seen within X days</td></tr>
</table>
<div class="example">
<b>Example:</b><br>
<code>/api/nodes?days_active=3</code>
</div>
</div>
<!------------------------------ PACKETS ------------------------------>
<h2>/api/packets</h2>
<div class="endpoint">
<span class="method get">GET</span>
<span class="path">/api/packets</span>
<p>Fetch packets with many filters. Returns decoded packet data.</p>
<h3>Query Parameters</h3>
<table>
<tr><th>Parameter</th><th>Description</th></tr>
<tr><td>packet_id</td><td>Return exactly one packet</td></tr>
<tr><td>limit</td><td>Max number of results (1100)</td></tr>
<tr><td>since</td><td>Only packets newer than import_time_us</td></tr>
<tr><td>from_node_id</td><td>Filter by sender node</td></tr>
<tr><td>to_node_id</td><td>Filter by destination node</td></tr>
<tr><td>node_id</td><td>Legacy: match either from or to</td></tr>
<tr><td>portnum</td><td>Filter by port number</td></tr>
<tr><td>contains</td><td>Substring filter for payload</td></tr>
</table>
<div class="example">
<b>Example:</b><br>
<code>/api/packets?from_node_id=123&limit=100</code>
</div>
</div>
<!------------------------------ PACKETS SEEN ------------------------------>
<h2>/api/packets_seen/{packet_id}</h2>
<div class="endpoint">
<span class="method get">GET</span>
<span class="path">/api/packets_seen/&lt;packet_id&gt;</span>
<p>Returns list of gateways that heard the packet (RSSI/SNR/hops).</p>
<div class="example">
<b>Example:</b><br>
<code>/api/packets_seen/3314808102</code>
</div>
</div>
<!------------------------------ STATS ------------------------------>
<h2>/api/stats</h2>
<div class="endpoint">
<span class="method get">GET</span>
<span class="path">/api/stats</span>
<p>Returns aggregated packet statistics for a node or globally.</p>
<h3>Query Parameters</h3>
<table>
<tr><th>Parameter</th><th>Description</th></tr>
<tr><td>period_type</td><td>"hour" or "day"</td></tr>
<tr><td>length</td><td>How many hours/days</td></tr>
<tr><td>node</td><td>Node ID for combined sent+seen stats</td></tr>
<tr><td>from_node</td><td>Filter by sender</td></tr>
<tr><td>to_node</td><td>Filter by receiver</td></tr>
<tr><td>portnum</td><td>Filter by port</td></tr>
<tr><td>channel</td><td>Filter by channel</td></tr>
</table>
<div class="example">
<b>Example:</b><br>
<code>/api/stats?node=1128180332&period_type=day&length=1</code>
</div>
</div>
<!------------------------------ STATS COUNT ------------------------------>
<h2>/api/stats/count</h2>
<div class="endpoint">
<span class="method get">GET</span>
<span class="path">/api/stats/count</span>
<p>
Returns <b>total packets</b> and <b>total packet_seen entries</b>.
When no filters are provided, it returns global totals.
When filters are specified, they narrow the time, channel,
direction, or specific packet.
</p>
<h3>Query Parameters</h3>
<table>
<tr><th>Parameter</th><th>Description</th></tr>
<tr><td>period_type</td><td>"hour" or "day"</td></tr>
<tr><td>length</td><td>Number of hours or days (depends on period_type)</td></tr>
<tr><td>channel</td><td>Filter by channel</td></tr>
<tr><td>from_node</td><td>Only packets sent by this node</td></tr>
<tr><td>to_node</td><td>Only packets received by this node</td></tr>
<tr><td>packet_id</td><td>Filter seen counts for specific packet_id</td></tr>
</table>
<div class="example">
<b>Example:</b><br>
<code>/api/stats/count?from_node=1128180332&period_type=day&length=1</code>
</div>
</div>
<!------------------------------ EDGES ------------------------------>
<h2>/api/edges</h2>
<div class="endpoint">
<span class="method get">GET</span>
<span class="path">/api/edges</span>
<p>Returns traceroute and/or neighbor edges for graph rendering.</p>
<h3>Query Parameters</h3>
<table>
<tr><th>Parameter</th><th>Description</th></tr>
<tr><td>type</td><td>"traceroute", "neighbor", or omitted for both</td></tr>
</table>
<div class="example">
<b>Example:</b><br>
<code>/api/edges?type=neighbor</code>
</div>
</div>
<!------------------------------ CONFIG ------------------------------>
<h2>/api/config</h2>
<div class="endpoint">
<span class="method get">GET</span>
<span class="path">/api/config</span>
<p>Returns Meshview configuration (site, MQTT, cleanup, etc.).</p>
<div class="example">
<code>/api/config</code>
</div>
</div>
<!------------------------------ LANG ------------------------------>
<h2>/api/lang</h2>
<div class="endpoint">
<span class="method get">GET</span>
<span class="path">/api/lang</span>
<p>Returns translated text for the UI.</p>
<h3>Parameters</h3>
<table>
<tr><th>lang</th><td>Language code (e.g. "en")</td></tr>
<tr><th>section</th><td>Optional UI section (firehose, map, top...)</td></tr>
</table>
<div class="example">
<code>/api/lang?lang=en&section=firehose</code>
</div>
</div>
<!------------------------------ HEALTH ------------------------------>
<h2>/health</h2>
<div class="endpoint">
<span class="method get">GET</span>
<span class="path">/health</span>
<p>Returns API + database status.</p>
</div>
<!------------------------------ VERSION ------------------------------>
<h2>/version</h2>
<div class="endpoint">
<span class="method get">GET</span>
<span class="path">/version</span>
<p>Returns Meshview version and Git revision.</p>
</div>
<br><br>
<hr>
<p style="text-align:center; color:#666;">Meshview API — generated documentation</p>
</body>
</html>

View File

@@ -0,0 +1,164 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>Mesh Nodes Population Heatmap</title>
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css" />
<style>
body { margin: 0; background: #000; }
#map { height: 100vh; width: 100%; }
#legend {
position: absolute; bottom: 10px; right: 10px;
background: rgba(0,0,0,0.8);
color: white; padding: 10px 14px;
font-family: monospace; font-size: 13px;
border-radius: 5px; z-index: 1000;
box-shadow: 0 0 10px rgba(0,0,0,0.6);
}
.legend-item { display: flex; align-items: center; margin-bottom: 5px; }
.legend-color { width: 18px; height: 18px; margin-right: 6px; border-radius: 3px; }
</style>
</head>
<body>
<div id="map"></div>
<div id="legend"></div>
<script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js"></script>
<script src="https://unpkg.com/leaflet.heat/dist/leaflet-heat.js"></script>
<script>
const map = L.map("map");
L.tileLayer("https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png", {
maxZoom: 19,
attribution: "© OpenStreetMap"
}).addTo(map);
let heatLayer = null;
let nodeCoords = [];
let hoverTooltip = L.tooltip({
permanent: false,
direction: "top",
className: "node-tooltip"
});
// --- Legend ---
// Build the static colour legend shown in the bottom-right overlay.
const legend = document.getElementById("legend");
const legendItems = [
  { color: "#0000ff", label: "Low" },
  { color: "#8000ff", label: "Moderate" },
  { color: "#00ffff", label: "Elevated" },
  { color: "#00ff00", label: "High" },
  { color: "#ffff00", label: "Very High" },
  { color: "#ff0000", label: "Congested?" }
];
for (const { color, label } of legendItems) {
  const row = document.createElement("div");
  row.className = "legend-item";
  const swatch = document.createElement("div");
  swatch.className = "legend-color";
  swatch.style.background = color;
  const caption = document.createElement("span");
  caption.textContent = label;
  row.appendChild(swatch);
  row.appendChild(caption);
  legend.appendChild(row);
}
// --- Load nodes and create heatmap ---
// Fetches nodes active in the last 3 days, rebuilds the coordinate cache
// used by the hover counter, and replaces the heat layer on the map.
async function loadNodes() {
  try {
    const response = await fetch("/api/nodes?days_active=3");
    if (!response.ok) throw new Error(`HTTP error ${response.status}`);
    const payload = await response.json();
    const nodes = payload.nodes || [];
    nodeCoords = [];
    const heatPoints = [];
    for (const node of nodes) {
      // Stored coordinates are integer degrees * 1e7.
      const lat = node.last_lat / 1e7;
      const lng = node.last_long / 1e7;
      // Truthiness doubles as a missing-coordinate filter: null -> 0, undefined -> NaN.
      if (lat && lng && !isNaN(lat) && !isNaN(lng)) {
        nodeCoords.push([lat, lng]);
        heatPoints.push([lat, lng, 1.0]); // equal weight per node
      }
    }
    // Swap out the previous heat layer rather than stacking a new one.
    if (heatLayer) map.removeLayer(heatLayer);
    heatLayer = L.heatLayer(heatPoints, {
      radius: 18,
      blur: 10,
      maxZoom: 15,
      minOpacity: 0.4,
      gradient: {
        0.0: "#0000ff", // deep blue
        0.2: "#8000ff", // purple
        0.4: "#00ffff", // cyan
        0.6: "#00ff00", // green
        0.8: "#ffff00", // yellow
        0.9: "#ff8000", // orange
        1.0: "#ff0000"  // red
      }
    }).addTo(map);
    await setMapBoundsFromConfig();
  } catch (err) {
    console.error("Failed to load nodes:", err);
  }
}
// --- Map bounds ---
// Fit the map to the bounding box published by /api/config; on any failure
// (fetch error, missing/NaN values) fall back to a default Bay Area view.
async function setMapBoundsFromConfig() {
  try {
    const res = await fetch("/api/config");
    const config = await res.json();
    const site = config.site;
    const corners = [
      parseFloat(site.map_top_left_lat),
      parseFloat(site.map_top_left_lon),
      parseFloat(site.map_bottom_right_lat),
      parseFloat(site.map_bottom_right_lon)
    ];
    if (corners.some(Number.isNaN)) {
      throw new Error("Map bounds contain NaN");
    }
    const [topLat, topLon, bottomLat, bottomLon] = corners;
    map.fitBounds([[topLat, topLon], [bottomLat, bottomLon]]);
  } catch (err) {
    console.error("Failed to load map bounds from config:", err);
    map.setView([37.77, -122.42], 9);
  }
}
// --- Count nearby nodes ---
// Approximate count of cached node coordinates within a square box of
// ~radiusMeters around latlng (uses ~111,320 m per degree of latitude).
function countNearbyNodes(latlng, radiusMeters) {
  const latRange = radiusMeters / 111320;
  // Longitude degrees shrink with latitude, hence the cosine correction.
  const lngRange = radiusMeters / (111320 * Math.cos(latlng.lat * Math.PI / 180));
  let nearby = 0;
  for (const [lat, lng] of nodeCoords) {
    const inLat = Math.abs(lat - latlng.lat) <= latRange;
    const inLng = Math.abs(lng - latlng.lng) <= lngRange;
    if (inLat && inLng) nearby += 1;
  }
  return nearby;
}
// --- Tooltip on hover ---
// Show a floating count of nodes near the cursor. The search radius halves
// with each zoom level so "nearby" tracks what is visually close.
map.on("mousemove", e => {
  if (!nodeCoords.length) return;
  const zoom = map.getZoom();
  const radiusMeters = 2000 / Math.pow(2, zoom - 10); // dynamic nearness by zoom
  const count = countNearbyNodes(e.latlng, radiusMeters);
  if (count === 0) {
    map.closeTooltip(hoverTooltip);
    return;
  }
  hoverTooltip
    .setLatLng(e.latlng)
    .setContent(`${count} nodes nearby (${radiusMeters.toFixed(0)}m radius)`)
    .addTo(map);
});
loadNodes();
</script>
</body>
</html>

244
meshview/static/kiosk.html Normal file
View File

@@ -0,0 +1,244 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>Mesh Nodes Live Map</title>
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css" crossorigin=""/>
<style>
body { margin: 0; font-family: monospace; background: #121212; color: #eee; }
#map { height: 100vh; width: 100%; }
#legend {
position: absolute;
bottom: 10px;
right: 10px;
background: white; /* changed from rgba(0,0,0,0.8) to white */
color: black; /* text color black */
padding: 10px;
border-radius: 5px;
z-index: 1000;
font-size: 13px;
line-height: 1.5;
border: 1px solid #ccc; /* optional: subtle border for white bg */
}
#filter-container { margin-bottom: 6px; text-align: left; }
.filter-checkbox { margin-right: 4px; }
.blinking-tooltip {
background: white;
color: black;
border: 1px solid #000;
border-radius: 4px;
padding: 2px 5px;
}
</style>
</head>
<body>
<div id="map"></div>
<div id="legend">
<div id="filter-container"></div>
</div>
<script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js" crossorigin></script>
<script src="https://unpkg.com/leaflet-polylinedecorator@1.6.0/dist/leaflet.polylinedecorator.js" crossorigin></script>
<script>
(async function(){
// --- Load config ---
let config = {};
try {
const res = await fetch('/api/config');
config = await res.json();
} catch(err){ console.error('Failed to load config', err); }
const mapInterval = Number(config.site?.map_interval) || 3;
const bayAreaBounds = [
[Number(config.site?.map_top_left_lat), Number(config.site?.map_top_left_lon)],
[Number(config.site?.map_bottom_right_lat), Number(config.site?.map_bottom_right_lon)]
];
// --- Initialize map ---
const map = L.map('map');
L.tileLayer('https://tile.openstreetmap.org/{z}/{x}/{y}.png', { maxZoom: 19 }).addTo(map);
map.fitBounds(bayAreaBounds);
// --- Utilities ---
const palette = ["#e6194b","#4363d8","#f58231","#911eb4","#46f0f0","#f032e6","#bcf60c","#fabebe","#008080","#e6beff","#9a6324","#fffac8","#800000","#aaffc3","#808000","#ffd8b1","#000075","#808080"];
const colorMap = new Map(); let nextColorIndex=0;
// Assign a stable colour per channel name: first request gets the next
// palette entry (wrapping around), later requests return the cached colour.
function hashToColor(str){
  const cached = colorMap.get(str);
  if (cached !== undefined) return cached;
  const assigned = palette[nextColorIndex % palette.length];
  nextColorIndex += 1;
  colorMap.set(str, assigned);
  return assigned;
}
// Render a microsecond epoch timestamp as a coarse age string:
// the largest non-zero unit among days/hours/minutes, else seconds.
function timeAgoFromUs(us){
  const elapsedMs = Date.now() - (us / 1000);
  const seconds = Math.floor(elapsedMs / 1000);
  const minutes = Math.floor(seconds / 60);
  const hours = Math.floor(minutes / 60);
  const days = Math.floor(hours / 24);
  if (days > 0) return `${days}d`;
  if (hours > 0) return `${hours}h`;
  if (minutes > 0) return `${minutes}m`;
  return `${seconds}s`;
}
// A node is unplottable when it is missing or either coordinate is absent or
// zero (the truthiness check treats 0 as "no position", as the original did).
function isInvalidCoord(node) {
  if (!node) return true;
  return !(node.last_lat && node.last_long);
}
// --- Load nodes ---
let nodes = [];
try {
const res = await fetch('/api/nodes');
const data = await res.json();
nodes = data.nodes || [];
} catch(err){ console.error('Failed to load nodes', err); }
const markers = {};
const markerById = {}; // Keyed by numeric node_id for packets
const nodeMap = new Map(); // Keyed by numeric node_id
const channels = new Set();
const activeBlinks = new Map();
const portMap = {1:"Text",67:"Telemetry",3:"Position",70:"Traceroute",4:"Node Info",71:"Neighbour Info",73:"Map Report"};
nodes.forEach(node=>{
if(isInvalidCoord(node)) return;
const lat = node.last_lat/1e7;
const lng = node.last_long/1e7;
const isRouter = node.role.toLowerCase().includes("router");
channels.add(node.channel);
nodeMap.set(node.node_id,node);
const color = hashToColor(node.channel);
const marker = L.circleMarker([lat,lng],{radius:isRouter?9:7,color:"white",fillColor:color,fillOpacity:1,weight:0.7}).addTo(map);
marker.nodeId = node.node_id;
marker.originalColor = color;
markerById[node.node_id]=marker;
let popupContent=`<b>${node.long_name} (${node.short_name})</b><br>
<b>Channel:</b> ${node.channel}<br>
<b>Model:</b> ${node.hw_model}<br>
<b>Role:</b> ${node.role}<br>`;
if(node.last_seen_us) popupContent+=`<b>Last seen:</b> ${timeAgoFromUs(node.last_seen_us)}<br>`;
if(node.firmware) popupContent+=`<b>Firmware:</b> ${node.firmware}<br>`;
marker.on('click', e=>{
e.originalEvent.stopPropagation();
marker.bindPopup(popupContent).openPopup();
setTimeout(()=>marker.closePopup(),3000);
});
if(!markers[node.channel]) markers[node.channel]=[];
markers[node.channel].push({marker,isRouter});
});
// --- Filters ---
const filterContainer=document.getElementById('filter-container');
channels.forEach(channel=>{
const id=`filter-${channel.replace(/\s+/g,'-').toLowerCase()}`;
const color=hashToColor(channel);
const label=document.createElement('label');
label.style.color=color;
label.innerHTML=`<input type="checkbox" class="filter-checkbox" id="${id}" checked> ${channel}`;
filterContainer.appendChild(label);
});
// Sync marker visibility with the channel filter checkboxes, then persist
// the checkbox state to localStorage so it survives page reloads.
function updateMarkers(){
nodes.forEach(node=>{
// Checkbox ids are derived from the channel name (spaces -> dashes, lowercased).
const id=`filter-${node.channel.replace(/\s+/g,'-').toLowerCase()}`;
const checkbox=document.getElementById(id);
const marker=markerById[node.node_id];
// Hidden markers are made fully transparent rather than removed from the map;
// nodes without coordinates have no marker, hence the guard.
if(marker) marker.setStyle({fillOpacity: checkbox.checked?1:0});
});
// Persist a {channelName: checked} map under a single storage key.
localStorage.setItem('meshview_map_filters', JSON.stringify({
channels: Array.from(channels).reduce((obj,c)=>{ obj[c]=document.getElementById(`filter-${c.replace(/\s+/g,'-').toLowerCase()}`).checked; return obj; },{})
}));
}
document.querySelectorAll(".filter-checkbox").forEach(input=>input.addEventListener("change",updateMarkers));
// Load saved filters
const savedFilters=JSON.parse(localStorage.getItem('meshview_map_filters')||'{}');
if(savedFilters.channels){
Object.keys(savedFilters.channels).forEach(c=>{
const checkbox=document.getElementById(`filter-${c.replace(/\s+/g,'-').toLowerCase()}`);
if(checkbox) checkbox.checked=savedFilters.channels[c];
});
}
updateMarkers();
// --- Packet blinking ---
// Flash a node's marker yellow (toggling every 500 ms, 8 toggles total) and
// show a temporary tooltip naming the node and the packet's port type.
function blinkNode(marker,longName,portnum){
if(!map.hasLayer(marker)) return;
// If this marker is already blinking, cancel the previous animation first so
// intervals and tooltips do not stack.
if(activeBlinks.has(marker)){
clearInterval(activeBlinks.get(marker));
marker.setStyle({fillColor: marker.originalColor});
if(marker.tooltip) map.removeLayer(marker.tooltip);
}
let count=0;
const portName=portMap[portnum]||`Port ${portnum}`;
const tooltip=L.tooltip({permanent:true,direction:'top',offset:[0,-marker.options.radius-5],className:'blinking-tooltip'})
.setContent(`${longName} (${portName})`).setLatLng(marker.getLatLng());
tooltip.addTo(map); marker.tooltip=tooltip;
// Alternate between yellow and the marker's original colour every 500 ms.
const interval=setInterval(()=>{
if(map.hasLayer(marker)){
marker.setStyle({fillColor:count%2===0?'yellow':marker.originalColor});
marker.bringToFront();
}
count++;
// After the final toggle, restore the colour and clean up tooltip + interval.
if(count>7){ clearInterval(interval); marker.setStyle({fillColor:marker.originalColor}); map.removeLayer(tooltip); activeBlinks.delete(marker); }
},500);
activeBlinks.set(marker,interval);
}
let lastImportTimeUs = null;
// Prime lastImportTimeUs with the newest packet's timestamp so that
// fetchNewPackets only reports packets arriving after page load.
async function fetchLatestPacket(){
  try {
    const res = await fetch(`/api/packets?limit=1`);
    const data = await res.json();
    const newest = data.packets?.[0];
    lastImportTimeUs = newest?.import_time_us || 0;
  } catch (err) {
    console.error(err);
  }
}
// Poll for packets newer than the last seen import_time_us and blink the
// sending node's marker for each one. No-op until fetchLatestPacket has
// primed lastImportTimeUs.
async function fetchNewPackets(){
if (!lastImportTimeUs) return;
try{
const res = await fetch(`/api/packets?since=${lastImportTimeUs}`);
const data = await res.json();
if(!data.packets || !data.packets.length) return;
let latest = lastImportTimeUs;
data.packets.forEach(packet => {
// Track newest microsecond timestamp
if (packet.import_time_us && packet.import_time_us > latest) {
latest = packet.import_time_us;
}
// Look up marker and blink it (both must exist: nodes without coordinates
// have no marker entry).
const marker = markerById[packet.from_node_id];
const nodeData = nodeMap.get(packet.from_node_id);
if (marker && nodeData) {
blinkNode(marker, nodeData.long_name, packet.portnum);
}
});
// Advance the high-water mark so the next poll only fetches newer packets.
lastImportTimeUs = latest;
}catch(err){
console.error(err);
}
}
if(mapInterval>0){ fetchLatestPacket(); setInterval(fetchNewPackets,mapInterval*1000); }
})();
</script>
</body>
</html>

View File

@@ -1,200 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>Mesh Nodes Live Map</title>
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css" />
<style>
body { margin: 0; }
#map { height: 100vh; width: 100%; }
#legend {
position: absolute; bottom: 10px; right: 10px;
background: rgba(0,0,0,0.7);
color: white; padding: 8px 12px;
font-family: monospace; font-size: 13px;
border-radius: 5px; z-index: 1000;
}
.legend-item { display: flex; align-items: center; margin-bottom: 4px; }
.legend-color { width: 16px; height: 16px; margin-right: 6px; border-radius: 4px; }
/* Floating pulse label style */
.pulse-label span {
background: rgba(0,0,0,0.6);
padding: 2px 4px;
border-radius: 3px;
pointer-events: none;
font-family: monospace;
font-size: 12px;
}
</style>
</head>
<body>
<div id="map"></div>
<div id="legend"></div>
<script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js"></script>
<script>
const map = L.map("map");
L.tileLayer("https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png", { maxZoom: 19, attribution: "© OpenStreetMap" }).addTo(map);
const nodeMarkers = new Map();
let lastPacketTime = null;
const portColors = {
1:"red",
67:"cyan",
3:"orange",
70:"purple",
4:"yellow",
71:"brown",
73:"pink"
};
const portLabels = {
1:"Text",
67:"Telemetry",
3:"Position",
70:"Traceroute",
4:"Node Info",
71:"Neighbour Info",
73:"Map Report"
};
// Map a portnum to its pulse colour; ports not in portColors flash green.
function getPulseColor(portnum) {
  const color = portColors[portnum];
  return color || "green";
}
// Generate legend dynamically
const legend = document.getElementById("legend");
for (const [port, color] of Object.entries(portColors)) {
const item = document.createElement("div");
item.className = "legend-item";
const colorBox = document.createElement("div");
colorBox.className = "legend-color";
colorBox.style.background = color;
const label = document.createElement("span");
label.textContent = `${portLabels[port] || "Custom"} (${port})`;
item.appendChild(colorBox);
item.appendChild(label);
legend.appendChild(item);
}
// Pulse marker with floating label on top
// Flash the marker in highlightColor for 2 s (toggling every 100 ms with an
// enlarged radius), then shrink the radius back over 1 s. Re-entrancy is
// guarded by marker.activePulse so overlapping packets don't stack timers.
function pulseMarker(marker, highlightColor = "red") {
if (!marker) return;
if (marker.activePulse) return;
marker.activePulse = true;
const originalColor = marker.options.originalColor;
const originalRadius = marker.options.originalRadius;
marker.bringToFront();
const nodeInfo = marker.options.nodeInfo || {};
const portLabel = marker.currentPortLabel || "";
const displayName = `${nodeInfo.long_name || nodeInfo.short_name || "Unknown"}${portLabel ? ` (<i>${portLabel}</i>)` : ""}`;
marker.bindTooltip(displayName, {
permanent: true,
direction: 'top',
className: 'pulse-label',
offset: [0, -10],
html: true // Allow italics
}).openTooltip();
const flashDuration = 2000, fadeDuration = 1000, flashInterval = 100, maxRadius = originalRadius + 5;
let flashTime = 0;
// Phase 1: blink between highlight and original colour, growing the radius.
const flashTimer = setInterval(() => {
flashTime += flashInterval;
const isOn = (flashTime / flashInterval) % 2 === 0;
marker.setStyle({ fillColor: isOn ? highlightColor : originalColor, radius: isOn ? maxRadius : originalRadius });
if (flashTime >= flashDuration) {
clearInterval(flashTimer);
// Phase 2: animate the radius back down via requestAnimationFrame.
const fadeStart = performance.now();
function fade(now) {
const t = Math.min((now - fadeStart) / fadeDuration, 1);
const radius = originalRadius + (maxRadius - originalRadius) * (1 - t);
marker.setStyle({ fillColor: highlightColor, radius: radius, fillOpacity: 1 });
if (t < 1) requestAnimationFrame(fade);
else {
// Animation done: restore original style and release the pulse lock.
marker.setStyle({ fillColor: originalColor, radius: originalRadius, fillOpacity: 1 });
marker.unbindTooltip();
marker.activePulse = false;
}
}
requestAnimationFrame(fade);
}
}, flashInterval);
}
// Fetch all nodes and plot those with coordinates as blue circle markers.
// Nodes without a position are still cached (options.nodeInfo only) so that
// packet polling can resolve their names later.
async function loadNodes() {
  try {
    const res = await fetch("/api/nodes");
    const nodes = (await res.json()).nodes;
    for (const node of nodes) {
      const color = "blue";
      const lat = node.last_lat;
      const lng = node.last_long;
      if (lat && lng) {
        // Stored coordinates are integer degrees * 1e7.
        const marker = L.circleMarker([lat / 1e7, lng / 1e7], {
          radius: 7, color: "white", fillColor: color, fillOpacity: 1, weight: 0.7
        }).addTo(map);
        Object.assign(marker.options, {
          originalColor: color,
          originalRadius: 7,
          nodeInfo: node
        });
        marker.bindPopup(`<b>${node.long_name||node.short_name||"Unknown"}</b><br>ID: ${node.node_id}<br>Role: ${node.role}`);
        nodeMarkers.set(node.node_id, marker);
      } else {
        nodeMarkers.set(node.node_id, {options:{nodeInfo:node}});
      }
    }
    const plotted = Array.from(nodeMarkers.values()).filter((m) => m instanceof L.CircleMarker);
    if (plotted.length > 0) {
      await setMapBoundsFromConfig();
    } else {
      map.setView([37.77, -122.42], 9);
    }
  } catch (err) {
    console.error(err);
  }
}
// Fit the view to the bounding box from /api/config, defaulting to the
// Bay Area if the config cannot be fetched or applied.
async function setMapBoundsFromConfig() {
  try {
    const res = await fetch("/api/config");
    const { site } = await res.json();
    const northWest = [parseFloat(site.map_top_left_lat), parseFloat(site.map_top_left_lon)];
    const southEast = [parseFloat(site.map_bottom_right_lat), parseFloat(site.map_bottom_right_lon)];
    map.fitBounds([northWest, southEast]);
  } catch (err) {
    console.error("Failed to load map bounds from config:", err);
    map.setView([37.77, -122.42], 9);
  }
}
// Poll /api/packets for recent packets and pulse the sending node's marker
// for each one. Called on an interval after loadNodes completes.
async function pollPackets() {
try {
let url = "/api/packets?limit=10";
if (lastPacketTime) url += `&since=${lastPacketTime}`;
const packets = (await (await fetch(url)).json()).packets || [];
// NOTE(review): this cursor reads pkt.import_time while the other map pages
// use import_time_us — confirm which field /api/packets actually returns.
if (packets.length > 0) lastPacketTime = packets[0].import_time;
packets.forEach(pkt => {
const marker = nodeMarkers.get(pkt.from_node_id);
// 🔍 Debug log
const nodeName = marker?.options?.nodeInfo?.short_name || marker?.options?.nodeInfo?.long_name || "Unknown";
console.log(`Packet received: port=${pkt.portnum}, node=${nodeName}`);
// Only plotted nodes (real CircleMarkers) can pulse; placeholder entries
// for coordinate-less nodes are skipped.
if (marker instanceof L.CircleMarker) {
marker.currentPortLabel = portLabels[pkt.portnum] || `${pkt.portnum}`; // Save label
pulseMarker(marker, getPulseColor(pkt.portnum));
}
});
} catch (err) {
console.error(err);
}
}
1
loadNodes().then(()=>{ setInterval(pollPackets,1000); });
</script>
</body>
</html>

View File

@@ -0,0 +1,36 @@
// Shared port label/color definitions for UI pages.
// Published on window under two names each for backwards compatibility.
(function () {
  const labels = {
    0: "UNKNOWN",
    1: "Text",
    3: "Position",
    4: "Node Info",
    5: "Routing",
    6: "Admin",
    8: "Waypoint",
    35: "Store Forward++",
    65: "Store & Forward",
    67: "Telemetry",
    70: "Traceroute",
    71: "Neighbor",
    73: "Map Report",
  };
  const colors = {
    0: "#6c757d",
    1: "#007bff",
    3: "#28a745",
    4: "#ffc107",
    5: "#dc3545",
    6: "#20c997",
    8: "#fd7e14",
    35: "#8bc34a",
    65: "#6610f2",
    67: "#17a2b8",
    70: "#ff4444",
    71: "#ff66cc",
    73: "#9999ff",
  };
  window.PORT_LABEL_MAP = labels;
  window.PORT_COLOR_MAP = colors;
  // Aliases for pages that expect different names.
  window.PORT_MAP = labels;
  window.PORT_COLORS = colors;
})();

View File

@@ -1,9 +1,14 @@
from sqlalchemy import select, func
import logging
from datetime import datetime, timedelta, timezone
from sqlalchemy import Text, and_, cast, func, or_, select
from sqlalchemy.orm import lazyload
from meshview import database
from meshview.models import Packet, PacketSeen, Node, Traceroute
from sqlalchemy import text
from datetime import datetime, timedelta
from meshview import database, models
from meshview.models import Node, Packet, PacketSeen, Traceroute
logger = logging.getLogger(__name__)
async def get_node(node_id):
async with database.async_session() as session:
@@ -22,30 +27,63 @@ async def get_fuzzy_nodes(query):
return result.scalars()
async def get_packets(node_id=None, portnum=None, after=None, before=None, limit=None):
async def get_packets(
from_node_id=None,
to_node_id=None,
node_id=None, # legacy
portnum=None,
after=None,
contains=None, # substring search
limit=50,
):
async with database.async_session() as session:
q = select(Packet)
stmt = select(models.Packet)
conditions = []
if node_id:
q = q.where(
(Packet.from_node_id == node_id) | (Packet.to_node_id == node_id)
# Strict FROM filter
if from_node_id is not None:
conditions.append(models.Packet.from_node_id == from_node_id)
# Strict TO filter
if to_node_id is not None:
conditions.append(models.Packet.to_node_id == to_node_id)
# Legacy node_id (either direction)
if node_id is not None:
conditions.append(
or_(
models.Packet.from_node_id == node_id,
models.Packet.to_node_id == node_id,
)
)
if portnum:
q = q.where(Packet.portnum == portnum)
if after:
q = q.where(Packet.import_time > after)
if before:
q = q.where(Packet.import_time < before)
q = q.order_by(Packet.import_time.desc())
# Port filter
if portnum is not None:
conditions.append(models.Packet.portnum == portnum)
if limit is not None:
q = q.limit(limit)
# Timestamp filter using microseconds
if after is not None:
conditions.append(models.Packet.import_time_us > after)
result = await session.execute(q)
packets = list(result.scalars())
return packets
# Case-insensitive substring search on payload (BLOB → TEXT)
if contains:
contains_lower = f"%{contains.lower()}%"
payload_text = cast(models.Packet.payload, Text)
conditions.append(func.lower(payload_text).like(contains_lower))
# Apply WHERE conditions
if conditions:
stmt = stmt.where(and_(*conditions))
# Order by newest first
stmt = stmt.order_by(models.Packet.import_time_us.desc())
# Limit
stmt = stmt.limit(limit)
# Run query
result = await session.execute(stmt)
return result.scalars().all()
async def get_packets_from(node_id=None, portnum=None, since=None, limit=500):
@@ -53,14 +91,14 @@ async def get_packets_from(node_id=None, portnum=None, since=None, limit=500):
q = select(Packet)
if node_id:
q = q.where(
Packet.from_node_id == node_id
)
q = q.where(Packet.from_node_id == node_id)
if portnum:
q = q.where(Packet.portnum == portnum)
if since:
q = q.where(Packet.import_time > (datetime.now() - since))
result = await session.execute(q.limit(limit).order_by(Packet.import_time.desc()))
now_us = int(datetime.now().timestamp() * 1_000_000)
start_us = now_us - int(since.total_seconds() * 1_000_000)
q = q.where(Packet.import_time_us > start_us)
result = await session.execute(q.limit(limit).order_by(Packet.import_time_us.desc()))
return result.scalars()
@@ -71,21 +109,12 @@ async def get_packet(packet_id):
return result.scalar_one_or_none()
async def get_uplinked_packets(node_id, portnum=None):
    """Return up to 500 most recent packets that were heard by ``node_id``.

    Joins Packet with PacketSeen so only packets the given node reported
    (uplinked) are returned, newest first, optionally filtered by portnum.
    """
    async with database.async_session() as session:
        query = (
            select(Packet)
            .join(PacketSeen)
            .where(PacketSeen.node_id == node_id)
            .order_by(Packet.import_time.desc())
            .limit(500)
        )
        if portnum:
            query = query.where(Packet.portnum == portnum)
        result = await session.execute(query)
        return result.scalars()
async def get_packets_seen(packet_id):
async with database.async_session() as session:
result = await session.execute(
select(PacketSeen)
.where(PacketSeen.packet_id == packet_id)
.order_by(PacketSeen.import_time.desc())
.order_by(PacketSeen.import_time_us.desc())
)
return result.scalars()
@@ -93,41 +122,51 @@ async def get_packets_seen(packet_id):
async def has_packets(node_id, portnum):
async with database.async_session() as session:
return bool(
(await session.execute(
(
await session.execute(
select(Packet.id).where(Packet.from_node_id == node_id).limit(1)
)).scalar()
)
).scalar()
)
async def get_traceroute(packet_id):
async with database.async_session() as session:
result = await session.execute(
select(Traceroute)
.where(Traceroute.packet_id == packet_id)
.order_by(Traceroute.import_time)
select(Traceroute)
.where(Traceroute.packet_id == packet_id)
.order_by(Traceroute.import_time_us)
)
return result.scalars()
async def get_traceroutes(since):
if isinstance(since, datetime):
since_us = int(since.timestamp() * 1_000_000)
else:
since_us = int(since)
async with database.async_session() as session:
result = await session.execute(
select(Traceroute)
.join(Packet)
.where(Traceroute.import_time > (datetime.now() - since))
.order_by(Traceroute.import_time)
stmt = (
select(Traceroute)
.where(Traceroute.import_time_us > since_us)
.order_by(Traceroute.import_time_us)
)
return result.scalars()
stream = await session.stream_scalars(stmt)
async for tr in stream:
yield tr
async def get_mqtt_neighbors(since):
now_us = int(datetime.now().timestamp() * 1_000_000)
start_us = now_us - int(since.total_seconds() * 1_000_000)
async with database.async_session() as session:
result = await session.execute(select(PacketSeen, Packet)
result = await session.execute(
select(PacketSeen, Packet)
.join(Packet)
.where(
(PacketSeen.hop_limit == PacketSeen.hop_start)
& (PacketSeen.hop_start != 0)
& (PacketSeen.import_time > (datetime.now() - since))
& (PacketSeen.import_time_us > start_us)
)
.options(
lazyload(Packet.from_node),
@@ -137,29 +176,12 @@ async def get_mqtt_neighbors(since):
return result
# We count the total number of packets
# This is to be used by /stats in web.py
async def get_total_packet_count():
async with database.async_session() as session:
q = select(func.count(Packet.id)) # Use SQLAlchemy's func to count packets
result = await session.execute(q)
return result.scalar() # Return the total count of packets
# We count the total amount of seen packets
async def get_total_packet_seen_count():
async with database.async_session() as session:
q = select(func.count(PacketSeen.node_id)) # Use SQLAlchemy's func to count nodes
result = await session.execute(q)
return result.scalar() # Return the` total count of seen packets
async def get_total_node_count(channel: str = None) -> int:
try:
async with database.async_session() as session:
q = select(func.count(Node.id)).where(
Node.last_update > datetime.now() - timedelta(days=1)
)
now_us = int(datetime.now(timezone.utc).timestamp() * 1_000_000)
cutoff_us = now_us - 86400 * 1_000_000
q = select(func.count(Node.id)).where(Node.last_seen_us > cutoff_us)
if channel:
q = q.where(Node.channel == channel)
@@ -174,33 +196,44 @@ async def get_total_node_count(channel: str = None) -> int:
async def get_top_traffic_nodes():
try:
async with database.async_session() as session:
result = await session.execute(text("""
SELECT
n.node_id,
n.long_name,
n.short_name,
n.channel,
COUNT(DISTINCT p.id) AS total_packets_sent,
COUNT(ps.packet_id) AS total_times_seen
FROM node n
LEFT JOIN packet p ON n.node_id = p.from_node_id
AND p.import_time >= DATETIME('now', 'localtime', '-24 hours')
LEFT JOIN packet_seen ps ON p.id = ps.packet_id
GROUP BY n.node_id, n.long_name, n.short_name
HAVING total_packets_sent > 0
ORDER BY total_times_seen DESC;
"""))
now_us = int(datetime.now(timezone.utc).timestamp() * 1_000_000)
cutoff_us = now_us - 86400 * 1_000_000
total_packets_sent = func.count(func.distinct(Packet.id)).label("total_packets_sent")
total_times_seen = func.count(PacketSeen.packet_id).label("total_times_seen")
rows = result.fetchall()
stmt = (
select(
Node.node_id,
Node.long_name,
Node.short_name,
Node.channel,
total_packets_sent,
total_times_seen,
)
.select_from(Node)
.outerjoin(
Packet,
(Packet.from_node_id == Node.node_id) & (Packet.import_time_us >= cutoff_us),
)
.outerjoin(PacketSeen, PacketSeen.packet_id == Packet.id)
.group_by(Node.node_id, Node.long_name, Node.short_name, Node.channel)
.having(total_packets_sent > 0)
.order_by(total_times_seen.desc())
)
nodes = [{
'node_id': row[0],
'long_name': row[1],
'short_name': row[2],
'channel': row[3],
'total_packets_sent': row[4],
'total_times_seen': row[5]
} for row in rows]
rows = (await session.execute(stmt)).all()
nodes = [
{
'node_id': row[0],
'long_name': row[1],
'short_name': row[2],
'channel': row[3],
'total_packets_sent': row[4],
'total_times_seen': row[5],
}
for row in rows
]
return nodes
except Exception as e:
@@ -208,32 +241,32 @@ async def get_top_traffic_nodes():
return []
async def get_node_traffic(node_id: int):
try:
async with database.async_session() as session:
result = await session.execute(
text("""
SELECT
node.long_name, packet.portnum,
COUNT(*) AS packet_count
FROM packet
JOIN node ON packet.from_node_id = node.node_id
WHERE node.node_id = :node_id
AND packet.import_time >= DATETIME('now', 'localtime', '-24 hours')
GROUP BY packet.portnum
ORDER BY packet_count DESC;
"""), {"node_id": node_id}
now_us = int(datetime.now(timezone.utc).timestamp() * 1_000_000)
cutoff_us = now_us - 86400 * 1_000_000
packet_count = func.count().label("packet_count")
stmt = (
select(Node.long_name, Packet.portnum, packet_count)
.select_from(Packet)
.join(Node, Packet.from_node_id == Node.node_id)
.where(Node.node_id == node_id)
.where(Packet.import_time_us >= cutoff_us)
.group_by(Node.long_name, Packet.portnum)
.order_by(packet_count.desc())
)
# Map the result to include node.long_name and packet data
traffic_data = [{
"long_name": row[0], # node.long_name
"portnum": row[1], # packet.portnum
"packet_count": row[2] # COUNT(*) as packet_count
} for row in result.all()]
return traffic_data
result = await session.execute(stmt)
return [
{
"long_name": row.long_name,
"portnum": row.portnum,
"packet_count": row.packet_count,
}
for row in result.all()
]
except Exception as e:
# Log the error or handle it as needed
@@ -241,12 +274,12 @@ async def get_node_traffic(node_id: int):
return []
async def get_nodes(role=None, channel=None, hw_model=None, days_active=None):
async def get_nodes(node_id=None, role=None, channel=None, hw_model=None, days_active=None):
"""
Fetches nodes from the database based on optional filtering criteria.
Parameters:
node_id
role (str, optional): The role of the node (converted to uppercase for consistency).
channel (str, optional): The communication channel associated with the node.
hw_model (str, optional): The hardware model of the node.
@@ -256,12 +289,18 @@ async def get_nodes(role=None, channel=None, hw_model=None, days_active=None):
"""
try:
async with database.async_session() as session:
#print(channel) # Debugging output (consider replacing with logging)
# print(channel) # Debugging output (consider replacing with logging)
# Start with a base query selecting all nodes
query = select(Node)
# Apply filters based on provided parameters
if node_id is not None:
try:
node_id_int = int(node_id)
except (TypeError, ValueError):
node_id_int = node_id
query = query.where(Node.node_id == node_id_int)
if role is not None:
query = query.where(Node.role == role.upper()) # Ensure role is uppercase
if channel is not None:
@@ -270,10 +309,12 @@ async def get_nodes(role=None, channel=None, hw_model=None, days_active=None):
query = query.where(Node.hw_model == hw_model)
if days_active is not None:
query = query.where(Node.last_update > datetime.now() - timedelta(days_active))
now_us = int(datetime.now(timezone.utc).timestamp() * 1_000_000)
cutoff_us = now_us - int(timedelta(days_active).total_seconds() * 1_000_000)
query = query.where(Node.last_seen_us > cutoff_us)
# Exclude nodes where last_update is an empty string
query = query.where(Node.last_update != "")
# Exclude nodes with missing last_seen_us
query = query.where(Node.last_seen_us.is_not(None))
# Order results by long_name in ascending order
query = query.order_by(Node.short_name.asc())
@@ -283,8 +324,8 @@ async def get_nodes(role=None, channel=None, hw_model=None, days_active=None):
nodes = result.scalars().all()
return nodes # Return the list of nodes
except Exception as e:
print("error reading DB") # Consider using logging instead of print
except Exception:
logger.exception("error reading DB")
return [] # Return an empty list in case of failure
@@ -294,27 +335,38 @@ async def get_packet_stats(
channel: str | None = None,
portnum: int | None = None,
to_node: int | None = None,
from_node: int | None = None
from_node: int | None = None,
):
now = datetime.now()
now = datetime.now(timezone.utc)
if period_type == "hour":
start_time = now - timedelta(hours=length)
time_format = '%Y-%m-%d %H:00'
time_format_sqlite = "%Y-%m-%d %H:00"
time_format_pg = "YYYY-MM-DD HH24:00"
elif period_type == "day":
start_time = now - timedelta(days=length)
time_format = '%Y-%m-%d'
time_format_sqlite = "%Y-%m-%d"
time_format_pg = "YYYY-MM-DD"
else:
raise ValueError("period_type must be 'hour' or 'day'")
async with database.async_session() as session:
q = (
select(
func.strftime(time_format, Packet.import_time).label('period'),
func.count().label('count')
dialect = session.get_bind().dialect.name
if dialect == "postgresql":
period_expr = func.to_char(
func.to_timestamp(Packet.import_time_us / 1_000_000.0),
time_format_pg,
)
.where(Packet.import_time >= start_time)
)
else:
period_expr = func.strftime(
time_format_sqlite,
func.datetime(Packet.import_time_us / 1_000_000, "unixepoch"),
)
q = select(
period_expr.label("period"),
func.count().label("count"),
).where(Packet.import_time_us >= int(start_time.timestamp() * 1_000_000))
# Filters
if channel:
@@ -338,5 +390,161 @@ async def get_packet_stats(
"portnum": portnum,
"to_node": to_node,
"from_node": from_node,
"data": data
"data": data,
}
async def get_channels_in_period(period_type: str = "hour", length: int = 24):
    """Return a sorted list of distinct channels used in packets over a period.

    Parameters:
        period_type: "hour" or "day".
        length: number of hours or days to look back.

    Returns:
        Sorted list of channel names (``None`` channels are dropped).

    Raises:
        ValueError: if ``period_type`` is neither "hour" nor "day".
    """
    # datetime.utcnow() returns a *naive* datetime whose .timestamp() is
    # interpreted as local time, skewing the epoch on non-UTC hosts.
    # Use an aware UTC datetime, matching the rest of this module.
    now_us = int(datetime.now(timezone.utc).timestamp() * 1_000_000)
    if period_type == "hour":
        delta_us = length * 3600 * 1_000_000
    elif period_type == "day":
        delta_us = length * 86400 * 1_000_000
    else:
        raise ValueError("period_type must be 'hour' or 'day'")
    start_us = now_us - delta_us
    async with database.async_session() as session:
        stmt = (
            select(Packet.channel)
            .where(Packet.import_time_us >= start_us)
            .distinct()
            .order_by(Packet.channel)
        )
        result = await session.execute(stmt)
        # Filter out NULL channels so callers get clean strings only.
        return [ch for ch in result.scalars().all() if ch is not None]
async def get_total_packet_count(
period_type: str | None = None,
length: int | None = None,
channel: str | None = None,
from_node: int | None = None,
to_node: int | None = None,
):
"""
Count total packets, with ALL filters optional.
If no filters -> return ALL packets ever.
Uses import_time_us (microseconds).
"""
# CASE 1: no filters -> count everything
if (
period_type is None
and length is None
and channel is None
and from_node is None
and to_node is None
):
async with database.async_session() as session:
q = select(func.count(Packet.id))
res = await session.execute(q)
return res.scalar() or 0
# CASE 2: filtered mode -> compute time window using import_time_us
now_us = int(datetime.now().timestamp() * 1_000_000)
if period_type is None:
period_type = "day"
if length is None:
length = 1
if period_type == "hour":
start_time_us = now_us - (length * 3600 * 1_000_000)
elif period_type == "day":
start_time_us = now_us - (length * 86400 * 1_000_000)
else:
raise ValueError("period_type must be 'hour' or 'day'")
async with database.async_session() as session:
q = select(func.count(Packet.id)).where(Packet.import_time_us >= start_time_us)
if channel:
q = q.where(func.lower(Packet.channel) == channel.lower())
if from_node:
q = q.where(Packet.from_node_id == from_node)
if to_node:
q = q.where(Packet.to_node_id == to_node)
res = await session.execute(q)
return res.scalar() or 0
async def get_total_packet_seen_count(
packet_id: int | None = None,
period_type: str | None = None,
length: int | None = None,
channel: str | None = None,
from_node: int | None = None,
to_node: int | None = None,
):
"""
Count total PacketSeen rows.
- If packet_id is provided -> count only that packet's seen entries.
- Otherwise match EXACT SAME FILTERS as get_total_packet_count.
Uses import_time_us for time window.
"""
# SPECIAL CASE: direct packet_id lookup
if packet_id is not None:
async with database.async_session() as session:
q = select(func.count(PacketSeen.packet_id)).where(PacketSeen.packet_id == packet_id)
res = await session.execute(q)
return res.scalar() or 0
# No filters -> return ALL seen entries
if (
period_type is None
and length is None
and channel is None
and from_node is None
and to_node is None
):
async with database.async_session() as session:
q = select(func.count(PacketSeen.packet_id))
res = await session.execute(q)
return res.scalar() or 0
# Compute time window
now_us = int(datetime.now().timestamp() * 1_000_000)
if period_type is None:
period_type = "day"
if length is None:
length = 1
if period_type == "hour":
start_time_us = now_us - (length * 3600 * 1_000_000)
elif period_type == "day":
start_time_us = now_us - (length * 86400 * 1_000_000)
else:
raise ValueError("period_type must be 'hour' or 'day'")
# JOIN Packet so we can apply identical filters
async with database.async_session() as session:
q = (
select(func.count(PacketSeen.packet_id))
.join(Packet, Packet.id == PacketSeen.packet_id)
.where(Packet.import_time_us >= start_time_us)
)
if channel:
q = q.where(func.lower(Packet.channel) == channel.lower())
if from_node:
q = q.where(Packet.from_node_id == from_node)
if to_node:
q = q.where(Packet.to_node_id == to_node)
res = await session.execute(q)
return res.scalar() or 0

View File

@@ -1,26 +1,27 @@
<!doctype html>
<html lang="en" data-bs-theme="dark">
<head>
<title>
Meshview - {{ site_config.get("site", {}).get("title", "") }}
{% if node and node.short_name %}-- {{ node.short_name }}{% endif %}
</title>
<title>Meshview</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- Scripts -->
<script src="https://unpkg.com/htmx.org@1.9.11" integrity="sha384-0gxUXCCR8yv9FM2b+U3FDbsKthCI66oH5IA9fHppQq9DDMHuMauqq1ZHBpJxQ0J0" crossorigin="anonymous"></script>
<script src="https://unpkg.com/htmx.org@1.9.11/dist/ext/sse.js" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/js/bootstrap.bundle.min.js" integrity="sha384-YvpcrYf0tY3lHB60NNkmXc5s9fDVZLESaAA55NDzOxhy9GkcIdslK1eN7N6jIeHz" crossorigin="anonymous"></script>
<!-- Stylesheets -->
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-QWTKZyjpPEjISv5WaRU9OFeRpok6YctnYmDr5pNlyT2bRjXh0JMhjY6hW+ALEwIH" crossorigin="anonymous">
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css" integrity="sha256-p4NxAoJBhIIN+hmNHrzRCf9tD/miZyoHS5obTRR9BMY=" crossorigin=""/>
<script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js" integrity="sha256-20nQCchB9co0qIjJZRGuk2/Z9VM+kNiyxNV1lvTlZBo=" crossorigin=""></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/js/bootstrap.bundle.min.js" crossorigin="anonymous"></script>
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css" rel="stylesheet" crossorigin="anonymous">
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css" crossorigin=""/>
<script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js" crossorigin=""></script>
{% block head %}{% endblock %}
<style>
body {
opacity: 0;
transition: opacity 0.3s ease-in-out;
}
body.ready {
opacity: 1;
}
.htmx-indicator {
opacity: 0;
transition: opacity 500ms ease-in;
@@ -28,47 +29,174 @@
.htmx-request .htmx-indicator {
opacity: 1;
}
#search_form {
z-index: 4000;
}
#details_map {
width: 100%;
height: 500px;
}
{% block css %}{% endblock %}
</style>
</head>
<body>
{% set site = site_config.get("site", {}) %}
<br>
<div style="text-align:center" id="site-header"></div>
<div style="text-align:center" id="site-message"></div>
<div style="text-align:center" id="site-menu"></div>
<br>
<div style="text-align:center">
<strong>{{ site.get("title", "") }} {{ site.get("domain", "") }}</strong>
</div>
<div style="text-align: center;">
{{ site.get("message", "") }}
</div>
<br>
<div style="text-align:center">
{% if site.get("nodes") == "True" %}<a href="/nodelist">Nodes</a>{% endif %}
{% if site.get("conversations") == "True" %}&nbsp;-&nbsp;<a href="/chat">Conversations</a>{% endif %}
{% if site.get("everything") == "True" %}&nbsp;-&nbsp;<a href="/firehose">See <strong>everything</strong></a>{% endif %}
{% if site.get("graphs") == "True" %}&nbsp;-&nbsp;<a href="/nodegraph">Mesh Graphs</a>{% endif %}
{% if site.get("net") == "True" %}&nbsp;-&nbsp;<a href="/net">Weekly Net</a>{% endif %}
{% if site.get("map") == "True" %}&nbsp;-&nbsp;<a href="/map">Live Map</a>{% endif %}
{% if site.get("stats") == "True" %}&nbsp;-&nbsp;<a href="/stats">Stats</a>{% endif %}
{% if site.get("top") == "True" %}&nbsp;-&nbsp;<a href="/top">Top Traffic</a>{% endif %}
</div>
{% block body %}{% endblock %}
{% include "search_form.html" %}
<br>
<div style="text-align:center" id="footer" data-translate="footer"></div>
<div style="text-align:center"><small id="site-version">ver. unknown</small></div>
<br>
{% block body %}{% endblock %}
<script>
// --- Shared Promises ---
if (!window._siteConfigPromise) {
window._siteConfigPromise = (async () => {
try {
const res = await fetch("/api/config");
const cfg = await res.json();
window._siteConfig = cfg;
console.log("Loaded config:", cfg);
return cfg;
} catch (err) {
console.error("Failed to load /api/config:", err);
return {};
}
})();
}
// --- Load language AFTER config ---
if (!window._langPromise) {
window._langPromise = (async () => {
try {
const cfg = await window._siteConfigPromise;
const site = cfg.site || {};
const userLang = site.language || "en";
const section = "base";
const url = `/api/lang?lang=${userLang}&section=${section}`;
const res = await fetch(url);
const lang = await res.json();
window._lang = lang;
console.log(`Loaded language (${userLang}):`, lang);
return lang;
} catch (err) {
console.error("Failed to load language:", err);
return {};
}
})();
}
// --- Translation Helper ---
// Walk every element tagged with data-translate and swap in the localized
// string from `dict`. Keys with no translation leave the element untouched.
function applyTranslations(dict) {
  const targets = document.querySelectorAll("[data-translate]");
  for (const node of targets) {
    const translated = dict[node.dataset.translate];
    if (!translated) continue;
    if (node.placeholder) {
      node.placeholder = translated;
    } else if (node.tagName === "INPUT" && node.value) {
      node.value = translated;
    } else if (node.dataset.translate === "footer") {
      // Footer strings come from our own language file and may carry markup.
      node.innerHTML = translated;
    } else {
      node.textContent = translated;
    }
  }
}
// --- Fill portnum select dynamically ---
// Populate #portnum_select from the language dictionary's portnum_options
// map, preselecting `selectedValue` when it matches one of the options.
function fillPortnumSelect(dict, selectedValue) {
  const sel = document.getElementById("portnum_select");
  if (!sel) return;
  const portOptions = dict.portnum_options || {};
  sel.innerHTML = "";

  // Leading "All" option, selected when no specific port is requested.
  const allOption = document.createElement("option");
  allOption.value = "";
  allOption.textContent = dict.all || "All";
  if (!selectedValue) allOption.selected = true;
  sel.appendChild(allOption);

  // Parse once, outside the loop, with an explicit radix. An empty or
  // non-numeric selectedValue yields NaN, which never compares equal,
  // so "All" simply stays selected.
  const selectedNum = Number.parseInt(selectedValue, 10);
  for (const [val, label] of Object.entries(portOptions)) {
    const opt = document.createElement("option");
    opt.value = val;
    opt.textContent = label;
    if (Number.parseInt(val, 10) === selectedNum) {
      opt.selected = true;
    }
    sel.appendChild(opt);
  }
}
// --- Main Init ---
// Waits for the shared config and language promises, then fills in the
// page chrome (title, header, message, menu, version), applies
// translations, and finally reveals the body (which starts at opacity 0).
async function initializePage() {
  try {
    // Both promises are created earlier in this script; Promise.all lets
    // the two fetches run in parallel.
    const [cfg, lang] = await Promise.all([
      window._siteConfigPromise,
      window._langPromise
    ]);
    const dict = lang || {};
    const site = cfg.site || {};
    // Title
    document.title = "Meshview - " + (site.title || "");
    // Header & Message
    document.getElementById("site-header").innerHTML =
      `<strong>${site.title || ""} ${site.domain ? "(" + site.domain + ")" : ""}</strong>`;
    document.getElementById("site-message").textContent = site.message || "";
    // Menu: each enabled feature flag becomes a nav link, in order.
    const menu = document.getElementById("site-menu");
    if (menu) {
      const items = [];
      // keys[i] is both the config flag name and the translation key;
      // urls[i] is the matching route.
      const keys = ["nodes", "chat", "everything", "graphs", "net", "map", "stats", "top"];
      const urls = ["/nodelist", "/chat", "/firehose", "/nodegraph", "/net", "/map", "/stats", "/top"];
      for (let i = 0; i < keys.length; i++) {
        const key = keys[i];
        // NOTE(review): this expects a lowercase "true" flag, while the
        // server-side templates elsewhere compare against "True" — confirm
        // the casing the /api/config endpoint actually emits.
        if (site[key] === "true") {
          items.push(`<a href="${urls[i]}">${dict[key] || key}</a>`);
        }
      }
      menu.innerHTML = items.join("&nbsp;-&nbsp;");
    }
    // Version
    document.getElementById("site-version").textContent =
      "ver. " + (site.version || "unknown");
    // Apply translations to all data-translate elements, then seed the
    // portnum dropdown (the Jinja expression injects the current value).
    applyTranslations(dict);
    fillPortnumSelect(dict, "{{ portnum or '' }}");
    document.body.classList.add("ready");
  } catch (err) {
    // Even on failure, reveal the page so the user isn't stuck on a
    // blank (opacity 0) body.
    console.error("Failed to initialize page:", err);
    document.body.classList.add("ready");
  }
}
document.addEventListener("DOMContentLoaded", initializePage);
</script>
<br>
<div style="text-align:center">
Visit <strong><a href="https://github.com/pablorevilla-meshtastic/meshview">Meshview</a></strong> on Github.
<small>ver. {{ SOFTWARE_RELEASE | default("unknown") }}</small>
</div>
<br>
</body>
</html>

View File

@@ -1,16 +0,0 @@
<div id="buttons" class="btn-group" role="group">
<a
role="button"
class="btn {{ 'btn-primary' if packet_event == 'packet' else 'btn-secondary'}}"
href="/packet_list/{{node_id}}?{{query_string}}"
>
TX/RX
</a>
<a
role="button"
class="btn {{ 'btn-primary' if packet_event == 'uplinked' else 'btn-secondary'}}"
href="/uplinked_list/{{node_id}}?{{query_string}}"
>
Uplinked
</a>
</div>

View File

@@ -3,176 +3,242 @@
{% block css %}
.timestamp {
min-width: 10em;
color: #ccc;
}
.chat-packet:nth-of-type(odd) {
background-color: #3a3a3a;
}
.chat-packet:nth-of-type(odd) { background-color: #3a3a3a; }
.chat-packet {
border-bottom: 1px solid #555;
padding: 8px;
border-radius: 8px;
padding: 3px 6px;
border-radius: 6px;
margin: 0;
}
.chat-packet:nth-of-type(even) {
background-color: #333333;
/* Same column spacing as before */
.chat-packet > [class^="col-"] {
padding-left: 10px !important;
padding-right: 10px !important;
padding-top: 1px !important;
padding-bottom: 1px !important;
}
.chat-packet:nth-of-type(even) { background-color: #333333; }
.channel {
font-style: italic;
color: #bbb;
}
.channel a {
font-style: normal;
color: #999;
}
@keyframes flash {
0% { background-color: #ffe066; }
100% { background-color: inherit; }
}
.chat-packet.flash {
animation: flash 3.5s ease-out;
}
.chat-packet.flash { animation: flash 3.5s ease-out; }
/* Nested reply style below the message */
.replying-to {
font-size: 0.85em;
color: #aaa; /* gray text */
margin-top: 4px;
padding-left: 20px; /* increased indentation */
.replying-to .reply-preview {
font-size: 0.8em;
color: #aaa;
margin-top: 2px;
padding-left: 10px;
}
}
.replying-to .reply-preview { color: #aaa; }
{% endblock %}
{% block body %}
<div id="chat-container">
<div class="container" id="chat-log">
<div id="chat-container" class="mt-3">
<!-- ⭐ CHAT TITLE WITH ICON, aligned to container ⭐ -->
<div class="container px-2">
<h2 style="color:white; margin:0 0 10px 0;">
<span class="icon">💬</span>
<span data-translate="chat_title"></span>
</h2>
</div>
<div class="container" id="chat-log"></div>
</div>
<script>
const chatContainer = document.querySelector("#chat-log");
let lastTime = null;
const renderedPacketIds = new Set();
const packetMap = new Map(); // store all packets weve seen
document.addEventListener("DOMContentLoaded", async () => {
const chatContainer = document.querySelector("#chat-log");
if (!chatContainer) return console.error("#chat-log not found");
function escapeHtml(text) {
const div = document.createElement("div");
div.textContent = text == null ? "" : text;
return div.innerHTML;
}
let lastTime = null;
const renderedPacketIds = new Set();
const packetMap = new Map();
let chatLang = {};
function renderPacket(packet, highlight = false) {
// prevent duplicates
if (renderedPacketIds.has(packet.id)) return;
renderedPacketIds.add(packet.id);
packetMap.set(packet.id, packet);
/* ==========================================================
TRANSLATIONS FOR CHAT PAGE
========================================================== */
function applyTranslations(dict, root=document) {
root.querySelectorAll("[data-translate]").forEach(el => {
const key = el.dataset.translate;
const val = dict[key];
if (!val) return;
if (el.placeholder) el.placeholder = val;
else el.textContent = val;
});
}
const date = new Date(packet.import_time);
const formattedTime = date.toLocaleTimeString([], {
hour: "numeric", minute: "2-digit", second: "2-digit", hour12: true
});
const formattedDate = `${(date.getMonth() + 1).toString().padStart(2,"0")}/` +
`${date.getDate().toString().padStart(2,"0")}/` +
`${date.getFullYear()}`;
const formattedTimestamp = `${formattedTime} - ${formattedDate}`;
async function loadChatLang() {
try {
const cfg = await window._siteConfigPromise;
const langCode = cfg?.site?.language || "en";
const res = await fetch(`/api/lang?lang=${langCode}&section=chat`);
chatLang = await res.json();
// Try to resolve the reply target
let replyHtml = "";
if (packet.reply_id) {
const parent = packetMap.get(packet.reply_id);
if (parent) {
replyHtml = `
<div class="replying-to">
<div class="reply-preview">
<i>Replying to: <strong>${escapeHtml((parent.long_name || "").trim() || `Node ${parent.from_node_id}`)}</strong>:
${escapeHtml(parent.payload || "")}</i>
</div>
</div>
// Apply to existing DOM
applyTranslations(chatLang);
} catch (err) {
console.error("Chat translation load failed:", err);
}
}
/* ==========================================================
SAFE HTML
========================================================== */
function escapeHtml(text) {
const div = document.createElement("div");
div.textContent = text ?? "";
return div.innerHTML;
}
/* ==========================================================
RENDERING PACKETS
========================================================== */
function renderPacket(packet, highlight = false) {
if (renderedPacketIds.has(packet.id)) return;
renderedPacketIds.add(packet.id);
packetMap.set(packet.id, packet);
let date;
if (packet.import_time_us && packet.import_time_us > 0) {
date = new Date(packet.import_time_us / 1000);
} else if (packet.import_time) {
date = new Date(packet.import_time);
} else {
date = new Date();
}
const formattedTime = date.toLocaleTimeString([], {
hour:"numeric",
minute:"2-digit",
second:"2-digit",
hour12:true
});
const formattedDate =
`${(date.getMonth()+1).toString().padStart(2,"0")}/` +
`${date.getDate().toString().padStart(2,"0")}/` +
`${date.getFullYear()}`;
const formattedTimestamp = `${formattedTime} - ${formattedDate}`;
let replyHtml = "";
if (packet.reply_id) {
const parent = packetMap.get(packet.reply_id);
const replyPrefix = `<i data-translate="replying_to"></i>`;
if (parent) {
replyHtml = `
<div class="replying-to">
${replyPrefix}
<strong>${escapeHtml((parent.long_name || "").trim() || `Node ${parent.from_node_id}`)}</strong>:
${escapeHtml(parent.payload || "")}
</div>`;
} else {
replyHtml = `
<div class="replying-to">
${replyPrefix}
<a href="/packet/${packet.reply_id}">${packet.reply_id}</a>
</div>`;
}
}
const div = document.createElement("div");
div.className = "row chat-packet" + (highlight ? " flash" : "");
div.dataset.packetId = packet.id;
div.innerHTML = `
<span class="col-2 timestamp" title="${packet.import_time_us}">
${formattedTimestamp}
</span>
<span class="col-2 channel">
<a href="/packet/${packet.id}" title="${chatLang.view_packet_details || 'View details'}">🔎</a>
${escapeHtml(packet.channel || "")}
</span>
<span class="col-3 nodename">
<a href="/node/${packet.from_node_id}">
${escapeHtml((packet.long_name || "").trim() || `Node ${packet.from_node_id}`)}
</a>
</span>
<span class="col-5 message">
${escapeHtml(packet.payload)}${replyHtml}
</span>
`;
} else {
// fallback if parent not loaded yet
replyHtml = `
<div class="replying-to">
<i>Replying to: <a href="/packet/${packet.reply_id}">${packet.reply_id}</a></i>
</div>
`;
}
chatContainer.prepend(div);
// Translate newly added DOM
applyTranslations(chatLang, div);
if (highlight) setTimeout(() => div.classList.remove("flash"), 2500);
}
const div = document.createElement("div");
div.className = "row chat-packet" + (highlight ? " flash" : "");
div.dataset.packetId = packet.id;
div.innerHTML = `
<span class="col-2 timestamp" title="${packet.import_time}">
${formattedTimestamp}
</span>
<span class="col-2 channel">
<a href="/packet/${packet.id}" title="View packet details">✉️</a> ${escapeHtml(packet.channel || "")}
</span>
<span class="col-3 nodename">
<a href="/packet_list/${packet.from_node_id}">
${escapeHtml((packet.long_name || "").trim() || `Node ${packet.from_node_id}`)}
</a>
</span>
<span class="col-5 message">
${escapeHtml(packet.payload)}
${replyHtml}
</span>
`;
// Prepend so newest messages are at the top.
chatContainer.prepend(div);
if (highlight) setTimeout(() => div.classList.remove("flash"), 2500);
}
function renderPacketsEnsureDescending(packets, highlight = false) {
if (!Array.isArray(packets) || packets.length === 0) return;
const sortedDesc = packets.slice().sort((a, b) =>
new Date(b.import_time) - new Date(a.import_time)
);
for (let i = sortedDesc.length - 1; i >= 0; i--) {
renderPacket(sortedDesc[i], highlight);
}
}
async function fetchInitial() {
try {
const url = new URL("/api/chat", window.location.origin);
url.searchParams.set("limit", "100");
const resp = await fetch(url);
const data = await resp.json();
if (data && data.packets && data.packets.length > 0) {
renderPacketsEnsureDescending(data.packets, false);
if (data.latest_import_time) lastTime = data.latest_import_time;
function renderPacketsEnsureDescending(packets, highlight=false) {
if (!Array.isArray(packets) || packets.length===0) return;
const sortedDesc = packets.slice().sort((a,b)=>{
const aTime = a.import_time_us || (new Date(a.import_time).getTime() * 1000);
const bTime = b.import_time_us || (new Date(b.import_time).getTime() * 1000);
return bTime - aTime;
});
for (let i=sortedDesc.length-1; i>=0; i--) {
renderPacket(sortedDesc[i], highlight);
}
}
} catch (err) {
console.error("Initial fetch error:", err);
}
}
async function fetchUpdates() {
try {
const url = new URL("/api/chat", window.location.origin);
url.searchParams.set("limit", "100");
if (lastTime) url.searchParams.set("since", lastTime);
const resp = await fetch(url);
const data = await resp.json();
if (data && data.packets && data.packets.length > 0) {
renderPacketsEnsureDescending(data.packets, true);
if (data.latest_import_time) lastTime = data.latest_import_time;
/* ==========================================================
FETCHING PACKETS
========================================================== */
async function fetchInitial() {
try {
const resp = await fetch("/api/packets?portnum=1&limit=100");
const data = await resp.json();
if (data?.packets?.length) renderPacketsEnsureDescending(data.packets);
lastTime = data?.latest_import_time || lastTime;
} catch(err){
console.error("Initial fetch error:", err);
}
}
} catch (err) {
console.error("Fetch updates error:", err);
}
}
// initial load
fetchInitial();
setInterval(fetchUpdates, 5000);
async function fetchUpdates() {
try {
const url = new URL("/api/packets?portnum=1", window.location.origin);
url.searchParams.set("limit","100");
if (lastTime) url.searchParams.set("since", lastTime);
const resp = await fetch(url);
const data = await resp.json();
if (data?.packets?.length) renderPacketsEnsureDescending(data.packets, true);
lastTime = data?.latest_import_time || lastTime;
} catch(err){
console.error("Fetch updates error:", err);
}
}
/* ==========================================================
INIT
========================================================== */
await loadChatLang(); // load translations FIRST
await fetchInitial(); // then fetch initial packets
setInterval(fetchUpdates, 5000);
});
</script>
{% endblock %}

View File

@@ -1,7 +0,0 @@
<datalist
id="node_options"
>
{% for option in node_options %}
<option value="{{option.id}}">{{option.id}} -- {{option.long_name}} ({{option.short_name}})</option>
{% endfor %}
</datalist>

View File

@@ -1,100 +1,403 @@
{% extends "base.html" %}
{% block css %}
.container {
max-width: 900px;
margin: 0 auto;
}
.container {
margin: 0 auto;
padding: 10px;
}
#pause-button {
white-space: nowrap;
padding: 4px 10px;
font-size: 0.9rem;
border-radius: 6px;
}
#pause-button {
white-space: nowrap;
padding: 2px 8px;
font-size: 0.85rem;
}
.port-tag {
display: inline-block;
padding: 2px 6px;
border-radius: 6px;
font-size: 0.75rem;
font-weight: 500;
color: #fff;
}
/* Packet table */
.packet-table {
width: 100%;
border-collapse: collapse;
font-size: 0.85rem;
color: #e4e9ee;
}
.packet-table th, .packet-table td {
border: 1px solid #3a3f44;
padding: 6px 10px;
text-align: left;
}
.packet-table th {
background-color: #1f2226;
font-weight: bold;
}
.packet-table tr:nth-of-type(odd) { background-color: #272b2f; }
.packet-table tr:nth-of-type(even) { background-color: #212529; }
/* Port tag */
.port-tag {
display: inline-block;
padding: 1px 6px;
border-radius: 6px;
font-size: 0.75rem;
font-weight: 500;
color: #fff;
}
.to-mqtt { font-style: italic; color: #aaa; }
/* Payload rows */
.payload-row { display: none; background-color: #1b1e22; }
.payload-cell {
padding: 8px 12px;
font-family: monospace;
white-space: pre-wrap;
color: #b0bec5;
}
.packet-table tr.expanded + .payload-row {
display: table-row;
}
/* Toggle arrow */
.toggle-btn {
cursor: pointer;
color: #aaa;
margin-right: 6px;
font-weight: bold;
}
.toggle-btn:hover { color: #fff; }
/* Inline link next to port tag */
.inline-link {
margin-left: 6px;
font-weight: bold;
text-decoration: none;
color: #9fd4ff;
}
.inline-link:hover {
color: #c7e6ff;
}
{% endblock %}
{% block body %}
<div class="container">
<form class="d-flex align-items-center justify-content-between mb-2">
{% set options = {
1: "Text Message",
3: "Position",
4: "Node Info",
67: "Telemetry",
71: "Neighbor Info",
70: "Trace Route",
}
%}
<button type="button" id="pause-button" class="btn btn-sm btn-outline-secondary">Pause</button>
<form class="d-flex align-items-center justify-content-between mb-3">
<h2 class="mb-0" data-translate-lang="live_feed">📡 Live Feed</h2>
<button type="button"
id="pause-button"
class="btn btn-sm btn-outline-secondary"
data-translate-lang="pause">
Pause
</button>
</form>
<div class="row">
<div class="col-xs" id="packet_list">
{% for packet in packets %}
{% include 'packet.html' %}
{% else %}
No packets found.
{% endfor %}
</div>
</div>
<table class="packet-table">
<thead>
<tr>
<th data-translate-lang="time">Time</th>
<th data-translate-lang="packet_id">Packet ID</th>
<th data-translate-lang="from">From</th>
<th data-translate-lang="to">To</th>
<th data-translate-lang="port">Port</th>
</tr>
</thead>
<tbody id="packet_list"></tbody>
</table>
</div>
<script src="/static/portmaps.js"></script>
<script>
let lastTime = null;
let portnum = "{{ portnum if portnum is not none else '' }}";
let updatesPaused = false;
/* ======================================================
FIREHOSE TRANSLATION SYSTEM (isolated from base)
====================================================== */
let firehoseTranslations = {};
// Use firehose_interval from config (seconds), default to 3s if not set
const firehoseInterval = {{ site_config["site"]["firehose_interal"] | default(3) }};
if (firehoseInterval < 0) firehoseInterval = 0;
function applyTranslationsFirehose(translations, root=document) {
root
.querySelectorAll("[data-translate-lang]")
.forEach(el => {
const key = el.dataset.translateLang;
if (!translations[key]) return;
function fetchUpdates() {
if (updatesPaused || firehoseInterval === 0) return;
const url = new URL("/firehose/updates", window.location.origin);
if (lastTime) url.searchParams.set("last_time", lastTime);
if (portnum) url.searchParams.set("portnum", portnum);
fetch(url)
.then(res => res.json())
.then(data => {
if (data.packets && data.packets.length > 0) {
lastTime = data.last_time;
const list = document.getElementById("packet_list");
for (const html of data.packets.reverse()) {
list.insertAdjacentHTML("afterbegin", html);
}
}
})
.catch(err => {
console.error("Update fetch failed:", err);
});
if (el.tagName === "INPUT" && el.placeholder !== undefined) {
el.placeholder = translations[key];
} else {
el.textContent = translations[key];
}
});
}
document.addEventListener("DOMContentLoaded", () => {
const pauseBtn = document.getElementById("pause-button");
async function loadTranslationsFirehose() {
try {
const cfg = await window._siteConfigPromise;
const lang = cfg?.site?.language || "en";
const portnumSelector = document.querySelector('select[name="portnum"]');
if (portnumSelector) {
portnumSelector.addEventListener("change", (e) => {
const selected = e.target.value;
const url = new URL(window.location.href);
url.searchParams.set("portnum", selected);
window.location.href = url;
});
const res = await fetch(`/api/lang?lang=${lang}&section=firehose`);
firehoseTranslations = await res.json();
applyTranslationsFirehose(firehoseTranslations);
} catch (err) {
console.error("Firehose translation load failed:", err);
}
}
/* ======================================================
NODE LOOKUP
====================================================== */
// node_id -> display-name lookup, populated by loadNodes().
let nodeMap = {};

// Fetch the node list once and build the id -> name map.
// 4294967295 (0xFFFFFFFF) is the broadcast address; give it a friendly label.
async function loadNodes() {
  try {
    const res = await fetch("/api/nodes");
    const data = await res.json();
    const list = data.nodes || [];
    for (const node of list) {
      // Prefer the richest name available, falling back to the raw id.
      nodeMap[node.node_id] =
        node.long_name || node.short_name || node.id || node.node_id;
    }
    nodeMap[4294967295] = firehoseTranslations.all_broadcast || "All";
  } catch (err) {
    console.error("Failed loading nodes:", err);
  }
}
// Resolve a node id to its display name; unknown ids fall back to the id.
function nodeName(id) {
  const label = nodeMap[id];
  return label || id;
}
/* ======================================================
PORT COLORS & NAMES
====================================================== */
// Port lookup tables published by /static/portmaps.js; default to empty
// objects so the page still renders if that script failed to load.
// (globalThis is identical to window in browsers and also works elsewhere.)
const PORT_MAP = globalThis.PORT_MAP || {};
const PORT_COLORS = globalThis.PORT_COLORS || {};
// Build the colored port badge plus "(portnum)" and optional inline links.
// SECURITY FIX: the payload goes into an HTML `title` attribute; the
// original escaped only double quotes, so `&`, `<` and `>` in a packet
// payload could inject markup. Escape all four (ampersand first).
function portLabel(portnum, payload, linksHtml) {
  const name = PORT_MAP[portnum] || "Unknown";
  const color = PORT_COLORS[portnum] || "#6c757d";
  const safePayload = payload
    ? payload
        .replace(/&/g, "&amp;")
        .replace(/</g, "&lt;")
        .replace(/>/g, "&gt;")
        .replace(/"/g, "&quot;")
    : "";
  return `
<span class="port-tag" style="background-color:${color}" title="${safePayload}">
${name}
</span>
<span class="text-secondary">(${portnum})</span>
${linksHtml || ""}
`;
}
/* ======================================================
TIME FORMAT
====================================================== */
// Convert a microsecond epoch timestamp into human-readable local and UTC
// time strings plus the raw epoch value (as a string).
// Non-numeric input yields em-dash placeholders for all three fields.
function formatTimes(importTimeUs) {
  const ms = Number(importTimeUs) / 1000;
  if (!Number.isFinite(ms)) {
    return { local: "—", utc: "—", epoch: "—" };
  }
  const when = new Date(ms);
  const base = { hour: "2-digit", minute: "2-digit", second: "2-digit" };
  const local = when.toLocaleTimeString([], {
    ...base,
    timeZoneName: "short"
  });
  const utc = when.toLocaleTimeString([], {
    ...base,
    timeZone: "UTC",
    timeZoneName: "short"
  });
  return { local, utc, epoch: String(importTimeUs) };
}
// Debug-log a packet's timestamp in every representation we render.
function logPacketTimes(packet) {
  const t = formatTimes(packet.import_time_us);
  const fields = [
    "id=" + packet.id,
    "epoch_us=" + t.epoch,
    "local=" + t.local,
    "utc=" + t.utc
  ];
  console.log("[firehose] packet time", ...fields);
}
/* ======================================================
FIREHOSE FETCHING
====================================================== */
// Firehose polling state.
let lastImportTimeUs = null; // newest import_time_us seen so far
let updatesPaused = false;   // toggled by the Pause button
let updateInterval = 3000;   // poll period in ms (may be overridden by config)

// Read the polling interval (seconds) from the shared site-config promise
// and convert it to milliseconds; keep the default on any failure.
async function configureFirehose() {
  try {
    const cfg = await window._siteConfigPromise;
    const seconds = cfg?.site?.firehose_interval;
    if (seconds && !isNaN(seconds)) {
      updateInterval = seconds * 1000;
    }
  } catch {
    // best effort — fall back to the default updateInterval
  }
}
// Poll /api/packets for packets newer than lastImportTimeUs and prepend one
// table row (plus a hidden payload row) per packet to #packet_list.
// Skips work entirely while the user has paused updates.
async function fetchUpdates() {
if (updatesPaused) return;
const url = new URL("/api/packets", window.location.origin);
url.searchParams.set("limit", 100);
if (lastImportTimeUs)
url.searchParams.set("since", lastImportTimeUs);
try {
const res = await fetch(url);
if (!res.ok) return;
const data = await res.json();
const packets = data.packets || [];
if (!packets.length) return;
const list = document.getElementById("packet_list");
// Iterate oldest-first (reverse of the response) so that repeated
// insertAdjacentHTML("afterbegin", ...) leaves the newest row on top.
for (const pkt of packets.reverse()) {
logPacketTimes(pkt);
/* FROM — includes translation */
// 4294967295 (0xFFFFFFFF) is the broadcast address.
// NOTE(review): node names from nodeMap are interpolated into HTML
// without escaping — a malicious long_name could inject markup; confirm
// upstream sanitization or escape here.
const from =
pkt.from_node_id === 4294967295
? `<span class="to-mqtt" data-translate-lang="all_broadcast">
${firehoseTranslations.all_broadcast || "All"}
</span>`
: `<a href="/node/${pkt.from_node_id}" style="text-decoration:underline; color:inherit;">
${nodeMap[pkt.from_node_id] || pkt.from_node_id}
</a>`;
/* TO — includes translation */
// to_node_id === 1 marks packets addressed directly to the MQTT bridge.
const to =
pkt.to_node_id === 1
? `<span class="to-mqtt" data-translate-lang="direct_to_mqtt">
${firehoseTranslations.direct_to_mqtt || "direct to MQTT"}
</span>`
: pkt.to_node_id === 4294967295
? `<span class="to-mqtt" data-translate-lang="all_broadcast">
${firehoseTranslations.all_broadcast || "All"}
</span>`
: `<a href="/node/${pkt.to_node_id}" style="text-decoration:underline; color:inherit;">
${nodeMap[pkt.to_node_id] || pkt.to_node_id}
</a>`;
let inlineLinks = "";
// Position link: scrape the integer lat/lon fields (1e-7 degree units)
// out of the text payload and link to Google Maps.
if (pkt.portnum === 3 && pkt.payload) {
const latMatch = pkt.payload.match(/latitude_i:\s*(-?\d+)/);
const lonMatch = pkt.payload.match(/longitude_i:\s*(-?\d+)/);
if (latMatch && lonMatch) {
const lat = parseInt(latMatch[1]) / 1e7;
const lon = parseInt(lonMatch[1]) / 1e7;
inlineLinks += ` <a class="inline-link"
href="https://www.google.com/maps?q=${lat},${lon}"
target="_blank">📍</a>`;
}
}
// Traceroute link: prefer an "ID: n" embedded in the payload over pkt.id.
// NOTE(review): assumes pkt.payload is non-null for portnum 70 — a null
// payload would throw here; confirm the API guarantees it.
if (pkt.portnum === 70) {
let traceId = pkt.id;
const match = pkt.payload.match(/ID:\s*(\d+)/i);
if (match) traceId = match[1];
inlineLinks += ` <a class="inline-link"
href="/graph/traceroute/${traceId}"
target="_blank">⮕</a>`;
}
// Escape angle brackets before embedding the payload in the detail row.
const safePayload = (pkt.payload || "")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;");
// Main row + hidden payload row (revealed by the ▶ toggle via CSS).
const html = `
<tr class="packet-row">
<td>
${formatTimes(pkt.import_time_us).local}<br>
</td>
<td>
<span class="toggle-btn">▶</span>
<a href="/packet/${pkt.id}"
style="text-decoration:underline; color:inherit;">
${pkt.id}
</a>
</td>
<td>${from}</td>
<td>${to}</td>
<td>${portLabel(pkt.portnum, pkt.payload, inlineLinks)}</td>
</tr>
<tr class="payload-row">
<td colspan="5" class="payload-cell">${safePayload}</td>
</tr>
`;
list.insertAdjacentHTML("afterbegin", html);
}
// Cap the table (each packet contributes 2 rows, so ~200 packets).
while (list.rows.length > 400) list.deleteRow(-1);
// Response is newest-first, so after reverse() the last element is newest.
lastImportTimeUs = packets[packets.length - 1].import_time_us;
} catch (err) {
console.error("Packet fetch failed:", err);
}
}
/* ======================================================
INITIALIZE PAGE
====================================================== */
// Page bootstrap: wire UI handlers, load translations/config/nodes, then
// start polling for packets.
document.addEventListener("DOMContentLoaded", async () => {
  const pauseBtn = document.getElementById("pause-button");

  // Toggle polling; the label comes from translations with English fallback.
  // (The original also assigned a plain "Resume"/"Pause" label immediately
  // before this — a dead duplicate assignment that has been removed.)
  pauseBtn.addEventListener("click", () => {
    updatesPaused = !updatesPaused;
    pauseBtn.textContent =
      updatesPaused
        ? (firehoseTranslations.resume || "Resume")
        : (firehoseTranslations.pause || "Pause");
  });

  // Delegated handler: clicking a row's arrow expands/collapses its payload.
  document.addEventListener("click", e => {
    const btn = e.target.closest(".toggle-btn");
    if (!btn) return;
    const row = btn.closest(".packet-row");
    row.classList.toggle("expanded");
    btn.textContent =
      row.classList.contains("expanded") ? "▼" : "▶";
  });

  await loadTranslationsFirehose();
  await configureFirehose();
  await loadNodes();
  fetchUpdates();
  // BUG FIX: the original registered the poll timer twice — once via a
  // leftover `if (firehoseInterval > 0) setInterval(fetchUpdates, ...)` and
  // again with updateInterval — doubling the request rate. Register exactly
  // one timer, driven by the config-derived updateInterval.
  setInterval(fetchUpdates, updateInterval);
});
</script>

View File

@@ -1,348 +1,560 @@
{% extends "base.html" %}
{% block css %}
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css"
integrity="sha256-p4NxAoJBhIIN+hmNHrzRCf9tD/miZyoHS5obTRR9BMY="
crossorigin=""/>
<style>
.legend {
background: white;
padding: 8px;
line-height: 1.5;
border-radius: 5px;
box-shadow: 0 0 10px rgba(0,0,0,0.3);
font-size: 14px;
color: black;
}
.legend i {
width: 12px;
height: 12px;
display: inline-block;
margin-right: 6px;
border-radius: 50%;
}
#filter-container {
text-align: center;
margin-top: 10px;
}
.filter-checkbox {
margin: 0 10px;
}
.blinking-tooltip {
background: white;
color: black;
border: 1px solid black;
border-radius: 4px;
padding: 2px 5px;
.legend { background:white;padding:8px;line-height:1.5;border-radius:5px;box-shadow:0 0 10px rgba(0,0,0,0.3);font-size:14px;color:black; }
.legend i { width:12px;height:12px;display:inline-block;margin-right:6px;border-radius:50%; }
#filter-container { text-align:center;margin-top:10px; }
.filter-checkbox { margin:0 10px; }
#share-button,
#reset-filters-button {
padding:5px 15px;border:none;border-radius:4px;font-size:14px;cursor:pointer;color:white;
}
#share-button { margin-left:20px; background-color:#4CAF50; }
#share-button:hover { background-color:#45a049; }
#share-button:active { background-color:#3d8b40; }
#reset-filters-button { margin-left:10px; background-color:#f44336; }
#reset-filters-button:hover { background-color:#da190b; }
#reset-filters-button:active { background-color:#c41e0d; }
.blinking-tooltip { background:white;color:black;border:1px solid black;border-radius:4px;padding:2px 5px; }
</style>
{% endblock %}
{% block body %}
<div id="map" style="width: 100%; height: calc(100vh - 270px)"></div>
<div id="map" style="width:100%; height:calc(100vh - 270px)"></div>
<div id="map-legend"
class="legend"
style="position:absolute;
bottom:30px;
right:15px;
z-index:500;
pointer-events:none;">
<div>
<i style="background:orange; width:15px; height:3px; border-radius:0;"></i>
<span data-translate-lang="legend_traceroute">Traceroute Path (arrowed)</span>
</div>
<div style="margin-top:6px;">
<i style="background:gray; width:15px; height:3px; border-radius:0;"></i>
<span data-translate-lang="legend_neighbor">Neighbor Link</span>
</div>
</div>
<div id="filter-container">
<input type="checkbox" class="filter-checkbox" id="filter-routers-only"> Show Routers Only
<input type="checkbox" class="filter-checkbox" id="filter-routers-only">
<span data-translate-lang="show_routers_only">Show Routers Only</span>
</div>
<div style="text-align:center;margin-top:5px;">
<button id="share-button" onclick="shareCurrentView()" data-translate-lang="share_view">
🔗 Share This View
</button>
<button id="reset-filters-button" onclick="resetFiltersToDefaults()" data-translate-lang="reset_filters">
↺ Reset Filters To Defaults
</button>
</div>
<script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js"
integrity="sha256-20nQCchB9co0qIjJZRGuk2/Z9VM+kNiyxNV1lvTlZBo="
crossorigin=""></script>
<script src="https://unpkg.com/leaflet-polylinedecorator@1.6.0/dist/leaflet.polylinedecorator.js"
integrity="sha384-FhPn/2P/fJGhQLeNWDn9B/2Gml2bPOrKJwFqJXgR3xOPYxWg5mYQ5XZdhUSugZT0"
crossorigin></script>
<script>
// ---- Map Setup ----
var map = L.map('map');
L.tileLayer('https://tile.openstreetmap.org/{z}/{x}/{y}.png', {
maxZoom: 19,
attribution: '&copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>'
}).addTo(map);
/* ======================================================
MAP PAGE TRANSLATION SYSTEM
====================================================== */
// ---- Node Data ----
var markers = {};
var markerById = {};
var nodes = [
{% for node in nodes %}
{
lat: {{ ((node.last_lat / 10**7) + (range(-9,9) | random) / 30000) | round(7) }},
long: {{ ((node.last_long / 10**7) + (range(-9,9) | random) / 30000) | round(7) if node.last_long is not none else "null" }},
long_name: {{ (node.long_name or "") | tojson }},
short_name: {{ (node.short_name or "") | tojson }},
channel: {{ (node.channel or "") | tojson }},
hw_model: {{ (node.hw_model or "") | tojson }},
role: {{ (node.role or "") | tojson }},
last_update: {{ node.last_update | default("", true) | tojson }},
firmware: {{ (node.firmware or "") | tojson }},
id: {{ (node.node_id or "") | tojson }},
isRouter: {{ 'true' if 'router' in (node.role or '').lower() else 'false' }}
}{{ "," if not loop.last else "" }}
{% endfor %}
];
let mapTranslations = {};
const portMap = {1: "Text", 67: "Telemetry", 3: "Position", 70: "Traceroute", 4: "Node Info", 71: "Neighbour Info", 73: "Map Report"};
function timeAgo(date) {
const now = Date.now();
const diff = now - new Date(date);
const seconds = Math.floor(diff / 1000);
const minutes = Math.floor(seconds / 60);
const hours = Math.floor(minutes / 60);
const days = Math.floor(hours / 24);
if (days > 0) return days + "d";
if (hours > 0) return hours + "h";
if (minutes > 0) return minutes + "m";
return seconds + "s";
async function loadTranslationsMap() {
try {
const cfg = await window._siteConfigPromise;
const lang = cfg?.site?.language || "en";
const res = await fetch(`/api/lang?lang=${lang}&section=map`);
mapTranslations = await res.json();
applyTranslationsMap();
} catch (err) {
console.error("Map translation load failed:", err);
}
}
const palette = ["#e6194b","#4363d8","#f58231","#911eb4","#46f0f0","#f032e6","#bcf60c","#fabebe","#008080","#e6beff","#9a6324","#fffac8","#800000","#aaffc3","#808000","#ffd8b1","#000075","#808080"];
const colorMap = new Map();
let nextColorIndex = 0;
function applyTranslationsMap(root = document) {
root.querySelectorAll("[data-translate-lang]").forEach(el => {
const key = el.dataset.translateLang;
const val = mapTranslations[key];
if (!val) return;
function hashToColor(str) {
if (colorMap.has(str)) return colorMap.get(str);
const color = palette[nextColorIndex % palette.length];
colorMap.set(str, color);
nextColorIndex++;
return color;
}
const nodeMap = new Map();
nodes.forEach(n => nodeMap.set(n.id, n));
function isInvalidCoord(node) {
if (!node) return true;
let {lat, long} = node;
return !lat || !long || lat === 0 || long === 0 || Number.isNaN(lat) || Number.isNaN(long);
}
// ---- Marker Plotting ----
var bounds = L.latLngBounds();
var channels = new Set();
nodes.forEach(node => {
if (!isInvalidCoord(node)) {
let category = node.channel;
channels.add(category);
let color = hashToColor(category);
let markerOptions = { radius: node.isRouter ? 9 : 7, color: "white", fillColor: color, fillOpacity: 1, weight: 0.7 };
let popupContent = `<b><a href="/packet_list/${node.id}">${node.long_name}</a> (${node.short_name})</b><br>
<b>Channel:</b> ${node.channel}<br>
<b>Model:</b> ${node.hw_model}<br>
<b>Role:</b> ${node.role}<br>`;
if (node.last_update) popupContent += `<b>Last seen:</b> ${timeAgo(node.last_update)}<br>`;
if (node.firmware) popupContent += `<b>Firmware:</b> ${node.firmware}<br>`;
var marker = L.circleMarker([node.lat, node.long], markerOptions).addTo(map);
marker.nodeId = node.id;
marker.originalColor = color;
markerById[node.id] = marker;
marker.on('click', function(e) {
e.originalEvent.stopPropagation();
marker.bindPopup(popupContent).openPopup();
setTimeout(() => marker.closePopup(), 3000);
onNodeClick(node);
});
if (!markers[category]) markers[category] = [];
markers[category].push({ marker, isRouter: node.isRouter });
bounds.extend(marker.getLatLng());
if (el.tagName === "INPUT" && el.placeholder !== undefined) {
el.placeholder = val;
} else {
el.textContent = val;
}
});
}
var bayAreaBounds = [
[{{ site_config["site"]["map_top_left_lat"] }}, {{ site_config["site"]["map_top_left_lon"] }}],
[{{ site_config["site"]["map_bottom_right_lat"] }}, {{ site_config["site"]["map_bottom_right_lon"] }}]
];
map.fitBounds(bayAreaBounds);
/* ======================================================
EXISTING MAP LOGIC
====================================================== */
// ---- Filters ----
let filterContainer = document.getElementById("filter-container");
channels.forEach(channel => {
let filterId = `filter-${channel.replace(/\s+/g, '-').toLowerCase()}`;
let color = hashToColor(channel);
let label = document.createElement('label');
label.style.color = color;
label.innerHTML = `<input type="checkbox" class="filter-checkbox" id="${filterId}" checked> ${channel}`;
var map = L.map('map');
L.tileLayer('https://tile.openstreetmap.org/{z}/{x}/{y}.png',
{ maxZoom:19, attribution:'&copy; OpenStreetMap' }).addTo(map);
// Data structures
var nodes = [], markers = {}, markerById = {}, nodeMap = new Map();
var edgeLayer = L.layerGroup().addTo(map), selectedNodeId = null;
var activeBlinks = new Map(), lastImportTime = null;
var mapInterval = 0;
const portMap = {
1:"Text",
67:"Telemetry",
3:"Position",
70:"Traceroute",
4:"Node Info",
71:"Neighbour Info",
73:"Map Report"
};
const palette = ["#e6194b","#4363d8","#f58231","#911eb4","#46f0f0","#f032e6","#bcf60c","#fabebe",
"#008080","#e6beff","#9a6324","#fffac8","#800000","#aaffc3","#808000","#ffd8b1",
"#000075","#808080"];
const colorMap = new Map(); let nextColorIndex = 0;
const channelSet = new Set();
map.on("popupopen", function (e) {
const popupEl = e.popup.getElement();
if (popupEl) applyTranslationsMap(popupEl);
});
// Compact "time ago" label for a microsecond epoch timestamp:
// whole days if any, else hours, else minutes, else seconds.
function timeAgoFromUs(us) {
  const elapsedMs = Date.now() - us / 1000;
  const seconds = Math.floor(elapsedMs / 1000);
  const minutes = Math.floor(seconds / 60);
  const hours = Math.floor(minutes / 60);
  const days = Math.floor(hours / 24);
  if (days > 0) return `${days}d`;
  if (hours > 0) return `${hours}h`;
  if (minutes > 0) return `${minutes}m`;
  return `${seconds}s`;
}
// Assign each distinct key the next palette color (wrapping around),
// caching assignments in colorMap so repeated keys get the same color.
function hashToColor(str) {
  const cached = colorMap.get(str);
  if (cached) return cached;
  const color = palette[nextColorIndex++ % palette.length];
  colorMap.set(str, color);
  return color;
}
// A node is unplottable when it is missing or has null/0/NaN coordinates.
// Note: a lat/long of exactly 0 is deliberately treated as "no position fix".
function isInvalidCoord(n) {
  if (!n) return true;
  const latBad = !n.lat || n.lat === 0 || Number.isNaN(n.lat);
  const longBad = !n.long || n.long === 0 || Number.isNaN(n.long);
  return latBad || longBad;
}
/* ======================================================
PACKET FETCHING (unchanged)
====================================================== */
// Seed lastImportTime with the newest packet's import_time_us so the
// polling loop only asks for packets that arrive after page load.
function fetchLatestPacket() {
  const seed = (data) => {
    lastImportTime = data.packets?.[0]?.import_time_us || 0;
  };
  fetch(`/api/packets?limit=1`)
    .then((r) => r.json())
    .then(seed)
    .catch(console.error);
}
// Poll for packets newer than lastImportTime and blink the sender's map
// marker for each one. No-op until polling is enabled and the watermark
// has been seeded by fetchLatestPacket().
function fetchNewPackets(){
if(mapInterval <= 0) return;
if(lastImportTime===null) return;
const url = new URL(`/api/packets`, window.location.origin);
url.searchParams.set("since", lastImportTime);
url.searchParams.set("limit", 50);
fetch(url)
.then(r=>r.json())
.then(data=>{
if(!data.packets || data.packets.length===0) return;
// Track the newest timestamp seen in this batch.
let latest = lastImportTime;
data.packets.forEach(pkt=>{
if(pkt.import_time_us > latest) latest = pkt.import_time_us;
// Only blink senders that have a plotted marker and known metadata.
const marker = markerById[pkt.from_node_id];
const nodeData = nodeMap.get(pkt.from_node_id);
if(marker && nodeData) blinkNode(marker,nodeData.long_name,pkt.portnum);
});
// Advance the watermark only after processing the whole batch.
lastImportTime = latest;
})
.catch(console.error);
}
// Handle of the active polling timer (null while stopped).
let packetInterval = null;

// Begin polling for new packets, unless polling is disabled
// (mapInterval <= 0) or a timer is already running.
function startPacketFetcher() {
  if (mapInterval <= 0) return;
  if (packetInterval) return;
  fetchLatestPacket();
  packetInterval = setInterval(fetchNewPackets, mapInterval * 1000);
}

// Stop polling and clear the timer handle.
function stopPacketFetcher() {
  if (!packetInterval) return;
  clearInterval(packetInterval);
  packetInterval = null;
}

// Pause polling while the tab is hidden; resume when visible again.
document.addEventListener("visibilitychange", () => {
  if (document.hidden) {
    stopPacketFetcher();
  } else {
    startPacketFetcher();
  }
});
// Wait (polling every 100ms) until base.html has published
// window._siteConfigPromise, then resolve it and return its `site`
// section. Returns {} if the config promise rejects.
async function waitForConfig() {
  while (typeof window._siteConfigPromise === "undefined") {
    await new Promise((resolve) => setTimeout(resolve, 100));
  }
  try {
    const cfg = await window._siteConfigPromise;
    return cfg.site || {};
  } catch (err) {
    console.error("Error loading site config:", err);
    return {};
  }
}
// One-time map setup: read site config, position the viewport (URL params
// from a shared link take priority over the configured bounds), and start
// packet polling if an interval is configured.
async function initMapPolling() {
try {
const site = await waitForConfig();
// 0 (or an unparsable value) disables packet polling entirely.
mapInterval = parseInt(site.map_interval, 10) || 0;
// A shared-view URL (?lat=&lng=&zoom=) overrides the default bounds.
const params = new URLSearchParams(window.location.search);
const lat = parseFloat(params.get('lat'));
const lng = parseFloat(params.get('lng'));
const zoom = parseInt(params.get('zoom'), 10);
if (!isNaN(lat) && !isNaN(lng) && !isNaN(zoom)) {
map.setView([lat, lng], zoom);
window.configBoundsApplied = true;
// Leaflet needs a nudge after the container settles.
setTimeout(() => map.invalidateSize(), 100);
}
else {
// Fall back to the rectangle configured in site settings.
const tl = [parseFloat(site.map_top_left_lat), parseFloat(site.map_top_left_lon)];
const br = [parseFloat(site.map_bottom_right_lat), parseFloat(site.map_bottom_right_lon)];
if (tl.every(isFinite) && br.every(isFinite)) {
map.fitBounds([tl, br]);
window.configBoundsApplied = true;
setTimeout(() => map.invalidateSize(), 100);
}
}
if (mapInterval > 0) startPacketFetcher();
} catch (err) {
console.error("Failed to load /api/config:", err);
}
}
initMapPolling();
/* ======================================================
LOAD NODES
====================================================== */
fetch('/api/nodes?days_active=3')
.then(r=>r.json())
.then(data=>{
if(!data.nodes) return;
nodes = data.nodes.map(n=>({
key: n.node_id ?? n.id,
id: n.id,
node_id: n.node_id,
lat: n.last_lat ? n.last_lat/1e7 : null,
long: n.last_long ? n.last_long/1e7 : null,
long_name: n.long_name || "",
short_name: n.short_name || "",
channel: n.channel || "",
hw_model: n.hw_model || "",
role: n.role || "",
firmware: n.firmware || "",
last_seen_us: n.last_seen_us || null,
isRouter: (n.role||"").toLowerCase().includes("router")
}));
nodes.forEach(n=>{
nodeMap.set(n.key, n);
if(n.channel) channelSet.add(n.channel);
});
renderNodesOnMap();
createChannelFilters();
})
.catch(console.error);
/* ======================================================
RENDER NODES
====================================================== */
// Plot every node with valid coordinates as a circle marker, colored by
// channel (routers get a larger radius). Each marker gets a popup with the
// node's details and a click handler that loads its edges.
// NOTE(review): node names/fields are interpolated into popup HTML without
// escaping — confirm they are sanitized upstream.
function renderNodesOnMap(){
nodes.forEach(node=>{
if(isInvalidCoord(node)) return;
const color = hashToColor(node.channel);
const marker = L.circleMarker([node.lat,node.long], {
radius: node.isRouter ? 9 : 7,
color: "white",
fillColor: color,
fillOpacity: 1,
weight: 0.7
}).addTo(map);
// Stash the id and base color so blink/filter code can find and restore it.
marker.nodeId = node.key;
marker.originalColor = color;
markerById[node.key] = marker;
// Labels use data-translate-lang and are filled by applyTranslationsMap()
// when the popup opens (see the "popupopen" handler).
const popup = `
<b><a href="/node/${node.node_id}">${node.long_name}</a> (${node.short_name})</b><br>
<b data-translate-lang="channel_label"></b> ${node.channel}<br>
<b data-translate-lang="model_label"></b> ${node.hw_model}<br>
<b data-translate-lang="role_label"></b> ${node.role}<br>
${
node.last_seen_us
? `<b data-translate-lang="last_seen"></b> ${timeAgoFromUs(node.last_seen_us)}<br>`
: ""
}
${
node.firmware
? `<b data-translate-lang="firmware"></b> ${node.firmware}<br>`
: ""
}
`;
marker.on('click', () => {
onNodeClick(node);
marker.bindPopup(popup).openPopup();
});
});
// Translate any static labels rendered above, after the DOM settles.
setTimeout(() => applyTranslationsMap(), 50);
}
/* ======================================================
⭐ NEW: DYNAMIC EDGE LOADING
====================================================== */
// When a node marker is clicked, fetch that node's edges on demand and draw
// them: gray lines for neighbor links, orange arrowed lines for traceroutes.
// Previously drawn edges are cleared first.
async function onNodeClick(node){
selectedNodeId = node.key;
edgeLayer.clearLayers();
try {
const res = await fetch(`/api/edges?node_id=${node.key}`);
const data = await res.json();
const edges = data.edges || [];
edges.forEach(edge=>{
const f = nodeMap.get(edge.from);
const t = nodeMap.get(edge.to);
// Skip edges whose endpoints are unknown or unplottable.
if(!f || !t || isInvalidCoord(f) || isInvalidCoord(t)) return;
const color = edge.type === "neighbor" ? "gray" : "orange";
const line = L.polyline([[f.lat, f.long], [t.lat, t.long]], {
color, weight: 3
}).addTo(edgeLayer);
// Traceroute edges are directional: add an arrowhead at the far end.
if(edge.type === "traceroute"){
L.polylineDecorator(line, {
patterns: [
{
offset: '100%',
repeat: 0,
symbol: L.Symbol.arrowHead({
pixelSize:5,
polygon:false,
pathOptions:{stroke:true,color}
})
}
]
}).addTo(edgeLayer);
}
});
} catch(err){
console.error("Failed to load edges for node", node.key, err);
}
}
// Clicking empty map space (anything that isn't a Leaflet marker/line)
// clears the drawn edges and deselects the current node.
map.on('click', (evt) => {
  const clicked = evt.originalEvent.target;
  if (clicked.classList.contains('leaflet-interactive')) return;
  edgeLayer.clearLayers();
  selectedNodeId = null;
});
/* ======================================================
BLINKING
====================================================== */
// Flash a marker yellow 4 times (8 half-second ticks) and show a temporary
// tooltip naming the node and the packet's port. A new blink on the same
// marker cancels and resets any blink already in progress.
function blinkNode(marker,longName,portnum){
if(!map.hasLayer(marker)) return;
// Cancel an in-flight blink so timers/tooltips don't stack up.
if(activeBlinks.has(marker)){
clearInterval(activeBlinks.get(marker));
marker.setStyle({ fillColor: marker.originalColor });
if(marker.tooltip) map.removeLayer(marker.tooltip);
}
let blinkCount = 0;
const tooltip = L.tooltip({
permanent:true,
direction:'top',
// Place the label just above the circle's edge.
offset:[0,-marker.options.radius-5],
className:'blinking-tooltip'
})
.setContent(`${longName} (${portMap[portnum] || "Port "+portnum})`)
.setLatLng(marker.getLatLng())
.addTo(map);
marker.tooltip = tooltip;
const interval = setInterval(()=>{
// The marker may have been filtered off the map mid-blink.
if(map.hasLayer(marker)){
marker.setStyle({
fillColor: blinkCount%2===0 ? 'yellow' : marker.originalColor
});
marker.bringToFront();
}
blinkCount++;
// After 8 ticks (~4s): restore color, drop tooltip, forget the timer.
if(blinkCount>7){
clearInterval(interval);
marker.setStyle({ fillColor: marker.originalColor });
map.removeLayer(tooltip);
activeBlinks.delete(marker);
}
},500);
activeBlinks.set(marker, interval);
}
/* ======================================================
CHANNEL FILTERS
====================================================== */
function createChannelFilters(){
const filterContainer = document.getElementById("filter-container");
const saved = JSON.parse(localStorage.getItem("mapFilters") || "{}");
channelSet.forEach(channel=>{
const cb=document.createElement("input");
cb.type="checkbox";
cb.className="filter-checkbox";
cb.id=`filter-channel-${channel}`;
cb.checked = saved[channel] !== false;
cb.addEventListener("change", saveFiltersToLocalStorage);
cb.addEventListener("change", updateNodeVisibility);
filterContainer.appendChild(cb);
const label=document.createElement("label");
label.htmlFor=cb.id;
label.innerText=channel;
label.style.color = hashToColor(channel);
filterContainer.appendChild(label);
});
function updateMarkers() {
let showRoutersOnly = document.getElementById("filter-routers-only").checked;
nodes.forEach(node => {
let category = node.channel;
let checkbox = document.getElementById(`filter-${category.replace(/\s+/g,'-').toLowerCase()}`);
let shouldShow = checkbox.checked && (!showRoutersOnly || node.isRouter);
let marker = markerById[node.id];
if (marker) marker.setStyle({ fillOpacity: shouldShow ? 1 : 0 });
});
}
const routerOnly=document.getElementById("filter-routers-only");
routerOnly.checked = saved["routersOnly"] || false;
document.querySelectorAll(".filter-checkbox").forEach(input => input.addEventListener("change", updateMarkers));
routerOnly.addEventListener("change", saveFiltersToLocalStorage);
routerOnly.addEventListener("change", updateNodeVisibility);
// ---- Edges ----
var edgeLayer = L.layerGroup().addTo(map);
var edgesData = null;
let selectedNodeId = null;
updateNodeVisibility();
}
fetch('/api/edges').then(res => res.json()).then(data => { edgesData = data.edges; }).catch(err => console.error(err));
function saveFiltersToLocalStorage(){
const state = {};
channelSet.forEach(ch=>{
state[ch] = document.getElementById(`filter-channel-${ch}`).checked;
});
state["routersOnly"] = document.getElementById("filter-routers-only").checked;
function onNodeClick(node) {
if (selectedNodeId != node.id) {
selectedNodeId = node.id;
edgeLayer.clearLayers();
if (!edgesData) return;
if (!map.hasLayer(edgeLayer)) edgeLayer.addTo(map);
localStorage.setItem("mapFilters", JSON.stringify(state));
}
edgesData.forEach(edge => {
if (edge.from !== node.id && edge.to !== node.id) return;
const fromNode = nodeMap.get(edge.from);
const toNode = nodeMap.get(edge.to);
if (!fromNode || !toNode) return;
if (isInvalidCoord(fromNode) || isInvalidCoord(toNode)) return;
function updateNodeVisibility(){
const routerOnly = document.getElementById("filter-routers-only").checked;
const activeChannels = [...channelSet].filter(ch =>
document.getElementById(`filter-channel-${ch}`).checked
);
const lineColor = edge.type === "neighbor" ? "darkred" : "black";
const dash = edge.type === "traceroute" ? "5,5" : null;
const weight = edge.type === "neighbor" ? 3 : 2;
nodes.forEach(n=>{
const marker = markerById[n.key];
if(marker){
const visible =
(!routerOnly || n.isRouter) &&
activeChannels.includes(n.channel);
const polyline = L.polyline([[fromNode.lat, fromNode.long],[toNode.lat, toNode.long]], { color: lineColor, weight, opacity: 1, dashArray: dash }).addTo(edgeLayer).bringToFront();
if (edge.type === "traceroute") {
L.polylineDecorator(polyline, {
patterns: [{ offset: '100%', repeat: 0, symbol: L.Symbol.arrowHead({ pixelSize: 5, polygon: false, pathOptions: { stroke: true, color: lineColor } }) }]
}).addTo(edgeLayer);
}
});
}
}
map.on('click', function(e) {
if (!e.originalEvent.target.classList.contains('leaflet-interactive')) {
edgeLayer.clearLayers();
selectedNodeId = null;
visible ? map.addLayer(marker) : map.removeLayer(marker);
}
});
}
// ---- Blinking Nodes ----
var activeBlinks = new Map();
/* ======================================================
SHARE / RESET
====================================================== */
// NOTE(review): this region looks like a mangled merge of two functions —
// blinkNode() and shareCurrentView() are interleaved below and the braces do
// not balance within this chunk. Code is kept byte-identical; comments mark
// which statements appear to belong to which routine. Needs untangling
// against the original file.
function blinkNode(marker, longName, portnum) {
// Only blink markers currently on the map.
if (!map.hasLayer(marker)) return;
// If this marker is already blinking, cancel the old cycle and reset style.
if (activeBlinks.has(marker)) {
clearInterval(activeBlinks.get(marker));
marker.setStyle({ fillColor: marker.originalColor });
if (marker.tooltip) map.removeLayer(marker.tooltip);
}
// shareCurrentView(): build a shareable /map URL from the current view.
// Its body is tangled with the remainder of blinkNode() below.
function shareCurrentView() {
const c = map.getCenter();
const url = `${window.location.origin}/map?lat=${c.lat.toFixed(6)}&lng=${c.lng.toFixed(6)}&zoom=${map.getZoom()}`;
// blinkNode state: blink tick counter and a tooltip "<long name> (<port>)".
let blinkCount = 0;
let portName = portMap[portnum] || `Port ${portnum}`;
let tooltip = L.tooltip({
permanent: true,
direction: 'top',
offset: [0, -marker.options.radius - 5],
className: 'blinking-tooltip'
}).setContent(`${longName} (${portName})`).setLatLng(marker.getLatLng());
tooltip.addTo(map);
marker.tooltip = tooltip;
// shareCurrentView: copy URL to clipboard, then flash button feedback.
navigator.clipboard.writeText(url).then(()=>{
const btn = document.getElementById('share-button');
const old = btn.textContent;
btn.textContent = '✓ ' + (mapTranslations.link_copied || 'Link Copied!');
btn.style.backgroundColor = '#2196F3';
// blinkNode: toggle fill color every 500 ms for 8 ticks (~4 s total).
let interval = setInterval(() => {
if (map.hasLayer(marker)) {
// Alternate color
marker.setStyle({ fillColor: blinkCount % 2 === 0 ? 'yellow' : marker.originalColor });
// Bring marker to top
marker.bringToFront();
}
blinkCount++;
if (blinkCount > 7) {
clearInterval(interval);
marker.setStyle({ fillColor: marker.originalColor });
map.removeLayer(tooltip);
activeBlinks.delete(marker);
}
}, 500);
activeBlinks.set(marker, interval);
}
// ---- Packet Fetching ----
// Most recent packet import_time seen; incremental polls fetch since this.
let lastImportTime = null;
// Seed lastImportTime from the single most recent packet, or from "now"
// when the server has no packets yet. Fire-and-forget; errors are logged.
function fetchLatestPacket() {
  fetch(`/api/packets?limit=1`)
    .then((response) => response.json())
    .then((payload) => {
      const received = payload.packets;
      if (received && received.length > 0) {
        lastImportTime = received[0].import_time;
        console.log("Initial lastImportTime:", lastImportTime);
        return;
      }
      lastImportTime = new Date().toISOString();
      console.log("No packets, setting lastImportTime to now:", lastImportTime);
    })
    .catch((problem) => console.error("Error fetching latest packet:", problem));
}
// Poll for packets newer than lastImportTime, blink the marker of each
// sender that is on the map, and advance lastImportTime to the newest
// import_time observed.
function fetchNewPackets() {
  if (!lastImportTime) return;
  fetch(`/api/packets?since=${lastImportTime}`)
    .then((response) => response.json())
    .then((payload) => {
      console.log("===== New Fetch =====");
      const incoming = payload.packets;
      if (!incoming || incoming.length === 0) {
        console.log("No new packets");
        return;
      }
      let newest = lastImportTime;
      for (const packet of incoming) {
        console.log(`Packet ID: ${packet.id}, From Node: ${packet.from_node_id}, Port: ${packet.portnum}, Time: ${packet.import_time}`);
        if (packet.import_time && (!newest || packet.import_time > newest)) {
          newest = packet.import_time;
        }
        const marker = markerById[packet.from_node_id];
        if (marker) {
          const nodeData = nodeMap.get(packet.from_node_id);
          if (nodeData) blinkNode(marker, nodeData.long_name, packet.portnum);
        }
      }
      if (newest) lastImportTime = newest;
      console.log("Updated lastImportTime:", lastImportTime);
      console.log("===== End Fetch =====");
    })
    .catch((problem) => console.error("Fetch error:", problem));
}
// ---- Polling Control ----
// Active setInterval id (null while stopped).
let packetInterval = null;
// Poll interval in seconds from site config; <= 0 disables live polling.
const mapInterval = {{ site_config["site"]["map_interval"] | default(3) }};
// Begin live polling unless it is disabled (mapInterval <= 0) or a poller
// is already running. Seeds lastImportTime first, then polls on a timer.
function startPacketFetcher() {
  if (mapInterval <= 0) return;
  if (packetInterval) return;
  fetchLatestPacket();
  packetInterval = setInterval(fetchNewPackets, mapInterval * 1000);
  console.log("Packet fetcher started, interval:", mapInterval, "seconds");
}
// Halt live polling if a timer is active; safe to call when already stopped.
function stopPacketFetcher() {
  if (!packetInterval) return;
  clearInterval(packetInterval);
  packetInterval = null;
  console.log("Packet fetcher stopped");
}
// Pause polling while the tab is hidden; resume when it becomes visible.
document.addEventListener("visibilitychange", function() {
  if (document.hidden) stopPacketFetcher();
  else startPacketFetcher();
  // FIX(review): removed a stray setTimeout() that reset `btn.textContent`
  // and `btn.style.backgroundColor` — `btn` and `old` are undefined in this
  // scope and threw a ReferenceError on every visibility change. That
  // feedback-reset belongs in shareCurrentView()'s clipboard callback.
});
}
// ---- Initialize ----
// Start live polling immediately when enabled in the site config.
if (mapInterval > 0) startPacketFetcher();
function resetFiltersToDefaults(){
document.getElementById("filter-routers-only").checked = false;
channelSet.forEach(ch => {
document.getElementById(`filter-channel-${ch}`).checked = true;
});
saveFiltersToLocalStorage();
updateNodeVisibility();
}
/* ======================================================
TRANSLATION LOAD
====================================================== */
document.addEventListener("DOMContentLoaded", () => {
// Load the map UI translation strings once the DOM is ready.
loadTranslationsMap();
});
</script>
{% endblock %}

View File

@@ -1,54 +1,233 @@
{% extends "base.html" %}
{% block css %}
.timestamp {
min-width:10em;
}
.chat-packet:nth-of-type(odd){
background-color: #3a3a3a; /* Lighter than #2a2a2a */
}
.timestamp { min-width: 10em; color: #ccc; }
.chat-packet:nth-of-type(odd) { background-color: #3a3a3a; }
.chat-packet {
border-bottom: 1px solid #555;
padding: 8px;
border-radius: 8px; /* Adjust the value to make the corners more or less rounded */
padding: 3px 6px;
border-radius: 6px;
margin: 0;
}
.chat-packet:nth-of-type(even){
background-color: #333333; /* Slightly lighter than the previous #181818 */
.chat-packet > [class^="col-"] {
padding-left: 10px !important;
padding-right: 10px !important;
padding-top: 1px !important;
padding-bottom: 1px !important;
}
.chat-packet:nth-of-type(even) { background-color: #333333; }
.channel { font-style: italic; color: #bbb; }
.channel a { font-style: normal; color: #999; }
#weekly-message { margin: 15px 0; font-weight: bold; color: #ffeb3b; }
#total-count { margin-bottom: 10px; font-style: italic; color: #ccc; }
{% endblock %}
{% block body %}
<div class="container">
{{ site_config["site"]["weekly_net_message"] }} <br><br>
<!-- ⭐ NET TITLE WITH ICON ⭐ -->
<div class="container px-2">
<h2 style="color:white; margin:0 0 10px 0;">
<span class="icon">💬</span>
<span data-translate-lang="net_title"></span>
</h2>
</div>
<h5>Number of Check-ins: {{ packets|length }}</h5>
<!-- Weekly network message -->
<div id="weekly-message"></div>
<!-- Total message count -->
<div id="total-count">
<span data-translate-lang="total_messages">Total messages:</span>
<span id="total-count-value">0</span>
</div>
<div id="chat-container">
<div class="container" id="chat-log"></div>
</div>
</div>
<div class="container">
{% for packet in packets %}
<div
class="row chat-packet"
data-packet-id="{{ packet.id }}"
role="article"
aria-label="Chat message from {{ packet.from_node.long_name or (packet.from_node_id | node_id_to_hex) }}"
>
<span class="col-2 timestamp">
{{ packet.import_time.strftime('%-I:%M:%S %p - %m-%d-%Y') }}
</span>
<span class="col-1 timestamp">
<a href="/packet/{{ packet.id }}" title="View packet details">✉️</a> {{ packet.from_node.channel }}
</span>
<span class="col-2 username">
<a href="/packet_list/{{ packet.from_node_id }}" title="View all packets from this node">
{{ packet.from_node.long_name or (packet.from_node_id | node_id_to_hex) }}
</a>
</span>
<span class="col-5 message">
{{ packet.payload }}
</span>
</div>
{% else %}
No packets found.
{% endfor %}
</div>
<script>
document.addEventListener("DOMContentLoaded", async () => {
// Cache the DOM nodes this page updates; bail out early if the template is
// missing any of them.
const chatContainer = document.querySelector("#chat-log");
const weeklyMessageEl = document.querySelector("#weekly-message");
const totalCountValueEl = document.querySelector("#total-count-value");
if (!chatContainer || !weeklyMessageEl || !totalCountValueEl) {
console.error("Required elements missing");
return;
}
// Packet ids already rendered (dedupe), loaded translations, and net tag.
const renderedPacketIds = new Set();
let netTranslations = {};
let netTag = "";
/* -----------------------------------
Escape HTML safely
----------------------------------- */
// HTML-escape arbitrary text by round-tripping it through a detached
// element's textContent (null/undefined become the empty string).
function escapeHtml(text) {
  const scratch = document.createElement("div");
  scratch.textContent = text ?? "";
  return scratch.innerHTML;
}
/* -----------------------------------
Apply translations
----------------------------------- */
// Fill every element under `root` that carries a data-translate-lang key
// (textContent) or data-translate-lang-title key (title) from `trans`.
// Keys missing from `trans` leave the element untouched.
function applyTranslations(trans, root=document) {
  for (const el of root.querySelectorAll("[data-translate-lang]")) {
    const key = el.dataset.translateLang;
    if (trans[key]) el.textContent = trans[key];
  }
  for (const el of root.querySelectorAll("[data-translate-lang-title]")) {
    const key = el.dataset.translateLangTitle;
    if (trans[key]) el.title = trans[key];
  }
}
/* -----------------------------------
Update count
----------------------------------- */
// Mirror the number of rendered (deduped) packets into the visible counter.
function updateTotalCount() {
totalCountValueEl.textContent = renderedPacketIds.size;
}
/* -----------------------------------
Render single packet
----------------------------------- */
// Render one packet as a chat row prepended to the log (newest on top).
// Duplicate packet ids are ignored via renderedPacketIds.
function renderPacket(packet) {
if (renderedPacketIds.has(packet.id)) return;
renderedPacketIds.add(packet.id);
// import_time_us is in microseconds; Date() expects milliseconds.
const date = new Date(packet.import_time_us / 1000);
const timeStr = date.toLocaleTimeString([], {
hour: "numeric",
minute: "2-digit",
second: "2-digit",
hour12: true
});
// MM/DD/YYYY, zero-padded.
const dateStr =
`${String(date.getMonth()+1).padStart(2,"0")}/`+
`${String(date.getDate()).padStart(2,"0")}/`+
date.getFullYear();
const timestamp = `${timeStr} - ${dateStr}`;
// Fall back to "<node_fallback> <id>" when the node has no long name.
// NOTE(review): if translations are not yet loaded, node_fallback is
// undefined and the literal text "undefined" is shown — confirm that the
// initial fetch always runs after loadTranslations().
const fromName =
(packet.long_name || "").trim() ||
`${netTranslations.node_fallback} ${packet.from_node_id}`;
const div = document.createElement("div");
div.className = "row chat-packet";
div.dataset.packetId = packet.id;
// User-controlled fields are HTML-escaped; payload newlines become <br>.
div.innerHTML = `
<span class="col-2 timestamp" title="${packet.import_time_us}">
${timestamp}
</span>
<span class="col-2 channel">
<a href="/packet/${packet.id}"
data-translate-lang-title="view_packet_details">✉️</a>
${escapeHtml(packet.channel || "")}
</span>
<span class="col-3 nodename">
<a href="/node/${packet.from_node_id}">
${escapeHtml(fromName)}
</a>
</span>
<span class="col-5 message">
${escapeHtml(packet.payload).replace(/\n/g,"<br>")}
</span>
`;
chatContainer.prepend(div);
applyTranslations(netTranslations, div);
updateTotalCount();
}
/* -----------------------------------
Sort descending by time
----------------------------------- */
// Render packets oldest-first so that prepending each row leaves the list
// newest-first in the DOM. The input array is never mutated.
function renderPacketsEnsureDescending(packets) {
  if (!packets || !packets.length) return;
  const newestFirst = [...packets].sort((a, b) => b.import_time_us - a.import_time_us);
  for (const packet of newestFirst.reverse()) {
    renderPacket(packet);
  }
}
/* -----------------------------------
Fetch initial net-tagged packets
----------------------------------- */
// Load the initial batch of net-tagged text packets: up to 1000 from the
// last six days. A falsy tag disables the net page entirely.
async function fetchInitialPackets(tag) {
  if (!tag) return;
  try {
    const windowStartMs = Date.now() - 6 * 24 * 60 * 60 * 1000;
    // API expects microseconds, matching import_time_us.
    const windowStartUs = Math.floor(windowStartMs * 1000);
    const endpoint =
      `/api/packets?portnum=1&contains=${encodeURIComponent(tag)}&since=${windowStartUs}&limit=1000`;
    const response = await fetch(endpoint);
    const payload = await response.json();
    if (payload?.packets?.length) {
      renderPacketsEnsureDescending(payload.packets);
    }
  } catch (err) {
    console.error("Initial fetch error:", err);
  }
}
/* -----------------------------------
Load translations from section=net
----------------------------------- */
// Fetch the "net" translation section for the configured site language
// (default "en"), store it, and apply it to the whole document.
// Failures are logged and leave the previous translations in place.
async function loadTranslations(cfg) {
  try {
    const language = cfg?.site?.language || "en";
    const response = await fetch(`/api/lang?lang=${language}&section=net`);
    netTranslations = await response.json();
    applyTranslations(netTranslations, document);
  } catch (err) {
    console.error("Failed loading translations", err);
  }
}
/* -----------------------------------
   MAIN
   Wait for the shared site-config promise, show the weekly message,
   then load translations and the initial packet batch.
----------------------------------- */
try {
const cfg = await window._siteConfigPromise;
const site = cfg?.site || {};
netTag = site.net_tag || "";
weeklyMessageEl.textContent = site.weekly_net_message || "";
await loadTranslations(cfg);
await fetchInitialPackets(netTag);
} catch (err) {
console.error("Initialization failed:", err);
// netTranslations may still be empty if loading failed before it resolved.
weeklyMessageEl.textContent =
netTranslations.failed_to_load_site_config ||
"Failed to load site config.";
}
});
</script>
{% endblock %}

File diff suppressed because it is too large Load Diff

View File

@@ -1,58 +0,0 @@
{% extends "base.html" %}
{% block css %}
#node_info {
height:100%;
}
#map{
height:100%;
min-height: 400px;
}
#packet_details{
height: 95vh;
overflow: scroll;
top: 3em;
}
div.tab-pane > dl {
display: inline-block;
}
{% endblock %}
{% block body %}
{% include "search_form.html" %}
<div class="row">
<div class="col mb-3">
<div class="card" id="node_info">
{% if node %}
<div class="card-header">
{{node.long_name}}
</div>
<div class="card-body">
<dl >
<dt>ShortName</dt>
<dd>{{node.short_name}}</dd>
<dt>HW Model</dt>
<dd>{{node.hw_model}}</dd>
<dt>Role</dt>
<dd>{{node.role}}</dd>
</dl>
</div>
{% else %}
<div class="card-body">
A NodeInfo has not been seen.
</div>
{% endif %}
</div>
</div>
<div class="row">
<div class="col">
{% include 'packet_list.html' %}
</div>
</div>
<div class="col mb-3">
<div id="map"></div>
</div>
</div>
{% endblock %}

View File

@@ -1,263 +0,0 @@
{% macro graph(name) %}
<div id="{{name}}Chart" style="width: 100%; height: 100%;"></div>
{% endmacro %}
<!-- Download and Expand buttons -->
<div class="d-flex justify-content-end mb-2">
<button class="btn btn-sm btn-outline-light me-2" id="downloadCsvBtn">Download CSV</button>
<button class="btn btn-sm btn-outline-light" data-bs-toggle="modal" data-bs-target="#fullChartModal">Expand</button>
</div>
<!-- Tab Navigation -->
<ul class="nav nav-tabs" role="tablist">
{% for name in [
"power", "utilization", "temperature", "humidity", "pressure",
"iaq", "wind_speed", "wind_direction", "power_metrics", "neighbors"
] %}
<li class="nav-item" role="presentation">
<button class="nav-link {% if loop.first %}active{% endif %}" data-bs-toggle="tab" data-bs-target="#{{name}}Tab" type="button" role="tab">{{ name | capitalize }}</button>
</li>
{% endfor %}
</ul>
<!-- Tab Content -->
<div class="tab-content mt-3" style="height: 40vh;">
{% for name in [
"power", "utilization", "temperature", "humidity", "pressure",
"iaq", "wind_speed", "wind_direction", "power_metrics", "neighbors"
] %}
<div class="tab-pane fade {% if loop.first %}show active{% endif %}" id="{{name}}Tab" role="tabpanel" style="height: 100%;">
{{ graph(name) | safe }}
</div>
{% endfor %}
</div>
<!-- Fullscreen Modal -->
<div class="modal fade" id="fullChartModal" tabindex="-1" aria-labelledby="fullChartModalLabel" aria-hidden="true">
<div class="modal-dialog modal-fullscreen">
<div class="modal-content bg-dark text-white">
<div class="modal-header">
<h5 class="modal-title" id="fullChartModalLabel">Full Graph</h5>
<button type="button" class="btn-close btn-close-white" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body" style="height: 100vh;">
<div id="fullChartContainer" style="width: 100%; height: 100%;"></div>
</div>
</div>
</div>
</div>
<!-- ECharts Library -->
<script src="https://cdn.jsdelivr.net/npm/echarts@5/dist/echarts.min.js"></script>
<script>
document.addEventListener("DOMContentLoaded", function () {
// Chart state: the ECharts instance for the active tab, its name, the raw
// data behind it (reused by CSV export and the fullscreen modal), and the
// modal's own chart instance (created lazily on first open).
let currentChart = null;
let currentChartName = null;
let currentChartData = null;
let fullChart = null;
// Fetch `/graph/<name>_json/<node_id>` and render it as a line chart into
// the element with id `targetDiv`. Updates the shared currentChartName /
// currentChartData state; returns the ECharts instance, or null on error.
// The "power" chart gets dual y-axes (battery % left, voltage right).
async function loadChart(name, targetDiv) {
currentChartName = name;
const chartDiv = document.getElementById(targetDiv);
if (!chartDiv) return;
try {
const resp = await fetch(`/graph/${name}_json/{{ node_id }}`);
if (!resp.ok) throw new Error(`Failed to load data for ${name}`);
const data = await resp.json();
// Reverse for chronological order
data.timestamps.reverse();
data.series.forEach(s => s.data.reverse());
// Format timestamps as MM-DD-YY for the x-axis labels.
const formattedDates = data.timestamps.map(t => {
const d = new Date(t);
return `${(d.getMonth() + 1).toString().padStart(2, '0')}-${d.getDate().toString().padStart(2, '0')}-${d.getFullYear().toString().slice(-2)}`;
});
// Keep the formatted data around for CSV export and the modal view.
currentChartData = {
...data,
timestamps: formattedDates
};
const chart = echarts.init(chartDiv);
const isDualAxis = name === 'power';
chart.setOption({
tooltip: {
trigger: 'axis',
// Append a unit based on the series name (V for volt*, % for battery*).
formatter: function (params) {
return params.map(p => {
const label = p.seriesName.toLowerCase();
const unit = label.includes('volt') ? 'V' : label.includes('battery') ? '%' : '';
return `${p.marker} ${p.seriesName}: ${p.data}${unit}`;
}).join('<br>');
}
},
xAxis: {
type: 'category',
data: formattedDates,
axisLabel: { color: '#fff', rotate: 45 },
},
// Power chart: battery % on the left axis, voltage on the right.
yAxis: isDualAxis ? [
{
type: 'value',
name: 'Battery (%)',
min: 0,
max: 120,
position: 'left',
axisLabel: { color: '#fff' },
nameTextStyle: { color: '#fff' }
},
{
type: 'value',
name: 'Voltage (V)',
min: 0,
max: 6,
position: 'right',
axisLabel: { color: '#fff' },
nameTextStyle: { color: '#fff' }
}
] : {
type: 'value',
axisLabel: { color: '#fff' },
},
series: data.series.map(s => ({
name: s.name,
type: 'line',
data: s.data,
smooth: true,
connectNulls: true,
showSymbol: false,
// Voltage series ride the secondary axis on the power chart.
yAxisIndex: isDualAxis && s.name.toLowerCase().includes('volt') ? 1 : 0,
})),
legend: { textStyle: { color: '#fff' } }
});
return chart;
} catch (err) {
console.error(err);
// Reset shared state so CSV export / modal don't show stale data.
currentChartData = null;
currentChartName = null;
chartDiv.innerHTML = `<div class="text-white text-center mt-5">Error loading ${name} data.</div>`;
return null;
}
}
// Load first chart
// The lowercased tab label doubles as the chart name / endpoint slug and
// the "<name>Chart" container id.
const firstTabBtn = document.querySelector('.nav-tabs button.nav-link.active');
if (firstTabBtn) {
const name = firstTabBtn.textContent.toLowerCase();
const chartId = `${name}Chart`;
loadChart(name, chartId).then(chart => currentChart = chart);
}
// Reload the matching chart whenever a tab pane becomes visible; the
// lowercased tab label is both the chart name and its container-id prefix.
for (const tabButton of document.querySelectorAll('.nav-tabs button.nav-link')) {
  tabButton.addEventListener('shown.bs.tab', (event) => {
    const chartName = event.target.textContent.toLowerCase();
    loadChart(chartName, `${chartName}Chart`).then((chart) => currentChart = chart);
  });
}
// CSV Download: export the currently-loaded chart as <name>_<node>.csv.
document.getElementById('downloadCsvBtn').addEventListener('click', () => {
  if (!currentChartData || !currentChartName) {
    alert("Chart data not loaded yet.");
    return;
  }
  // FIX(review): fields were previously joined with ',' unquoted, so a
  // series name or value containing a comma/quote/newline produced a
  // malformed CSV. Quote such fields per RFC 4180.
  const csvField = (value) => {
    const text = value != null ? String(value) : '';
    return /[",\n]/.test(text) ? `"${text.replace(/"/g, '""')}"` : text;
  };
  const { timestamps, series } = currentChartData;
  let csv = 'Date,' + series.map(s => csvField(s.name)).join(',') + '\n';
  for (let i = 0; i < timestamps.length; i++) {
    const row = [csvField(timestamps[i])];
    for (const s of series) {
      row.push(csvField(s.data[i]));
    }
    csv += row.join(',') + '\n';
  }
  const blob = new Blob([csv], { type: 'text/csv' });
  const url = URL.createObjectURL(blob);
  const a = document.createElement('a');
  a.href = url;
  a.download = `${currentChartName}_{{ node_id }}.csv`;
  a.click();
  URL.revokeObjectURL(url);
});
// Fullscreen modal chart
// Re-render the already-loaded chart data into the modal at full size.
// The modal's ECharts instance is created once and reused across opens.
document.getElementById('fullChartModal').addEventListener('shown.bs.modal', () => {
if (!currentChartData || !currentChartName) return;
if (!fullChart) {
fullChart = echarts.init(document.getElementById('fullChartContainer'));
}
// Same option structure as loadChart(), plus a capitalized title.
const isDualAxis = currentChartName === 'power';
fullChart.setOption({
title: { text: currentChartName.charAt(0).toUpperCase() + currentChartName.slice(1), textStyle: { color: '#fff' } },
tooltip: {
trigger: 'axis',
// Append a unit based on the series name (V for volt*, % for battery*).
formatter: function (params) {
return params.map(p => {
const label = p.seriesName.toLowerCase();
const unit = label.includes('volt') ? 'V' : label.includes('battery') ? '%' : '';
return `${p.marker} ${p.seriesName}: ${p.data}${unit}`;
}).join('<br>');
}
},
xAxis: {
type: 'category',
data: currentChartData.timestamps,
axisLabel: { color: '#fff', rotate: 45 },
},
// Power chart: battery % on the left axis, voltage on the right.
yAxis: isDualAxis ? [
{
type: 'value',
name: 'Battery (%)',
min: 0,
max: 120,
position: 'left',
axisLabel: { color: '#fff' },
nameTextStyle: { color: '#fff' }
},
{
type: 'value',
name: 'Voltage (V)',
min: 0,
max: 6,
position: 'right',
axisLabel: { color: '#fff' },
nameTextStyle: { color: '#fff' }
}
] : {
type: 'value',
axisLabel: { color: '#fff' },
},
series: currentChartData.series.map(s => ({
name: s.name,
type: 'line',
data: s.data,
smooth: true,
connectNulls: true,
showSymbol: false,
// Voltage series ride the secondary axis on the power chart.
yAxisIndex: isDualAxis && s.name.toLowerCase().includes('volt') ? 1 : 0,
})),
legend: { textStyle: { color: '#fff' } }
});
// Force a layout pass now that the modal container has its final size.
fullChart.resize();
});
// Keep both chart instances sized to their containers on window resize.
window.addEventListener('resize', () => {
  for (const chart of [fullChart, currentChart]) {
    if (chart) chart.resize();
  }
});
});
</script>

Some files were not shown because too many files have changed in this diff Show More