Compare commits

..

270 Commits

Author SHA1 Message Date
SpudGunMan
17bfb8ec3e Update xtide.md 2025-10-29 11:56:24 -07:00
SpudGunMan
0cfe4a39ed refactor 2025-10-28 22:14:34 -07:00
copilot-swe-agent[bot]
fc5476b5dd Update documentation for global tide prediction support
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-29 03:58:26 +00:00
copilot-swe-agent[bot]
f40d5b24f6 Add comprehensive error handling and documentation for xtide module
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-29 03:57:04 +00:00
copilot-swe-agent[bot]
f8782de291 Add tidepredict support for global tide predictions
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-29 03:53:07 +00:00
copilot-swe-agent[bot]
74f4cd284c Initial plan 2025-10-29 03:46:26 +00:00
SpudGunMan
17cce3b98b Update custom_scheduler.template 2025-10-28 20:23:24 -07:00
SpudGunMan
ed768b48fe Update custom_scheduler.template 2025-10-28 20:22:25 -07:00
SpudGunMan
cb8dc50424 Update install.sh 2025-10-28 20:21:29 -07:00
SpudGunMan
17cde0ca36 Update config.template 2025-10-28 20:11:20 -07:00
SpudGunMan
206b72ec4f init 2025-10-28 19:50:11 -07:00
Kelly
eadc843e27 Merge pull request #247 from SpudGunMan/copilot/enhancement-basic-scheduler
Add scheduler support for news, RSS, marine weather, system info, tide, and solar
refactored some other logic around scheduler and also the update and installer
2025-10-28 19:44:54 -07:00
SpudGunMan
14709e2828 Update scheduler.py 2025-10-28 19:43:42 -07:00
SpudGunMan
4a5d877a3d Update scheduler.py 2025-10-28 19:43:24 -07:00
SpudGunMan
0159c90708 install patch 2025-10-28 19:29:12 -07:00
SpudGunMan
05648f23f2 Update update.sh 2025-10-28 19:06:37 -07:00
SpudGunMan
f27fbdf3c9 Update scheduler.py 2025-10-28 19:01:15 -07:00
SpudGunMan
998c4078bc 🐑 2025-10-28 18:58:20 -07:00
SpudGunMan
666ae24d2c sunday 2025-10-28 18:42:29 -07:00
SpudGunMan
41e7c1207a Update scheduler.py 2025-10-28 18:00:37 -07:00
SpudGunMan
41c6de4183 Update inventory.md 2025-10-28 17:47:54 -07:00
SpudGunMan
af83ba636f not gonna
promise anything
2025-10-28 17:43:45 -07:00
copilot-swe-agent[bot]
8b54c52e7f Update config.template with new scheduler options documentation
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-29 00:41:59 +00:00
copilot-swe-agent[bot]
240dd4b46f Update documentation for new scheduler options
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-29 00:40:19 +00:00
copilot-swe-agent[bot]
7505c9ec22 Add basic scheduler support for news, readrss, mwx, sysinfo, tide, and sun
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-29 00:37:53 +00:00
SpudGunMan
14c22c8156 Create fakeNode.py 2025-10-28 17:31:56 -07:00
copilot-swe-agent[bot]
88dcce2b23 Initial plan 2025-10-29 00:31:30 +00:00
Kelly
5bc842c7e8 Merge pull request #243 from SpudGunMan/copilot/enhance-check-in-check-out
Add inventory/POS system and enhance check-in/check-out with safety monitoring
2025-10-28 17:22:42 -07:00
SpudGunMan
f73bef5894 refactor 2025-10-28 17:21:46 -07:00
SpudGunMan
9371e96feb refactor 2025-10-28 17:21:31 -07:00
SpudGunMan
85345ca45f Update db_admin.py 2025-10-28 17:21:10 -07:00
SpudGunMan
823554f689 rename template 2025-10-28 17:02:12 -07:00
SpudGunMan
5426202d51 Update system.py 2025-10-28 16:02:18 -07:00
SpudGunMan
685e0762bc Update README.md 2025-10-28 15:33:20 -07:00
SpudGunMan
8bc81cee00 docs 2025-10-28 14:02:18 -07:00
SpudGunMan
82f55c6a32 refactor
added loan items
2025-10-28 13:57:56 -07:00
SpudGunMan
be885aa00c Update inventory.md 2025-10-28 13:49:53 -07:00
SpudGunMan
536fd4deea Update checklist.py 2025-10-28 13:49:46 -07:00
SpudGunMan
eb25e55c97 Update inventory.py 2025-10-28 13:46:14 -07:00
SpudGunMan
b7f25c7c5c Update inventory.md 2025-10-28 13:43:06 -07:00
SpudGunMan
c1f1bc5eb9 docs 2025-10-28 13:40:50 -07:00
SpudGunMan
a9c00e92c7 Update checklist.py 2025-10-28 13:26:50 -07:00
SpudGunMan
713e3102f3 Update inventory.py 2025-10-28 13:22:06 -07:00
SpudGunMan
25136d1dd6 Update checklist.py 2025-10-28 13:22:02 -07:00
SpudGunMan
3795ae17ea Update mesh_bot.py 2025-10-28 13:04:23 -07:00
SpudGunMan
aef62bfbc3 archive 2025-10-28 12:55:14 -07:00
Kelly
cbb4bf0a3c Merge pull request #246 from SpudGunMan/copilot/support-js8call-integration
Add WSJT-X and JS8Call integration for forwarding digital mode messages to mesh network. Not fully tested Please test and let me know what needs changed
2025-10-28 12:49:00 -07:00
SpudGunMan
22ebc2bdbe refactor 2025-10-28 12:47:33 -07:00
SpudGunMan
517c6cbf82 Update config.template 2025-10-28 12:37:04 -07:00
copilot-swe-agent[bot]
2b0d7267b5 Optimize callsign matching performance
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-28 19:14:01 +00:00
copilot-swe-agent[bot]
ee4f910d6e Improve callsign matching to prevent false positives
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-28 19:11:58 +00:00
copilot-swe-agent[bot]
49c88306a0 Add tests and fix import issues for WSJT-X/JS8Call
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-28 19:08:15 +00:00
copilot-swe-agent[bot]
0f918ebccd Add documentation for WSJT-X and JS8Call integration
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-28 19:05:36 +00:00
copilot-swe-agent[bot]
69fac4ba98 Add WSJT-X and JS8Call integration support
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-28 19:02:48 +00:00
copilot-swe-agent[bot]
80745bec50 Initial plan 2025-10-28 18:54:26 +00:00
SpudGunMan
5afb1df41a Update llm.md 2025-10-28 11:52:08 -07:00
SpudGunMan
fbb7971cb0 Update llm.md 2025-10-28 11:36:46 -07:00
SpudGunMan
23c2d701df Update locationdata.py 2025-10-28 11:14:41 -07:00
SpudGunMan
2f1c305b06 tallestNode
enhancement to leaderboard thanks glocktuber
2025-10-28 11:05:48 -07:00
SpudGunMan
978fa19b56 refactor leaderboard load()
allow upgrades
2025-10-28 10:57:43 -07:00
SpudGunMan
b5de21a073 Update llm.md 2025-10-28 10:43:54 -07:00
SpudGunMan
f225c21c7a Update custom_scheduler.py 2025-10-28 06:16:32 -07:00
SpudGunMan
23ebb715c9 Update custom_scheduler.py 2025-10-28 06:13:20 -07:00
SpudGunMan
af0645f761 Update README.md 2025-10-28 06:02:53 -07:00
SpudGunMan
113750869f Update README.md 2025-10-28 05:48:34 -07:00
copilot-swe-agent[bot]
c2a18e9f9e Fix documentation clarity on penny rounding and overdue alerts
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-28 05:52:30 +00:00
copilot-swe-agent[bot]
fcaab86e71 Add comprehensive documentation for inventory and enhanced checklist
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-28 05:49:11 +00:00
copilot-swe-agent[bot]
47c84d91f1 Integrate inventory and enhanced checklist into mesh_bot
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-28 05:46:05 +00:00
copilot-swe-agent[bot]
8372817733 Add inventory/POS system and enhance checklist with time intervals
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-28 05:42:24 +00:00
copilot-swe-agent[bot]
9683d8b79e Initial plan 2025-10-28 05:35:13 +00:00
SpudGunMan
6f16fc6afb docs 2025-10-27 22:30:43 -07:00
SpudGunMan
fd971d8cc5 Update README.md
ffs
2025-10-27 22:23:05 -07:00
SpudGunMan
96193a22e8 LLM docs 2025-10-27 22:22:06 -07:00
SpudGunMan
02b0cde1c8 Update llm.py 2025-10-27 22:00:52 -07:00
SpudGunMan
40f4de02d9 Update system.py 2025-10-27 21:59:57 -07:00
SpudGunMan
0b1d626f09 refactor 2025-10-27 21:52:59 -07:00
SpudGunMan
964883cae9 Update system.py 2025-10-27 21:52:02 -07:00
SpudGunMan
6ab1102d07 Update wiki.py 2025-10-27 21:30:00 -07:00
SpudGunMan
c8d8880806 Update wiki.py 2025-10-27 21:25:12 -07:00
Kelly
21c2f7df18 Merge pull request #236 from SpudGunMan/copilot/link-llm-to-wiki-module
Add RAG support to LLM module with Wikipedia/Kiwix and OpenWebUI integration
2025-10-27 20:45:58 -07:00
SpudGunMan
cb51cf921b Update llm.py 2025-10-27 20:43:22 -07:00
SpudGunMan
908e84e155 Update README.md 2025-10-27 20:32:14 -07:00
SpudGunMan
b9eaf7deb0 Update wiki.py 2025-10-27 20:32:09 -07:00
SpudGunMan
128ac456eb Update wiki.py 2025-10-27 20:15:22 -07:00
SpudGunMan
1269214264 Update llm.py 2025-10-27 20:15:15 -07:00
SpudGunMan
4daf087fa5 Update llm.py 2025-10-27 20:03:14 -07:00
SpudGunMan
9282c63206 Update llm.md 2025-10-27 20:00:50 -07:00
SpudGunMan
710342447f Update llm.py 2025-10-27 19:26:53 -07:00
SpudGunMan
8e2c3a43fb refactor2 2025-10-27 18:50:58 -07:00
SpudGunMan
8d82823ccc refactor1 2025-10-27 17:31:47 -07:00
SpudGunMan
27789d7508 patch 2025-10-27 17:23:23 -07:00
SpudGunMan
680ba98a1c bumping version
thanks dependabot
2025-10-27 04:38:47 -07:00
SpudGunMan
4d71a64971 Update mesh_bot.py 2025-10-26 22:17:01 -07:00
SpudGunMan
d608754b5e dedupe 2025-10-26 21:51:02 -07:00
copilot-swe-agent[bot]
70ab741746 Update README with RAG and OpenWebUI documentation
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-27 03:35:36 +00:00
copilot-swe-agent[bot]
b0cf5914bf Add RAG support with Wikipedia/Kiwix and OpenWebUI integration
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-27 03:32:42 +00:00
copilot-swe-agent[bot]
434fbc3eef Initial plan 2025-10-27 03:26:02 +00:00
SpudGunMan
1186801d7e Update globalalert.py 2025-10-26 20:06:01 -07:00
SpudGunMan
902d764ca0 Update custom_scheduler.py 2025-10-26 19:41:48 -07:00
SpudGunMan
00fd29e679 Update custom_scheduler.py 2025-10-26 19:40:42 -07:00
SpudGunMan
163920b399 Update custom_scheduler.py 2025-10-26 19:36:44 -07:00
SpudGunMan
850ee2d291 Update wiki.py 2025-10-26 13:52:06 -07:00
SpudGunMan
cefbe93178 Update bbstools.md 2025-10-26 12:13:24 -07:00
SpudGunMan
44b2837ba0 fix typo for link command 2025-10-26 12:11:03 -07:00
SpudGunMan
1aa6a7a41a Update dxspot.py 2025-10-26 11:53:39 -07:00
SpudGunMan
7abd1fd704 Update dxspot.py
ffs
2025-10-26 11:51:31 -07:00
SpudGunMan
d35832caa8 Update dxspot.py 2025-10-26 11:49:56 -07:00
SpudGunMan
b4b0f2c561 Update dxspot.py 2025-10-26 11:40:50 -07:00
SpudGunMan
1e8ff95769 Update dxspot.py 2025-10-26 11:37:31 -07:00
SpudGunMan
41093be614 Update bbstools.md 2025-10-26 11:28:55 -07:00
SpudGunMan
6fe874e192 Update bbstools.md 2025-10-26 11:28:12 -07:00
SpudGunMan
3fa5d96073 Update bbstools.md 2025-10-26 11:25:03 -07:00
SpudGunMan
242c1c8741 Update README.md 2025-10-26 11:13:51 -07:00
SpudGunMan
c679cee66c dox 2025-10-26 11:06:48 -07:00
SpudGunMan
ca896c0f35 enhance 2025-10-26 10:19:52 -07:00
SpudGunMan
e3cd727cc3 enhance
filters
2025-10-26 10:11:22 -07:00
SpudGunMan
ded8470677 Update dxspot.py 2025-10-26 09:56:29 -07:00
SpudGunMan
f0b63b8b20 Update simulator.py 2025-10-26 09:54:25 -07:00
SpudGunMan
8a9c7a1147 changes to dx spotter
by is now of and xota is now ota
2025-10-26 09:42:02 -07:00
SpudGunMan
2d0e6b54b3 Update update.sh 2025-10-26 09:29:44 -07:00
SpudGunMan
0745847d3a Update update.sh 2025-10-26 09:28:54 -07:00
SpudGunMan
6c49c5c87f Update update.sh
https://github.com/SpudGunMan/meshing-around/issues/234
2025-10-26 09:24:14 -07:00
SpudGunMan
719fa95c1c Update README.md
thanks for this its been fun to help make this project for the community
2025-10-26 09:09:04 -07:00
SpudGunMan
b642961d26 Update README.md 2025-10-26 08:57:57 -07:00
SpudGunMan
f59d97f6ad Update mesh_bot.py 2025-10-26 08:48:29 -07:00
SpudGunMan
92d5f01ce5 allow days x for wx 2025-10-26 08:40:03 -07:00
SpudGunMan
39e53eb599 Update mesh_bot.py 2025-10-26 08:37:57 -07:00
SpudGunMan
9f6165503e Update locationdata.py 2025-10-26 08:28:41 -07:00
SpudGunMan
ec27ab65da Update custom_scheduler.py 2025-10-26 07:35:59 -07:00
SpudGunMan
f34eefb75a Update custom_scheduler.py 2025-10-26 07:34:21 -07:00
SpudGunMan
ce2ccb1455 Update scheduler.py 2025-10-26 07:31:16 -07:00
SpudGunMan
da144a2b89 scheduler enhancment
this brings scheduler into the 19th century
2025-10-26 07:26:54 -07:00
SpudGunMan
bbdccb382a Update dxspot.py 2025-10-26 06:02:07 -07:00
SpudGunMan
95f75b8e0a ... 2025-10-26 05:57:55 -07:00
SpudGunMan
0bf4915cd5 Update update.sh
https://github.com/SpudGunMan/meshing-around/issues/234
2025-10-26 05:51:59 -07:00
SpudGunMan
f83793acc9 Update README.md 2025-10-26 05:43:10 -07:00
SpudGunMan
abb2fa6b61 Update README.md 2025-10-26 05:42:51 -07:00
SpudGunMan
6d90d6f207 dx command 2025-10-26 05:37:04 -07:00
SpudGunMan
9c9e9a02e6 Update globalalert.py 2025-10-25 21:01:47 -07:00
SpudGunMan
80fc795f35 enhance 2025-10-25 18:50:12 -07:00
SpudGunMan
166c49854f Update config.template 2025-10-25 18:37:44 -07:00
SpudGunMan
a685fc3a9b Update config.template 2025-10-25 18:37:22 -07:00
SpudGunMan
91da1a4c58 Update update.sh 2025-10-25 18:34:06 -07:00
SpudGunMan
9889fd0da8 logs 2025-10-25 18:29:45 -07:00
Kelly
bebd9352ea Merge pull request #231 from SpudGunMan/copilot/add-mesh-bot-timer
Add systemd timer for daily mesh_bot_w3.service execution at 4:20 AM
2025-10-25 17:30:15 -07:00
SpudGunMan
fd1cd2a44c patch 2025-10-25 17:28:16 -07:00
SpudGunMan
ac55a51c87 # messages
https://github.com/SpudGunMan/meshing-around/issues/233
2025-10-25 16:01:54 -07:00
SpudGunMan
86144cd888 Update scheduler.py 2025-10-25 13:12:55 -07:00
SpudGunMan
d7a37ce9f1 haha 2025-10-25 12:59:59 -07:00
SpudGunMan
da7035dfed Update joke.py 2025-10-25 12:28:01 -07:00
SpudGunMan
da500981a2 Update mesh_bot.py 2025-10-25 12:23:48 -07:00
SpudGunMan
b4dc2207a6 refactor scheduler 2025-10-25 12:22:02 -07:00
SpudGunMan
b69a187466 Update mesh_bot.py 2025-10-25 12:10:57 -07:00
SpudGunMan
66d143d68e fix 2025-10-25 11:37:45 -07:00
SpudGunMan
49f2dcff88 Update test_bot.py 2025-10-25 10:59:46 -07:00
SpudGunMan
2c3c3fed10 Update INSTALL.md 2025-10-25 08:45:07 -07:00
SpudGunMan
2872fb040e refactor 2025-10-25 08:35:16 -07:00
copilot-swe-agent[bot]
6097ff899c Add explicit Unit directive to mesh_bot_w3.timer
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-25 14:02:49 +00:00
copilot-swe-agent[bot]
aec75d598a Add systemd timer to run mesh_bot_w3.service daily at 4:20 am
Co-authored-by: SpudGunMan <12676665+SpudGunMan@users.noreply.github.com>
2025-10-25 13:59:45 +00:00
copilot-swe-agent[bot]
0640fdbbae Initial plan 2025-10-25 13:55:40 +00:00
SpudGunMan
8312f4e683 Update system.py 2025-10-24 23:07:14 -07:00
SpudGunMan
990ea4f4e4 leaderboard fix messages 2025-10-24 21:57:16 -07:00
SpudGunMan
9acf9df3bb fix messages win 2025-10-24 21:47:32 -07:00
SpudGunMan
37942e950e fixLeaderboardLoading 2025-10-24 21:45:29 -07:00
SpudGunMan
8a0e1cba7c Update bbstools.py 2025-10-24 21:25:58 -07:00
SpudGunMan
8d0a53ec3e Update system.py 2025-10-24 21:24:18 -07:00
SpudGunMan
2ea3917eba Alllllllllll the rssssss'ssss
srsly I hope this time
2025-10-24 21:20:16 -07:00
SpudGunMan
75410c98e3 sweep 2025-10-24 20:40:23 -07:00
SpudGunMan
10171a712e moar cleanup 🧹
tighter memory control
2025-10-24 20:32:30 -07:00
SpudGunMan
fa76a76203 BIG OLD PATCH 🍠
pz days ... haha. I hope this works.
fancy potato
2025-10-24 19:54:46 -07:00
SpudGunMan
e0e275a49c Revert "scheduler memory issue"
This reverts commit bf39c2f088.
2025-10-24 18:30:08 -07:00
SpudGunMan
bf39c2f088 scheduler memory issue 2025-10-24 18:19:35 -07:00
SpudGunMan
34d36057c1 ATOM FEEDS
oh yea its 2003
2025-10-24 18:05:08 -07:00
SpudGunMan
4e1d1de883 Update adding_more.md 2025-10-24 17:52:24 -07:00
SpudGunMan
97f103dfd7 Update test_bot.py 2025-10-24 17:46:15 -07:00
SpudGunMan
47089871b1 Update test_bot.py 2025-10-24 17:45:16 -07:00
SpudGunMan
cc7ef129f6 Update test_bot.py 2025-10-24 17:42:50 -07:00
SpudGunMan
0fa5d06a3a Update test_bot.py 2025-10-24 17:12:06 -07:00
SpudGunMan
7fc44ec06e Update README.md 2025-10-24 17:12:00 -07:00
SpudGunMan
184760096e game test unit
🧩
2025-10-24 17:01:43 -07:00
SpudGunMan
8868d10388 Update hangman.py 2025-10-24 16:53:43 -07:00
SpudGunMan
1ce2ecd75c Update README.md 2025-10-24 16:50:04 -07:00
SpudGunMan
69e1c21488 enhance hangman.json
example JSON: [\"apple\",\"banana\",\"cherry\"]
2025-10-24 16:46:57 -07:00
SpudGunMan
97a2ffce7b gamepackFix
clean up globals
2025-10-24 15:46:11 -07:00
SpudGunMan
4c0d3a597e Update test_bot.py 2025-10-24 14:58:32 -07:00
SpudGunMan
094f7e61a0 Update wiki.py
fixed
2025-10-24 13:22:36 -07:00
SpudGunMan
a54ecaa5a1 Update mesh_bot.py 2025-10-24 13:21:49 -07:00
SpudGunMan
bd12392d69 Update system.py
doh
2025-10-24 13:05:40 -07:00
SpudGunMan
882bcf3f4b wiki wiki 2025-10-24 13:02:13 -07:00
SpudGunMan
c0d0ca3743 Update compose.yaml 2025-10-24 12:59:29 -07:00
SpudGunMan
d74d848646 Update compose.yaml 2025-10-24 12:58:16 -07:00
SpudGunMan
2afb915b56 Update test_bot.py 2025-10-24 12:50:58 -07:00
SpudGunMan
d5e48bead1 Update compose.yaml 2025-10-24 12:46:27 -07:00
SpudGunMan
3c80848f61 refactor wikipedia
also removed that old package!!!
2025-10-24 12:45:24 -07:00
SpudGunMan
64345fe47a Update wiki.py 2025-10-24 12:30:18 -07:00
SpudGunMan
32f734d69b Update wiki.py 2025-10-24 12:27:30 -07:00
SpudGunMan
aa6de00c5b Update wiki.py 2025-10-24 12:26:24 -07:00
SpudGunMan
6df4ba5756 Update test_bot.py
risky stuff lower
2025-10-24 12:13:50 -07:00
SpudGunMan
a11a2780db Update bbstools.py 2025-10-24 11:55:39 -07:00
SpudGunMan
980414f872 Update test_bot.py 2025-10-24 11:23:04 -07:00
SpudGunMan
f26334d625 Update wiki.py 2025-10-24 11:22:52 -07:00
SpudGunMan
24546b28d6 Create test_bot.py 2025-10-24 10:33:19 -07:00
SpudGunMan
f33da848cd cleanup 2025-10-24 10:32:28 -07:00
SpudGunMan
57ce15de4e Update radio.py 2025-10-24 10:19:05 -07:00
SpudGunMan
b8886e0662 Update qrz.py 2025-10-24 10:16:41 -07:00
SpudGunMan
9a1e86f25e Update qrz.py 2025-10-24 10:13:33 -07:00
SpudGunMan
fa8021ab5a Update checklist.py 2025-10-24 10:06:11 -07:00
SpudGunMan
f3917f1c3d Update locationdata.py 2025-10-24 10:00:35 -07:00
SpudGunMan
c1443048fd Update llm.py 2025-10-24 09:39:57 -07:00
SpudGunMan
da430557f3 Update filemon.py 2025-10-24 09:36:07 -07:00
SpudGunMan
84152bda65 Update checklist.py 2025-10-24 09:35:53 -07:00
SpudGunMan
b6e80ae576 Update bbstools.py 2025-10-24 09:18:31 -07:00
SpudGunMan
18ac26864c better resolution for gametracker
thanks pdx
2025-10-24 08:24:05 -07:00
SpudGunMan
b661fbc750 Revert "fix init of trackers"
This reverts commit 3049d18663.
2025-10-24 08:20:36 -07:00
SpudGunMan
3049d18663 fix init of trackers
thanks @pdxlocations
2025-10-24 08:12:24 -07:00
SpudGunMan
126f81fbd3 Update README.md 2025-10-23 23:38:26 -07:00
SpudGunMan
337d43a7af Update README.md 2025-10-23 23:32:53 -07:00
SpudGunMan
8c3121d5d6 Update entrypoint.sh 2025-10-23 23:29:26 -07:00
SpudGunMan
1d577c9ec5 Update compose.yaml 2025-10-23 23:12:28 -07:00
SpudGunMan
3540b8f110 Update entrypoint.sh 2025-10-23 23:10:15 -07:00
SpudGunMan
3fdebf3bf9 Update entrypoint.sh 2025-10-23 23:07:54 -07:00
SpudGunMan
430279809e Update entrypoint.sh 2025-10-23 23:06:02 -07:00
SpudGunMan
7ba3a78718 Update entrypoint.sh 2025-10-23 23:04:25 -07:00
SpudGunMan
c329391450 Update entrypoint.sh 2025-10-23 23:03:55 -07:00
SpudGunMan
6bc3c3e980 Update entrypoint.sh 2025-10-23 23:00:36 -07:00
SpudGunMan
48788ceda8 Update entrypoint.sh 2025-10-23 22:57:50 -07:00
SpudGunMan
8f5bae3b05 Update README.md 2025-10-23 22:55:18 -07:00
SpudGunMan
17c693c2f7 Update entrypoint.sh 2025-10-23 22:55:16 -07:00
SpudGunMan
ff91356c2a Update compose.yaml 2025-10-23 22:45:54 -07:00
SpudGunMan
180d9f4728 Update compose.yaml 2025-10-23 22:43:27 -07:00
SpudGunMan
1202a076d1 Update compose.yaml 2025-10-23 22:42:55 -07:00
SpudGunMan
9b62d7f4d8 Update compose.yaml 2025-10-23 22:42:00 -07:00
SpudGunMan
9451d23c09 Update compose.yaml 2025-10-23 22:40:48 -07:00
SpudGunMan
dcdef40e89 Update Dockerfile 2025-10-23 22:28:25 -07:00
SpudGunMan
817dde42f2 Update Dockerfile 2025-10-23 22:23:39 -07:00
SpudGunMan
b384d2d5b1 Update entrypoint.sh 2025-10-23 22:18:56 -07:00
SpudGunMan
4db46f16f2 Update compose.yaml 2025-10-23 22:15:12 -07:00
SpudGunMan
5590391f7e Update compose.yaml
i get no security!
2025-10-23 22:11:22 -07:00
SpudGunMan
ccb505f37f confounded 2025-10-23 22:09:34 -07:00
SpudGunMan
d883927572 enhance 2025-10-23 21:57:50 -07:00
SpudGunMan
b0109be3b0 Update README.md 2025-10-23 21:52:13 -07:00
SpudGunMan
98af757d93 enhance 2025-10-23 21:50:54 -07:00
SpudGunMan
f8746ff348 Create null 2025-10-23 21:46:59 -07:00
SpudGunMan
32fbfba3e9 config 2025-10-23 21:45:23 -07:00
SpudGunMan
d2501bf353 Update compose.yaml 2025-10-23 21:38:19 -07:00
SpudGunMan
db9d7d9790 Update README.md 2025-10-23 21:34:56 -07:00
SpudGunMan
c6b5a1c708 docker
i never enjoy docker
2025-10-23 21:26:32 -07:00
SpudGunMan
a36f1580b3 Update entrypoint.sh 2025-10-23 21:24:59 -07:00
SpudGunMan
f051e95986 Update entrypoint.sh 2025-10-23 21:24:45 -07:00
SpudGunMan
bafcfad190 Update entrypoint.sh 2025-10-23 21:21:43 -07:00
SpudGunMan
8b2059c444 Update compose.yaml 2025-10-23 21:07:43 -07:00
SpudGunMan
fd4b5607d7 Update compose.yaml 2025-10-23 21:02:42 -07:00
SpudGunMan
df30ee9cc4 Update compose.yaml 2025-10-23 20:53:26 -07:00
SpudGunMan
c1135ecadf cleanup 2025-10-23 20:40:00 -07:00
SpudGunMan
899702eecc Update compose.yaml 2025-10-23 20:26:55 -07:00
SpudGunMan
d4604d8cbd Update compose.yaml 2025-10-23 20:23:03 -07:00
SpudGunMan
c674b0a404 Update scheduler.py 2025-10-23 20:18:42 -07:00
SpudGunMan
d59ddfd517 fix for Malice
sorry this was so painfull
2025-10-23 20:15:15 -07:00
SpudGunMan
f68c533488 errorLoggin 2025-10-23 20:08:42 -07:00
SpudGunMan
6e47d71028 Update Dockerfile 2025-10-23 20:00:02 -07:00
SpudGunMan
f9af9b756d Update compose.yaml
how did I miss this
2025-10-23 19:57:18 -07:00
SpudGunMan
c19d442190 Update compose.yaml 2025-10-23 19:53:01 -07:00
SpudGunMan
4c2d0cdebb Update compose.yaml 2025-10-23 19:49:01 -07:00
SpudGunMan
612dbf01d3 Update compose.yaml 2025-10-23 19:47:40 -07:00
SpudGunMan
28846b24a6 Update compose.yaml 2025-10-23 19:40:43 -07:00
SpudGunMan
cd398375a2 Update compose.yaml 2025-10-23 19:33:40 -07:00
SpudGunMan
01372a0f2c Update compose.yaml 2025-10-23 19:30:08 -07:00
SpudGunMan
8254ec5baf Update compose.yaml 2025-10-23 19:23:25 -07:00
SpudGunMan
549e12ffc1 Update Dockerfile 2025-10-23 18:50:47 -07:00
SpudGunMan
d940cdf534 Update Dockerfile 2025-10-23 15:57:34 -07:00
78 changed files with 7101 additions and 1620 deletions

View File

@@ -25,10 +25,10 @@ jobs:
#
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v5
# Uses the `docker/login-action` action to log in to the Container registry registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here.
- name: Log in to the Container registry
uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
uses: docker/login-action@28fdb31ff34708d19615a74d67103ddc2ea9725c
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
@@ -36,7 +36,7 @@ jobs:
# This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels.
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
uses: docker/metadata-action@032a4b3bda1b716928481836ac5bfe36e1feaad6
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
# This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages.
@@ -44,7 +44,7 @@ jobs:
# It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step.
- name: Build and push Docker image
id: push
uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
uses: docker/build-push-action@9e436ba9f2d7bcd1d038c8e55d039d37896ddc5d
with:
context: .
push: true
@@ -53,7 +53,7 @@ jobs:
# This step generates an artifact attestation for the image, which is an unforgeable statement about where and how it was built. It increases supply chain security for people who consume the image. For more information, see [Using artifact attestations to establish provenance for builds](/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds).
- name: Generate artifact attestation
uses: actions/attest-build-provenance@v2
uses: actions/attest-build-provenance@v3
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}}
subject-digest: ${{ steps.push.outputs.digest }}

4
.gitignore vendored
View File

@@ -25,6 +25,10 @@ data/rag/*
# qrz db
data/qrz.db
# checklist and inventory databases
data/checklist.db
data/inventory.db
# fileMonitor test file
bee.txt

View File

@@ -8,7 +8,13 @@ ENV PYTHONUNBUFFERED=1 \
TZ=America/Los_Angeles
RUN apt-get update && \
apt-get install -y gettext tzdata locales nano && \
apt-get install -y \
build-essential \
python3-dev \
gettext \
tzdata \
locales \
nano && \
sed -i 's/^# *\(en_US.UTF-8 UTF-8\)/\1/' /etc/locale.gen && \
locale-gen en_US.UTF-8 && \
rm -rf /var/lib/apt/lists/*
@@ -17,7 +23,7 @@ WORKDIR /app
# Install dependencies first for better caching
COPY requirements.txt /app/
RUN pip install --no-cache-dir -r requirements.txt
RUN pip install --no-cache-dir -r /app/requirements.txt
# Copy the rest of the application
COPY . /app
@@ -26,7 +32,12 @@ COPY config.template /app/config.ini
RUN chmod +x /app/script/docker/entrypoint.sh
# Add a non-root user and switch to it
RUN useradd -m appuser
USER appuser
# RUN useradd -m appuser && usermod -a -G dialout appuser
# USER appuser
# Expose Meshtastic TCP API port from the host
#EXPOSE 4403
# Meshing Around Web Dashboard port
#EXPOSE 8420
ENTRYPOINT ["/bin/bash", "/app/script/docker/entrypoint.sh"]

View File

@@ -2,6 +2,9 @@
## Table of Contents
- [Manual Install](#manual-install)
- [Docker Installation](#docker-installation)
- [Requirements](#requirements)
- [install.sh](#installsh)
- [Purpose](#purpose)
- [Usage](#usage)
@@ -22,68 +25,69 @@
---
### Manual Install
Install the required dependencies using pip:
## Manual Install
Install all required dependencies using pip:
```sh
pip install -r requirements.txt
```
Copy the configuration template to `config.ini` and edit it to suit your needs:
Copy the configuration template and edit as needed:
```sh
cp config.template config.ini
```
---
### Docker Installation - handy for windows
See further info on the [docker.md](script/docker/README.md)
### Requirements
Python 3.8? or later is needed (docker on 3.13). The following can be installed with `pip install -r requirements.txt` or using the [install.sh](install.sh) script for venv and automation:
## Docker Installation
```sh
pip install meshtastic
pip install pubsub
```
See [script/docker/README.md](script/docker/README.md) for Docker-based setup instructions.
Docker is recommended for Windows or if you want an isolated environment.
Mesh-bot enhancements:
---
```sh
pip install pyephem
pip install requests
pip install geopy
pip install maidenhead
pip install beautifulsoup4
pip install dadjokes
pip install schedule
pip install wikipedia
```
## Requirements
For the Ollama LLM:
- **Python 3.8 or later** (Python 3.13+ supported in Docker)
- All dependencies are listed in `requirements.txt` and can be installed with:
```sh
pip install -r requirements.txt
```
- To enable emoji in the Debian/Ubuntu console:
```sh
sudo apt-get install fonts-noto-color-emoji
```
- For Ollama LLM support, see the prompts during `install.sh` or visit [https://ollama.com](https://ollama.com).
```sh
pip install googlesearch-python
```
To enable emoji in the Debian console, install the fonts:
```sh
sudo apt-get install fonts-noto-color-emoji
```
---
## install.sh
### Purpose
`install.sh` is an installation and setup script for the Meshing Around Bot project. It automates installing dependencies, configuring the environment, setting up system services, and preparing the bot for use on Linux systems (especially Debian/Ubuntu/Raspberry Pi and embedded devices).
`install.sh` automates installation, configuration, and service setup for the Meshing Around Bot project. It is designed for Linux systems (Debian/Ubuntu/Raspberry Pi and embedded devices).
### Usage
Run this script from the project root directory:
Run from the project root directory:
```sh
bash install.sh
```
To uninstall:
```sh
bash install.sh --nope
```
### What it does
- Checks for existing installations and required permissions.
- Optionally moves the project to `/opt/meshing-around` for standardization.
- Installs Python and pip if not present (unless on embedded systems).
- Checks for existing installations and permissions.
- Optionally moves the project to `/opt/meshing-around`.
- Installs Python and pip if missing (unless on embedded systems).
- Adds the current user (or a dedicated `meshbot` user) to necessary groups for serial and Bluetooth access.
- Copies and configures systemd service files for running the bot as a service.
- Sets up configuration files, updating latitude/longitude automatically.
@@ -95,11 +99,13 @@ bash install.sh
- Offers to reboot the system to complete setup.
### When to use
- For first-time installation of the Meshing Around Bot.
- When migrating to a new device or environment.
- After cloning or updating the repository to set up dependencies and services.
### Note
- You may be prompted for input during installation (e.g., for embedded mode, virtual environment, or optional features).
- Review and edit the script if you have custom requirements or are running on a non-standard system.
@@ -108,10 +114,13 @@ bash install.sh
## update.sh
### Purpose
`update.sh` is an update and maintenance script for the Meshing Around Bot project. It automates the process of safely updating your codebase, backing up data, and merging configuration changes.
### Usage
Run this script from the project root directory:
Run from the project root directory:
```sh
bash update.sh
```
@@ -122,6 +131,7 @@ chmod +x update.sh
```
### What it does
- Stops running Mesh Bot services to prevent conflicts during update.
- Fetches and pulls the latest changes from the GitHub repository (using `git pull --rebase`).
- Handles git conflicts, offering to reset to the latest remote version if needed.
@@ -132,10 +142,12 @@ chmod +x update.sh
- Provides status messages and logs for troubleshooting.
### When to use
- To update your Mesh Bot installation to the latest version.
- Before making significant changes or troubleshooting, as it creates a backup of your data.
### Note
- Review `ini_merge_log.txt` and `config_new.ini` after running for any configuration changes or errors.
- You may be prompted if git conflicts are detected.
@@ -144,9 +156,11 @@ chmod +x update.sh
## launch.sh
### Purpose
`launch.sh` is a convenience script for starting the Mesh Bot, Pong Bot, or generating reports within the Python virtual environment. It ensures the correct environment is activated and the appropriate script is run.
### How to Use
From your project root, run one of the following commands:
- Launch Mesh Bot:
@@ -171,6 +185,7 @@ From your project root, run one of the following commands:
```
### What it does
- Ensures you are in the project directory.
- Copies `config.template` to `config.ini` if no config exists.
- Activates the Python virtual environment (`venv`).
@@ -178,6 +193,7 @@ From your project root, run one of the following commands:
- Deactivates the virtual environment when done.
### Note
- The script requires a Python virtual environment (`venv`) to be present in the project directory.
- If `venv` is missing, the script will exit with an error message.
- Always provide an argument (`mesh`, `pong`, `html`, `html5`, or `add`) to specify what you want to launch.

View File

@@ -40,9 +40,9 @@ Mesh Bot is a feature-rich Python bot designed to enhance your [Meshtastic](http
- **New Node Greetings**: Automatically greet new nodes via text.
### Interactive AI and Data Lookup
- **Weather, Earthquake, River, and Tide Data**: Get local alerts and info from NOAA/USGS; uses Open-Meteo for areas outside NOAA coverage.
- **Weather, Earthquake, River, and Tide Data**: Get local alerts and info from NOAA/USGS; uses Open-Meteo for areas outside NOAA coverage. Global tide predictions available via tidepredict library for worldwide locations.
- **Wikipedia Search**: Retrieve summaries from Wikipedia.
- **Ollama LLM Integration**: Query the [Ollama](https://github.com/ollama/ollama/tree/main/docs) AI for advanced responses.
- **OpenWebUI, Ollama LLM Integration**: Query the [Ollama](https://github.com/ollama/ollama/tree/main/docs) AI for advanced responses. Supports RAG (Retrieval Augmented Generation) with Wikipedia/Kiwix context and [OpenWebUI](https://github.com/open-webui/open-webui) integration for enhanced AI capabilities. [LLM Readme](modules/llm.md)
- **Satellite Passes**: Find upcoming satellite passes for your location.
- **GeoMeasuring Tools**: Calculate distances and midpoints using collected GPS data; supports Fox & Hound direction finding.
@@ -56,9 +56,11 @@ Mesh Bot is a feature-rich Python bot designed to enhance your [Meshtastic](http
- **SNR RF Activity Alerts**: Monitor radio frequencies and receive alerts when high SNR (Signal-to-Noise Ratio) activity is detected.
- **Hamlib Integration**: Use Hamlib (rigctld) to monitor the S meter on a connected radio.
- **Speech-to-Text Broadcasting**: Convert received audio to text using [Vosk](https://alphacephei.com/vosk/models) and broadcast it to the mesh.
- **WSJT-X Integration**: Monitor WSJT-X (FT8, FT4, WSPR, etc.) decode messages and forward them to the mesh network with optional callsign filtering.
- **JS8Call Integration**: Monitor JS8Call messages and forward them to the mesh network with optional callsign filtering.
### Check-In / Check-Out & Asset Tracking
- **Asset Tracking**: Maintain a check-in/check-out list for nodes or assets—ideal for accountability of people and equipment (e.g., Radio-Net, FEMA, trailhead groups).
### Asset Tracking, Check-In/Check-Out, and Inventory Management
Advanced check-in/check-out and asset tracking for people and equipment—ideal for accountability, safety monitoring, and logistics (e.g., Radio-Net, FEMA, trailhead groups). Admin approval workflows, GPS location capture, and overdue alerts. The integrated inventory and point-of-sale (POS) system enables item management, sales tracking, cart-based transactions, and daily reporting for swaps, emergency supply management, field operations, and maker spaces.
### Fun and Games
- **Built-in Games**: Play classic games like DopeWars, Lemonade Stand, BlackJack, and Video Poker directly via DM.
@@ -110,13 +112,18 @@ git clone https://github.com/spudgunman/meshing-around
- **Automated Installation**: [install.sh](INSTALL.md) will automate optional venv and requirements installation.
- **Launch Script**: [launch.sh](INSTALL.md) only used in a venv install, to launch the bot and the report generator.
### Docker Installation - handy for windows
See further info on the [docker.md](script/docker/README.md)
### Docker Installation
Good for Windows or OpenWebUI-enabled bots
## Full list of commands for the bot
[docker.md](script/docker/README.md)
## Module Help
Configuration Guide
[modules/README.md](modules/README.md)
### Games (via DM only)
### Game Help
Games are DM only by default
[modules/games/README.md](modules/games/README.md)
### Firmware 2.6 DM Key, and 2.7 CLIENT_BASE Favorite Nodes
@@ -158,21 +165,18 @@ I used ideas and snippets from other responder bots and want to call them out!
- ARRL Question Pool Data from https://github.com/russolsen/ham_radio_question_pool
### Special Thanks
- **xdep**: For the reporting tools.
- **Nestpebble**: For new ideas and enhancements.
- **mrpatrick1991**: For Docker configurations.
- **[A-c0rN](https://github.com/A-c0rN)**: Assistance with iPAWS and EAS
For testing and feature ideas on Discord and GitHub, if its stable its thanks to you all.
- **PiDiBi, Cisien, bitflip, nagu, Nestpebble, NomDeTom, Iris, Josh, GlockTuber, FJRPiolt, dj505, Woof, propstg, snydermesh, trs2982, F0X, Malice, mesb1, Hailo1999**
- **xdep**: For the reporting html. 📊
- **mrpatrick1991**: For OG Docker configurations. 💻
- **A-c0rN**: Assistance with iPAWS and EAS 🚨
- **Mike O'Connell/skrrt**: For [eas_alert_parser](etc/eas_alert_parser.py) enhanced by **sheer.cold**
- **PiDiBi**: For looking at test functions and other suggestions like wxc, CPU use, and alerting ideas.
- **WH6GXZ nurse dude**: For bashing on installer, Volcano Alerts 🌋
- **Josh**: For more bashing on installer!
- **dj505**: trying it on windows!
- **mikecarper**: ideas, and testing. hamtest
- **c.merphy360**: high altitude alerts
- **Iris**: testing and finding 🐞
- **FJRPiolt**: testing bugs out!!
- **Cisien, bitflip, Woof, propstg, snydermesh, trs2982, F0X, Malice, mesb1, and Hailo1999**: For testing and feature ideas on Discord and GitHub.
- **Meshtastic Discord Community**: For tossing out ideas and testing code.
- **WH6GXZ nurse dude**: Volcano Alerts 🌋
- **mikecarper**: hamtest, leading to quiz etc.. 📋
- **c.merphy360**: high altitude alerts. 🚀
- **G7KSE**: DX Spotting idea. 📻
- **Growing List of GitHub Contributors**
- **Meshtastic Discord Community**: For putting up with 🥔
### Tools
- **Node Backup Management**: [Node Slurper](https://github.com/SpudGunMan/node-slurper)

View File

@@ -1,38 +1,60 @@
# Docker Compose configuration for Meshing Around.
# This setup includes the main Meshing Around service, with optional Ollama and Prometheus Node Exporter services.
# Adjust device mappings, ports, and configurations as needed for your environment.
services:
meshing-around:
stdin_open: true
tty: true
ports:
- 8420:8420
devices: # Optional if using meshtasticd. Pass through radio device.
- /dev/ttyUSB0 # Replace this with your actual device!
#- /dev/ttyAMA0 # Example
#devices:
#- /dev/ttyUSB0:/dev/tty #update your config.ini to /dev/tty
#- /dev/ttyACM0:/dev/tty #if using serial select proper port
volumes:
- /data/meshing-around/config.ini:/app/config.ini:rw
image: ghcr.io/SpudGunMan/meshing-around:test-all-changes
- .:/app:rw
image: ghcr.io/spudgunman/meshing-around:main
container_name: meshing-around
restart: unless-stopped
environment:
- OLLAMA_API_URL=http://ollama:11434
extra_hosts:
#- "host.docker.internal:host-gateway" # Enables access to host services from within the container.
user: "1000:1000" # run as non-root user for better security
- "host.docker.internal:host-gateway"
#user: "1000:1000"
#user: "10999:10999"
networks:
- meshing-around-network
meshtasticd: # Runs a virtual node. Optional, but can be used to link meshing-around directly to mqtt.
test-bot:
image: ghcr.io/spudgunman/meshing-around:main
container_name: test-bot
command: ["/bin/bash", "-c", "python3 modules/test_bot.py | tee /tmp/test_tmp.txt; if grep -E 'failures=|errors=' /tmp/test_tmp.txt; then cp /tmp/test_tmp.txt /app/test_results.txt; fi"]
volumes:
- .:/app:rw
networks:
- meshing-around-network
stdin_open: true
debug-console:
image: ghcr.io/spudgunman/meshing-around:main
container_name: debug-console
command: ["/bin/bash"]
stdin_open: true
tty: true
volumes:
- .:/app:rw
networks:
- meshing-around-network
meshtasticd:
ports:
- 4403:4403
- 443:443
volumes:
- ./script/docker:/etc/meshtasticd:rw
restart: unless-stopped
container_name: meshtasticd
image: meshtastic/meshtasticd:beta
image: meshtastic/meshtasticd:daily-debian
networks:
- meshing-around-network
ollama: # Used for enabling LLM interactions.
ollama:
ports:
- 11434:11434 # Ollama API port
volumes:
- /data/ollama:/root/.ollama
- 11434:11434
container_name: ollama
image: ollama/ollama:latest
restart: unless-stopped
@@ -40,4 +62,10 @@ services:
test: ["CMD", "curl", "-f", "http://localhost:11434/api/tags"]
interval: 30s
timeout: 10s
retries: 5
retries: 5
networks:
- meshing-around-network
networks:
meshing-around-network:
external: true

View File

@@ -57,9 +57,9 @@ spaceWeather = True
# enable or disable the RSS module, and truncate the story
rssEnable = True
rssFeedURL = http://www.hackaday.com/rss.xml,http://rss.slashdot.org/Slashdot/slashdotMain
rssFeedURL = http://www.hackaday.com/rss.xml,http://rss.slashdot.org/Slashdot/slashdotMain,http://www.reddit.com/r/meshtastic/.rss
# RSS feed names must match the order of the URLs above, default is used if no match
rssFeedNames = default,slashdot
rssFeedNames = default,slashdot,mesh
rssMaxItems = 3
rssTruncate = 100
@@ -75,15 +75,28 @@ kiwixLibraryName = wikipedia_en_100_nopic_2025-09
# Enable ollama LLM see more at https://ollama.com
ollama = False
# Ollama model to use (defaults to gemma3:270m)
# Ollama model to use (defaults to gemma3:270m) gemma2 is good for older SYSTEM prompt
# ollamaModel = gemma3:latest
# ollamaModel = gemma2:2b
# server instance to use (defaults to local machine install)
ollamaHostName = http://localhost:11434
# Produce LLM replies to messages that aren't commands?
# If False, the LLM only replies to the "ask:" and "askai" commands.
llmReplyToNonCommands = True
# if True, the input is sent raw to the LLM, if False uses legacy template query
rawLLMQuery = True
# if True, the input is sent raw to the LLM, if False uses SYSTEM prompt
rawLLMQuery = True
# Enable Wikipedia/Kiwix integration with LLM for RAG (Retrieval Augmented Generation)
# When enabled, LLM will automatically search Wikipedia/Kiwix and include context in responses
llmUseWikiContext = False
# Use OpenWebUI instead of direct Ollama API (enables advanced RAG features)
useOpenWebUI = False
# OpenWebUI server URL (e.g., http://localhost:3000)
openWebUIURL = http://localhost:3000
# OpenWebUI API key/token (required when useOpenWebUI is True)
openWebUIAPIKey =
# StoreForward Enabled and Limits
StoreForward = True
@@ -198,6 +211,11 @@ NOAAalertCount = 2
# use Open-Meteo API for weather data not NOAA useful for non US locations
UseMeteoWxAPI = False
# Global Tide Prediction using tidepredict (for non-US locations or offline use)
# When enabled, uses tidepredict library for global tide predictions instead of NOAA API
# tidepredict uses University of Hawaii's Research Quality Dataset for worldwide coverage
useTidePredict = False
# NOAA Coastal Data Enable NOAA Coastal Waters Forecasts and Tide
coastalEnabled = False
# Find the correct coastal weather directory at https://tgftp.nws.noaa.gov/data/forecasts/marine/coastal/
@@ -259,6 +277,15 @@ enabled = False
checklist_db = data/checklist.db
reverse_in_out = False
# Inventory and Point of Sale System
[inventory]
enabled = False
inventory_db = data/inventory.db
# Set to True to disable penny precision and round to nickels (USA cash sales)
# When True: cash sales round down, taxed sales round up to nearest $0.05
# When False (default): normal penny precision ($0.01)
disable_penny = False
[qrz]
# QRZ Hello to new nodes with message
enabled = False
@@ -287,8 +314,10 @@ message = "MeshBot says Hello! DM for more info."
# enable overrides the above and uses the motd as the message
schedulerMotd = False
# value can be min,hour,day,mon,tue,wed,thu,fri,sat,sun.
# value can also be joke (everyXmin) or weather (hour) or link (hour) for special auto messages
# custom for module/scheduler.py custom schedule examples
# value can also be 'joke' (min/interval), 'weather' (time/day), 'link' (hour/interval) for special auto messages
# or 'news' (hour/interval), 'readrss' (hour/interval), 'mwx' (time/day), 'sysinfo' (hour/interval),
# 'tide' (time/day), 'solar' (time/day) for automated information broadcasts, matching module needs enabled!
# 'custom' for module/scheduler.py custom schedule examples
value =
# interval to use when time is not set (e.g. every 2 days)
interval =
@@ -296,13 +325,17 @@ interval =
time =
[radioMon]
# using Hamlib rig control will monitor and alert on channel use
enabled = False
rigControlServerAddress = localhost:4532
# device interface to send the message to
# dx cluster `dx` command
dxspotter_enabled = True
# alerts in this module use the following interface and channel
sigWatchBroadcastInterface = 1
# broadcast channel can also be a comma separated list of channels
sigWatchBroadcastCh = 2
# using Hamlib rig control will monitor and alert on channel use
enabled = False
rigControlServerAddress = 127.0.0.1:4532
# minimum SNR as reported by radio via hamlib
signalDetectionThreshold = -10
# hold time for high SNR
@@ -310,17 +343,37 @@ signalHoldTime = 10
# the following are combined to reset the monitor
signalCooldown = 5
signalCycleLimit = 5
# enable VOX detection using default input
# Enable VOX detection using default input
voxDetectionEnabled = False
# description to use in the alert message
voxDescription = VOX
useLocalVoxModel = False
# default language for VOX detection
voxLanguage = en-us
# sound.card input device to use for VOX detection, 'default' uses system default
voxInputDevice = default
# "hey chirpy"
voxOnTrapList = True
voxTrapList = chirpy
# allow use of 'weather' and 'joke' commands via VOX
voxEnableCmd = True
# WSJT-X UDP monitoring - listens for decode messages from WSJT-X, FT8/FT4/WSPR etc.
wsjtxDetectionEnabled = False
# UDP address and port where WSJT-X broadcasts (default: 127.0.0.1:2237)
wsjtxUdpServerAddress = 127.0.0.1:2237
# Comma-separated list of callsigns to watch (empty = all callsigns)
wsjtxWatchedCallsigns =
# JS8Call TCP monitoring - connects to JS8Call API for message forwarding
js8callDetectionEnabled = False
# TCP address and port where JS8Call API listens (default: 127.0.0.1:2442)
js8callServerAddress = 127.0.0.1:2442
# Comma-separated list of callsigns to watch (empty = all callsigns)
js8callWatchedCallsigns =
[fileMon]
filemon_enabled = False

View File

@@ -0,0 +1,264 @@
# Implementation Summary: Enhanced Check-in/Check-out and Point of Sale System
## Overview
This implementation addresses the GitHub issue requesting enhancements to the check-in/check-out system and the addition of a complete Point of Sale (POS) functionality to the meshing-around project.
## What Was Implemented
### 1. Enhanced Check-in/Check-out System
#### New Features Added:
- **Time Window Monitoring**: Check-in with safety intervals (e.g., `checkin 60 Hunting in tree stand`)
- Tracks if users don't check in within expected timeframe
- Ideal for solo activities, remote work, or safety accountability
- Provides `get_overdue_checkins()` function for alert integration
- **Approval Workflow**:
- `checklistapprove <id>` - Approve pending check-ins (admin)
- `checklistdeny <id>` - Deny/remove check-ins (admin)
- Support for approval-based workflows
- **Enhanced Database Schema**:
- Added `approved` field for approval workflows
- Added `expected_checkin_interval` field for safety monitoring
- Automatic migration for existing databases
#### New Commands:
- `checklistapprove <id>` - Approve a check-in
- `checklistdeny <id>` - Deny a check-in
- Enhanced `checkin [interval] [note]` - Now supports interval parameter
### 2. Complete Point of Sale System
#### Features Implemented:
**Item Management:**
- Add items with price, quantity, and location
- Remove items from inventory
- Update item prices and quantities
- Quick sell functionality
- Transaction returns/reversals
- Full inventory listing with valuations
**Cart System:**
- Per-user shopping carts
- Add/remove items from cart
- View cart with totals
- Complete transactions (buy/sell)
- Clear cart functionality
**Financial Features:**
- Penny rounding support (USA mode)
- Cash sales round down to nearest nickel
- Taxed sales round up to nearest nickel
- Transaction logging with full audit trail
- Daily sales statistics
- Revenue tracking
- Hot item detection (best sellers)
**Database Schema:**
Four tables for complete functionality:
- `items` - Product inventory
- `transactions` - Sales records
- `transaction_items` - Line items per transaction
- `carts` - Temporary shopping carts
#### Commands Implemented:
**Item Management:**
- `itemadd <name> <price> <qty> [location]` - Add new item
- `itemremove <name>` - Remove item
- `itemreset <name> [price=X] [qty=Y]` - Update item
- `itemsell <name> <qty> [notes]` - Quick sale
- `itemreturn <transaction_id>` - Reverse transaction
- `itemlist` - View all inventory
- `itemstats` - Daily statistics
**Cart System:**
- `cartadd <name> <qty>` - Add to cart
- `cartremove <name>` - Remove from cart
- `cartlist` / `cart` - View cart
- `cartbuy` / `cartsell [notes]` - Complete transaction
- `cartclear` - Empty cart
## Files Created/Modified
### New Files:
1. **modules/inventory.py** (625 lines)
- Complete inventory and POS module
- All item management functions
- Cart system implementation
- Transaction processing
- Penny rounding logic
2. **modules/inventory.md** (8,529 chars)
- Comprehensive user guide
- Command reference
- Use case examples
- Database schema documentation
3. **modules/checklist.md** (9,058 chars)
- Enhanced checklist user guide
- Safety monitoring documentation
- Best practices
- Scenario examples
### Modified Files:
1. **modules/checklist.py**
- Added time interval monitoring
- Added approval workflow functions
- Enhanced database schema
- Updated command processing
2. **modules/settings.py**
- Added inventory configuration section
- Added `inventory_enabled` setting
- Added `inventory_db` path setting
- Added `disable_penny` setting
3. **config.template**
- Added `[inventory]` section
- Documentation for penny rounding
4. **modules/system.py**
- Integrated inventory module
- Added trap list for inventory commands
5. **mesh_bot.py**
- Added inventory command handlers
- Added checklist approval commands
- Created `handle_inventory()` function
6. **modules/README.md**
- Updated checklist section with new features
- Added complete inventory/POS section
- Updated table of contents
7. **.gitignore**
- Added database files to ignore list
## Configuration
### Enable Inventory System:
```ini
[inventory]
enabled = True
inventory_db = data/inventory.db
disable_penny = False # Set to True for USA penny rounding
```
### Checklist Already Configured:
```ini
[checklist]
enabled = False # Set to True to enable
checklist_db = data/checklist.db
reverse_in_out = False
```
## Testing Results
All functionality tested and verified:
- ✅ Module imports work correctly
- ✅ Database initialization successful
- ✅ Inventory commands function properly
- ✅ Cart system working as expected
- ✅ Checklist enhancements operational
- ✅ Time interval monitoring active
- ✅ Trap lists properly registered
- ✅ Help commands return correct information
## Use Cases Addressed
### From Issue Comments:
1. **Point of Sale Logic**
- Complete POS system with inventory management
- Cart-based transactions
- Sales tracking and reporting
2. **Check-in Time Windows**
- Interval-based monitoring
- Overdue detection
- Safety accountability for solo activities
3. **Geo-location Awareness**
- Automatic GPS capture when checking in/out
- Location stored with each check-in
- Foundation for "are you ok" alerts
4. **Asset Management**
- Track any type of asset (tools, equipment, supplies)
- Multiple locations support
- Full transaction history
5. **Penny Rounding**
- Configurable USA cash sale rounding
- Separate logic for cash vs taxed sales
- Down for cash, up for tax
## Security Features
- Users on `bbs_ban_list` cannot use inventory or checklist commands
- Admin-only approval commands
- Parameterized SQL queries prevent injection
- Per-user cart isolation
- Full transaction audit trail
## Documentation Provided
1. **User Guides:**
- Comprehensive inventory.md with examples
- Detailed checklist.md with safety scenarios
- Updated main README.md
2. **Technical Documentation:**
- Database schema details
- Configuration examples
- Command reference
- API documentation in code comments
3. **Examples:**
- Emergency supply tracking
- Event merchandise sales
- Field equipment management
- Safety monitoring scenarios
## Future Enhancement Opportunities
The implementation provides foundation for:
- Scheduled overdue check-in alerts
- Email/SMS notifications for overdue status
- Dashboard/reporting interface
- Barcode/QR code support
- Multi-location inventory tracking
- Inventory forecasting
- Integration with external systems
## Backward Compatibility
- Existing checklist databases automatically migrate
- New features are opt-in via configuration
- No breaking changes to existing commands
- Graceful handling of missing database columns
## Performance Considerations
- SQLite databases for reliability and simplicity
- Indexed primary keys for fast lookups
- Efficient query design
- Minimal memory footprint
- No external dependencies beyond stdlib
## Conclusion
This implementation fully addresses all requirements from the GitHub issue:
- ✅ Enhanced check-in/check-out with SQL improvements
- ✅ Point of sale logic with inventory management
- ✅ Time window notifications for safety
- ✅ Asset tracking for any item type
- ✅ Penny rounding for USA cash sales
- ✅ Cart management system
- ✅ Comprehensive documentation
The system is production-ready, well-tested, and documented for immediate use.

View File

@@ -1,52 +0,0 @@
import schedule
from modules.log import logger
from modules.system import send_message
def setup_custom_schedules(send_message, tell_joke, welcome_message, handle_wxc, MOTD, schedulerChannel, schedulerInterface):
    """Register custom scheduled jobs for the bot.

    Out of the box this only installs a harmless placeholder job that logs a
    reminder every Monday at noon; uncomment and adapt the examples below to
    broadcast real messages to the mesh.

    Args:
        send_message: callable(text, channel, dest, interface) used to transmit.
        tell_joke: callable returning a random joke string.
        welcome_message: welcome text string.
        handle_wxc: weather lookup helper.
        MOTD: message-of-the-day string.
        schedulerChannel: default channel for scheduled broadcasts.
        schedulerInterface: default device interface for scheduled broadcasts.
    """
    logger.debug(f"System: Starting the custom_scheduler.py default to send reminder every Monday at noon on Device:{schedulerInterface} Channel:{schedulerChannel}")

    def _weekly_reminder():
        # Placeholder job: proves the scheduler is wired up without transmitting.
        logger.info("System: Scheduled Broadcast Enabled Reminder")

    schedule.every().monday.at("12:00").do(_weekly_reminder)

    # ------------------------------------------------------------------
    # Examples (24-hour clock). See https://schedule.readthedocs.io/en/stable/
    # Remove the leading '#' from a line to activate it.
    # ------------------------------------------------------------------
    # Joke every 2 minutes on the default scheduler channel/device:
    #schedule.every(2).minutes.do(lambda: send_message(tell_joke(), schedulerChannel, 0, schedulerInterface))
    # "Good Morning" every day at 09:00 to channel 2 on device 1:
    #schedule.every().day.at("09:00").do(lambda: send_message("Good Morning", 2, 0, 1))
    # Weather report every morning at 08:00 to channel 2 on device 1:
    #schedule.every().day.at("08:00").do(lambda: send_message(handle_wxc(0, 1, 'wx'), 2, 0, 1))
    # Weather-channel notice every Wednesday at noon:
    #schedule.every().wednesday.at("12:00").do(lambda: send_message("Weather alerts available on 'Alerts' channel with default 'AQ==' key.", 2, 0, 1))
    # Network config URL every other day at 10:00:
    #schedule.every(2).days.at("10:00").do(lambda: send_message("Join us on Medium Fast https://meshtastic.org/e/#CgcSAQE6AggNEg4IARAEOAFAA0gBUB5oAQ", 2, 0, 1))
    # Net-start announcement every Wednesday at 19:00:
    #schedule.every().wednesday.at("19:00").do(lambda: send_message("Net Starting Now", 2, 0, 1))
    # Joke every 6 hours on the default scheduler channel/device:
    #schedule.every(6).hours.do(lambda: send_message(tell_joke(), schedulerChannel, 0, schedulerInterface))
    # Welcome message every other day at 08:00:
    #schedule.every(2).days.at("08:00").do(lambda: send_message(welcome_message, schedulerChannel, 0, schedulerInterface))
    # MOTD every day at 13:00:
    #schedule.every().day.at("13:00").do(lambda: send_message(MOTD, schedulerChannel, 0, schedulerInterface))
    # bbslink peer discovery every other day at 10:00:
    #schedule.every(2).days.at("10:00").do(lambda: send_message("bbslink MeshBot looking for peers", schedulerChannel, 0, schedulerInterface))

View File

@@ -0,0 +1,125 @@
#!/usr/bin/python3
import schedule
from modules.log import logger
from modules.settings import MOTD
from modules.system import send_message
def setup_custom_schedules(send_message, tell_joke, welcome_message, handle_wxc, MOTD, schedulerChannel, schedulerInterface):
    """
    Set up custom schedules. Edit the example schedules as needed.
    1. in config.ini set "value" under [scheduler] to: value = custom
    2. edit this file to add/remove/modify schedules
    3. restart mesh bot
    4. verify schedules are working by checking the log file
    5. Make sure to uncomment (delete the single #) the example schedules down at the end of the file to enable them
    Python is sensitive to indentation so be careful when editing this file.
    https://thonny.org is included on the Raspberry Pi image and is a simple IDE to use for editing python files.
    Available functions you can import and use, be sure they are enabled modules in config.ini:
    - tell_joke() - Returns a random joke
    - welcome_message - A welcome message string
    - handle_wxc(message_from_id, deviceID, cmd, days=None) - Weather information
    - handleNews(message_from_id, deviceID, message, isDM) - News reader
    - get_rss_feed(msg) - RSS feed reader
    - handle_mwx(message_from_id, deviceID, cmd) - Marine weather
    - sysinfo(message, message_from_id, deviceID, isDM) - System information
    - handle_tide(message_from_id, deviceID, channel_number) - Tide information
    - handle_sun(message_from_id, deviceID, channel_number) - Sun information
    - MOTD - Message of the day string
    """
    # The whole setup runs inside try/except so a typo in a user-edited
    # schedule logs an error instead of crashing the bot at startup.
    try:
        # Import additional functions for scheduling (optional, depending on your needs).
        # Imported here (not at module top) to avoid a circular import with mesh_bot.
        from mesh_bot import handleNews, sysinfo, handle_mwx, handle_tide, handle_sun
        from modules.rss import get_rss_feed

        # Example task functions; the channel and interface parameters default to
        # schedulerChannel and schedulerInterface when wired up in the examples below.
        def send_joke(channel, interface):
            ## uses system.send_message to send the result of tell_joke()
            send_message(tell_joke(), channel, 0, interface)
        def send_good_morning(channel, interface):
            ## uses system.send_message to send "Good Morning"
            send_message("Good Morning", channel, 0, interface)
        def send_wx(channel, interface):
            ## uses system.send_message to send the result of handle_wxc(id,id,cmd,days_returned)
            send_message(handle_wxc(0, 1, 'wx', days=1), channel, 0, interface)
        def send_weather_alert(channel, interface):
            ## uses system.send_message to send string
            send_message("Weather alerts available on 'Alerts' channel with default 'AQ==' key.", channel, 0, interface)
        def send_config_url(channel, interface):
            ## uses system.send_message to send string
            send_message("Join us on Medium Fast https://meshtastic.org/e/#CgcSAQE6AggNEg4IARAEOAFAA0gBUB5oAQ", channel, 0, interface)
        def send_net_starting(channel, interface):
            ## uses system.send_message to send string; NOTE: intentionally hard-coded to channel 2, interface 3
            send_message("Net Starting Now", 2, 0, 3)
        def send_welcome(channel, interface):
            ## uses system.send_message to send string; NOTE: intentionally hard-coded to channel 2, interface 1
            send_message("Welcome to the group", 2, 0, 1)
        def send_motd(channel, interface):
            ## uses system.send_message to send message of the day string which can be updated in runtime
            send_message(MOTD, channel, 0, interface)
        def send_news(channel, interface):
            ## uses system.send_message to send the result of handleNews()
            send_message(handleNews(0, interface, 'readnews', False), channel, 0, interface)
        def send_rss(channel, interface):
            ## uses system.send_message to send the result of get_rss_feed()
            send_message(get_rss_feed(''), channel, 0, interface)
        def send_marine_weather(channel, interface):
            ## uses system.send_message to send the result of handle_mwx()
            send_message(handle_mwx(0, interface, 'mwx'), channel, 0, interface)
        def send_sysinfo(channel, interface):
            ## uses system.send_message to send the result of sysinfo()
            send_message(sysinfo('', 0, interface, False), channel, 0, interface)
        def send_tide(channel, interface):
            ## uses system.send_message to send the result of handle_tide()
            send_message(handle_tide(0, interface, channel), channel, 0, interface)
        def send_sun(channel, interface):
            ## uses system.send_message to send the result of handle_sun()
            send_message(handle_sun(0, interface, channel), channel, 0, interface)

        # ---- Example schedules: remove the single leading '#' to enable ----
        ### Send a joke every 2 minutes
        #schedule.every(2).minutes.do(lambda: send_joke(schedulerChannel, schedulerInterface))
        ### Send a good morning message every day at 9 AM
        #schedule.every().day.at("09:00").do(lambda: send_good_morning(schedulerChannel, schedulerInterface))
        ### Send weather update every day at 8 AM
        #schedule.every().day.at("08:00").do(lambda: send_wx(schedulerChannel, schedulerInterface))
        ### Send weather alerts every Wednesday at noon
        #schedule.every().wednesday.at("12:00").do(lambda: send_weather_alert(schedulerChannel, schedulerInterface))
        ### Send configuration URL every 2 days at 10 AM
        #schedule.every(2).days.at("10:00").do(lambda: send_config_url(schedulerChannel, schedulerInterface))
        ### Send net starting message every Wednesday at 7 PM
        #schedule.every().wednesday.at("19:00").do(lambda: send_net_starting(schedulerChannel, schedulerInterface))
        ### Send welcome message every 2 days at 8 AM
        #schedule.every(2).days.at("08:00").do(lambda: send_welcome(schedulerChannel, schedulerInterface))
        ### Send MOTD every day at 1 PM
        #schedule.every().day.at("13:00").do(lambda: send_motd(schedulerChannel, schedulerInterface))
        ### Send bbslink message every 2 days at 10 AM
        #schedule.every(2).days.at("10:00").do(lambda: send_message("bbslink MeshBot looking for peers", schedulerChannel, 0, schedulerInterface))
        ### Send news updates every 6 hours
        #schedule.every(6).hours.do(lambda: send_news(schedulerChannel, schedulerInterface))
        ### Send RSS feed every day at 9 AM
        #schedule.every().day.at("09:00").do(lambda: send_rss(schedulerChannel, schedulerInterface))
        ### Send marine weather every day at 6 AM
        #schedule.every().day.at("06:00").do(lambda: send_marine_weather(schedulerChannel, schedulerInterface))
        ### Send system information every day at 12 PM
        #schedule.every().day.at("12:00").do(lambda: send_sysinfo(schedulerChannel, schedulerInterface))
        ### Send tide information every day at 5 AM
        #schedule.every().day.at("05:00").do(lambda: send_tide(schedulerChannel, schedulerInterface))
        ### Send sun information every day at 6 AM
        #schedule.every().day.at("06:00").do(lambda: send_sun(schedulerChannel, schedulerInterface))
    except Exception as e:
        logger.error(f"Error setting up custom schedules: {e}")

View File

@@ -1,5 +1,8 @@
# Load the bbs messages from the database file to screen for admin functions
import pickle # pip install pickle
import pickle
import sqlite3
print ("\n Meshing-Around Database Admin Tool\n")
# load the bbs messages from the database file
@@ -106,7 +109,70 @@ except Exception as e:
golfsim_score = "System: data/golfsim_hs.pkl not found"
print ("\n Meshing-Around Database Admin Tool\n")
# checklist.db admin display
print("\nCurrent Check-ins Table\n")
try:
    # Open read/write via a URI so connect() raises when the file is absent.
    # A plain connect('../data/checklist.db') silently CREATES an empty
    # database, so the fallback below would never have been reached.
    conn = sqlite3.connect('file:../data/checklist.db?mode=rw', uri=True)
except Exception:
    # Fallback for running from the project root instead of a subdirectory.
    conn = sqlite3.connect('data/checklist.db')
c = conn.cursor()
try:
    # Show the 20 most recent check-ins that have not been removed.
    c.execute("""
        SELECT * FROM checkin
        WHERE removed = 0
        ORDER BY checkin_id DESC
        LIMIT 20
    """)
    rows = c.fetchall()
    col_names = [desc[0] for desc in c.description]
    if rows:
        # Print header
        header = " | ".join(f"{name:<15}" for name in col_names)
        print(header)
        print("-" * len(header))
        # Print rows
        for row in rows:
            print(" | ".join(f"{str(col):<15}" for col in row))
    else:
        print("No check-ins found.")
except Exception as e:
    print(f"Error reading check-ins: {e}")
finally:
    conn.close()
# inventory.db admin display
print("\nCurrent Inventory Table\n")
try:
    # Open read/write via a URI so connect() raises when the file is absent.
    # A plain connect('../data/inventory.db') silently CREATES an empty
    # database, so the fallback below would never have been reached.
    conn = sqlite3.connect('file:../data/inventory.db?mode=rw', uri=True)
except Exception:
    # Fallback for running from the project root instead of a subdirectory.
    conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
try:
    # Show the 20 most recently added inventory items.
    c.execute("""
        SELECT * FROM inventory
        ORDER BY item_id DESC
        LIMIT 20
    """)
    rows = c.fetchall()
    col_names = [desc[0] for desc in c.description]
    if rows:
        # Print header
        header = " | ".join(f"{name:<15}" for name in col_names)
        print(header)
        print("-" * len(header))
        # Print rows
        for row in rows:
            print(" | ".join(f"{str(col):<15}" for col in row))
    else:
        print("No inventory items found.")
except Exception as e:
    print(f"Error reading inventory: {e}")
finally:
    conn.close()
# Pickle database displays
print ("System: bbs_messages")
print (bbs_messages)
print ("\nSystem: bbs_dm")

102
etc/fakeNode.py Normal file
View File

@@ -0,0 +1,102 @@
# https://github.com/pdxlocations/mudp/blob/main/examples/helloworld-example.py
# Fake Meshtastic node over UDP multicast: advertises a node identity and can
# broadcast demo packets (nodeinfo, text, telemetry, position, waypoint).
import time
import random
from pubsub import pub
from meshtastic.protobuf import mesh_pb2
from mudp import (
    conn,
    node,
    UDPPacketStream,
    send_nodeinfo,
    send_text_message,
    send_device_telemetry,
    send_position,
    send_environment_metrics,
    send_power_metrics,
    send_waypoint,
)

# Multicast group/port the fake node joins -- presumably the Meshtastic-over-UDP
# defaults; confirm against the mudp documentation.
MCAST_GRP = "224.0.0.69"
MCAST_PORT = 4403
# Base64 channel key handed to the packet stream; NOTE(review): looks like the
# stock public "LongFast" key -- verify before pointing at a private channel.
KEY = "1PG7OiApB1nwvP+rz05pAQ=="
interface = UDPPacketStream(MCAST_GRP, MCAST_PORT, key=KEY)
def setup_node():
    """Configure the fake node's identity and join the multicast group.

    Populates the shared mudp ``node`` object with a fixed test identity,
    binds ``conn`` to the multicast endpoint, and prints the identity for
    the operator.
    """
    node.node_id = "!deadbeef"
    node.long_name = "UDP Test"
    node.short_name = "UDP"
    node.channel = "LongFast"
    node.key = "AQ=="
    conn.setup_multicast(MCAST_GRP, MCAST_PORT)
    # node_id is "!<hex>": drop the leading '!' and parse base-16 for display.
    numeric_id = int(node.node_id.lstrip("!"), 16)
    print(f"Node ID: {node.node_id} (decimal: {numeric_id})")
    print(f"Channel: {node.channel}, Key: {node.key}")
def demo_send_messages():
    """Broadcast one example of every supported packet type.

    Announces each packet on stdout before sending it, pausing three seconds
    between packets (no pause after the final one).
    """
    steps = (
        ("Sending node info...", lambda: send_nodeinfo()),
        ("Sending text message...", lambda: send_text_message("hello world")),
        ("Sending device telemetry position...", lambda: send_position(
            latitude=37.7749, longitude=-122.4194, altitude=3000,
            precision_bits=3, ground_speed=5)),
        ("Sending device telemetry local node data...", lambda: send_device_telemetry(
            battery_level=50, voltage=3.7, channel_utilization=25,
            air_util_tx=15, uptime_seconds=123456)),
        ("Sending environment metrics...", lambda: send_environment_metrics(
            temperature=23.072298,
            relative_humidity=17.5602016,
            barometric_pressure=995.36261,
            gas_resistance=229.093369,
            voltage=5.816,
            current=-29.3,
            iaq=66,
        )),
        ("Sending power metrics...", lambda: send_power_metrics(
            ch1_voltage=18.744,
            ch1_current=11.2,
            ch2_voltage=2.792,
            ch2_current=18.4,
            ch3_voltage=0,
            ch3_current=0,
        )),
        ("Sending waypoint...", lambda: send_waypoint(
            id=random.randint(1, 2**32 - 1),
            latitude=45.271394,
            longitude=-121.736083,
            expire=0,
            locked_to=node.node_id,
            name="Camp",
            description="Main campsite near the lake",
            icon=0x1F3D5,  # 🏕
        )),
    )
    # Sleep BETWEEN packets only, so the last send returns immediately.
    for index, (label, transmit) in enumerate(steps):
        if index:
            time.sleep(3)
        print(label)
        transmit()
def main():
    """Run the fake node: start the UDP interface and offer an interactive demo loop.

    Prompts the operator to replay the demo packet sequence until they decline
    or interrupt with Ctrl+C; the interface is always stopped on exit.
    """
    setup_node()
    interface.start()
    print("MUDP Fake Node is running. Press Ctrl+C to exit.")
    print("You can send demo messages to the network.")
    try:
        keep_running = True
        while keep_running:
            reply = input("Do you want to send demo messages? (y/n): ").strip().lower()
            if reply == "y":
                demo_send_messages()
            elif reply == "n":
                print("Exiting.")
                keep_running = False
            # Any other answer: re-prompt silently, matching prior behavior.
    except KeyboardInterrupt:
        pass
    finally:
        interface.stop()


if __name__ == "__main__":
    main()

View File

@@ -2,9 +2,11 @@
Description=MeshingAround-ReportingTask
[Timer]
OnUnitActiveSec=1h
OnbootSec=5min
OnCalendar=*-*-* 04:20:00
Persistent=true
Unit=mesh_bot_reporting.service
#OnUnitActiveSec=1h
#OnbootSec=5min
[Install]
WantedBy=timers.target

View File

@@ -14,6 +14,9 @@ Group=pi
WorkingDirectory=/dir/
ExecStart=python3 etc/report_generator5.py
ExecStop=pkill -f report_generator5.py
# ExecStart=python3 etc/report_generator.py
# ExecStop=pkill -f report_generator.py
# Disable Python's buffering of STDOUT and STDERR, so that output from the
# service shows up immediately in systemd's logs
@@ -23,4 +26,4 @@ Restart=on-failure
Type=notify #try simple if any problems
[Install]
WantedBy=default.target
WantedBy=timers.target

View File

@@ -1,28 +0,0 @@
# /etc/systemd/system/mesh_bot_w3.service
# sudo systemctl daemon-reload
# sudo systemctl enable mesh_bot_w3.service
# sudo systemctl start mesh_bot_w3.service
[Unit]
Description=MeshingAround-W3Server
After=network.target
[Service]
Type=simple
User=pi
Group=pi
WorkingDirectory=/dir/
ExecStart=python3 modules/web.py
ExecStop=pkill -f mesh_bot_w3.py
Environment=REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt
Environment=SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
# Disable Python's buffering of STDOUT and STDERR, so that output from the
# service shows up immediately in systemd's logs
Environment=PYTHONUNBUFFERED=1
Restart=on-failure
Type=notify #try simple if any problems
[Install]
WantedBy=default.target

View File

@@ -0,0 +1,21 @@
[Unit]
Description=MeshingAround-WebServer
After=network.target
[Service]
Type=simple
User=pi
Group=pi
WorkingDirectory=/dir/
ExecStart=python3 modules/web.py
ExecStop=pkill -f mesh_bot_w3.py
Environment=REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt
Environment=SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
Environment=PYTHONUNBUFFERED=1
Restart=on-failure
[Install]
WantedBy=multi-user.target

40
etc/set-permissions.sh Normal file
View File

@@ -0,0 +1,40 @@
#!/bin/bash
# Set ownership and permissions for the Meshing Around application.
# Usage: sudo ./set-permissions.sh [user]   (user defaults to "meshbot")
set -u

# Application root.
# NOTE(review): the original hard-coded "/opt/meshing-around/-around", which
# looks like a copy/paste artifact -- confirm the actual install prefix.
APP_DIR="/opt/meshing-around"

# Root is required to chown files to arbitrary users.
if [ "$EUID" -ne 0 ]; then
  echo "Please run as root" >&2
  exit 1
fi

# Use first argument as user, or default to meshbot
TARGET_USER="${1:-meshbot}"

# If the requested account does not exist, offer the invoking user instead.
# (logname resolves the login user even under sudo, but can fail without a tty.)
if ! id "$TARGET_USER" &>/dev/null; then
  echo "User '$TARGET_USER' does not exist."
  read -p "Would you like to use the current user ($(logname)) instead? [y/N]: " yn
  if [[ "$yn" =~ ^[Yy]$ ]]; then
    TARGET_USER="$(logname)"
    echo "Using current user: $TARGET_USER"
    if ! id "$TARGET_USER" &>/dev/null; then
      echo "Current user '$TARGET_USER' does not exist or cannot be determined." >&2
      exit 1
    fi
  else
    echo "Exiting."
    exit 1
  fi
fi

echo "Setting ownership to $TARGET_USER:$TARGET_USER"
# One recursive chown of the app root already covers logs/, data/ and
# config.ini -- the original repeated it per-subpath redundantly.
chown -R "$TARGET_USER:$TARGET_USER" "$APP_DIR"
# Config readable by owner/group only; logs and data traversable by group only.
chmod 640 "$APP_DIR/config.ini"
chmod 750 "$APP_DIR/logs"
chmod 750 "$APP_DIR/data"
echo "Permissions and ownership have been set."

View File

@@ -1,7 +1,8 @@
#!/usr/bin/env python3
# # Simulate meshing-around de K7MHI 2024
from modules.log import * # Import the logger; ### --> If you are reading this put the script in the project root <-- ###
from modules.log import logger, getPrettyTime # Import the logger; ### --> If you are reading this put the script in the project root <-- ###
import time
import datetime
import random
# Initialize the tool
@@ -22,6 +23,8 @@ def get_name_from_number(nodeID, length='short', interface=1):
# return random name for nodeID
names = ["Max","Molly","Jake","Kelly"]
return names[nodeID % len(names)]
def mesh_bot(message, nodeID, deviceID):
return "Meshing-Around Bot at your service!"
#simulate GPS locations for testing
locations = [
(48.200909, -123.25719),

View File

@@ -1,10 +1,71 @@
#!/bin/bash
# meshing-around install helper script
# to uninstall, run with --nope
# install.sh
NOPE=0
cd "$(dirname "$0")"
program_path=$(pwd)
chronjob="0 1 * * * /usr/bin/python3 $program_path/etc/report_generator5.py"
for arg in "$@"; do
if [[ "$arg" == "--nope" ]]; then
NOPE=1
fi
done
if [[ $NOPE -eq 1 ]]; then
echo "Uninstalling Meshing Around and all related services..."
sudo systemctl stop mesh_bot || true
sudo systemctl disable mesh_bot || true
sudo systemctl stop pong_bot || true
sudo systemctl disable pong_bot || true
sudo systemctl stop mesh_bot_w3_server || true
sudo systemctl disable mesh_bot_w3_server || true
sudo systemctl stop mesh_bot_reporting || true
sudo systemctl disable mesh_bot_reporting || true
sudo rm -f /etc/systemd/system/mesh_bot.service
sudo rm -f /etc/systemd/system/mesh_bot_reporting
sudo rm -f /etc/systemd/system/pong_bot.service
sudo rm -f /etc/systemd/system/mesh_bot_w3_server.service
sudo rm -f /etc/systemd/system/mesh_bot_reporting.service
sudo rm -f /etc/systemd/system/mesh_bot_reporting.timer
sudo systemctl daemon-reload
sudo systemctl reset-failed
sudo gpasswd -d meshbot dialout || true
sudo gpasswd -d meshbot tty || true
sudo gpasswd -d meshbot bluetooth || true
sudo groupdel meshbot || true
sudo userdel meshbot || true
sudo rm -rf /opt/meshing-around/
# If Ollama was installed and you want to remove it:
if [[ -f /etc/systemd/system/ollama.service ]]; then
read -p "Ollama service detected. Do you want to remove Ollama and all its data? (y/n): " remove_ollama
if [[ "$remove_ollama" =~ ^[Yy] ]]; then
sudo systemctl stop ollama || true
sudo systemctl disable ollama || true
sudo rm -f /etc/systemd/system/ollama.service
sudo rm -rf /usr/local/bin/ollama
sudo rm -rf ~/.ollama
echo "Ollama removed."
else
echo "Ollama not removed."
fi
fi
echo "Uninstall complete. Hope to see you again! 73"
exit 0
fi
# install.sh, Meshing Around installer script
# Thanks for using Meshing Around!
printf "\n########################"
printf "\nMeshing Around Installer\n"
printf "########################\n"
@@ -76,21 +137,23 @@ else
printf "\nDependencies installed\n"
fi
# add user to groups for serial access
printf "\nAdding user to dialout, bluetooth, and tty groups for serial access\n"
sudo usermod -a -G dialout "$USER"
sudo usermod -a -G tty "$USER"
sudo usermod -a -G bluetooth "$USER"
# copy service files
cp etc/pong_bot.tmp etc/pong_bot.service
cp etc/mesh_bot.tmp etc/mesh_bot.service
cp etc/mesh_bot_reporting.tmp etc/mesh_bot_reporting.service
cp etc/mesh_bot_w3.tmp etc/mesh_bot_w3.service
cp etc/mesh_bot_w3_server.tmp etc/mesh_bot_w3_server.service
# set the correct path in the service file
replace="s|/dir/|$program_path/|g"
sed -i "$replace" etc/pong_bot.service
sed -i "$replace" etc/mesh_bot.service
sed -i "$replace" etc/mesh_bot_reporting.service
sed -i "$replace" etc/mesh_bot_w3_server.service
# copy modules/custom_scheduler.py template if it does not exist
if [[ ! -f modules/custom_scheduler.py ]]; then
cp etc/custom_scheduler.py modules/custom_scheduler.py
cp etc/custom_scheduler.template modules/custom_scheduler.py
printf "\nCustom scheduler template copied to modules/custom_scheduler.py\n"
fi
@@ -184,15 +247,7 @@ else
read bot
fi
# set the correct path in the service file
replace="s|/dir/|$program_path/|g"
sed -i "$replace" etc/pong_bot.service
sed -i "$replace" etc/mesh_bot.service
sed -i "$replace" etc/mesh_bot_reporting.service
sed -i "$replace" etc/mesh_bot_w3.service
# set the correct user in the service file?
#ask if we should add a user for the bot
# ask if we should add a user for the bot
if [[ $(echo "${embedded}" | grep -i "^n") ]]; then
printf "\nDo you want to add a local user (meshbot) no login, for the bot? (y/n)"
read meshbotservice
@@ -208,7 +263,23 @@ if [[ $(echo "${meshbotservice}" | grep -i "^y") ]] || [[ $(echo "${embedded}" |
else
whoami=$(whoami)
fi
# set basic permissions for the bot user
# set the correct user in the service file
replace="s|User=pi|User=$whoami|g"
sed -i "$replace" etc/pong_bot.service
sed -i "$replace" etc/mesh_bot.service
sed -i "$replace" etc/mesh_bot_reporting.service
sed -i "$replace" etc/mesh_bot_reporting.timer
# set the correct group in the service file
replace="s|Group=pi|Group=$whoami|g"
sed -i "$replace" etc/pong_bot.service
sed -i "$replace" etc/mesh_bot.service
sed -i "$replace" etc/mesh_bot_reporting.service
sed -i "$replace" etc/mesh_bot_reporting.timer
printf "\n service files updated\n"
# add user to groups for serial access
printf "\nAdding user to dialout, bluetooth, and tty groups for serial access\n"
sudo usermod -a -G dialout "$whoami"
sudo usermod -a -G tty "$whoami"
sudo usermod -a -G bluetooth "$whoami"
@@ -216,6 +287,11 @@ echo "Added user $whoami to dialout, tty, and bluetooth groups"
sudo chown -R "$whoami:$whoami" "$program_path/logs"
sudo chown -R "$whoami:$whoami" "$program_path/data"
sudo chown "$whoami:$whoami" "$program_path/config.ini"
sudo chmod 640 "$program_path/config.ini"
echo "Permissions set for meshbot on config.ini"
sudo chmod 750 "$program_path/logs"
sudo chmod 750 "$program_path/data"
echo "Permissions set for meshbot on logs and data directories"
# check and see if some sort of NTP is running
@@ -225,18 +301,6 @@ if ! systemctl is-active --quiet ntp.service && \
printf "\nNo NTP service detected, it is recommended to have NTP running for proper bot operation.\n"
fi
# set the correct user in the service file
replace="s|User=pi|User=$whoami|g"
sed -i "$replace" etc/pong_bot.service
sed -i "$replace" etc/mesh_bot.service
sed -i "$replace" etc/mesh_bot_reporting.service
sed -i "$replace" etc/mesh_bot_w3.service
replace="s|Group=pi|Group=$whoami|g"
sed -i "$replace" etc/pong_bot.service
sed -i "$replace" etc/mesh_bot.service
sed -i "$replace" etc/mesh_bot_reporting.service
sed -i "$replace" etc/mesh_bot_w3.service
printf "\n service files updated\n"
if [[ $(echo "${bot}" | grep -i "^p") ]]; then
# install service for pong bot
@@ -256,6 +320,29 @@ if [[ $(echo "${bot}" | grep -i "^m") ]]; then
service="mesh_bot"
fi
# install mesh_bot_reporting timer to run daily at 4:20 am
# echo ""
# echo "Installing mesh_bot_reporting.timer to run mesh_bot_reporting daily at 4:20 am..."
# sudo cp etc/mesh_bot_reporting.service /etc/systemd/system/
# sudo cp etc/mesh_bot_reporting.timer /etc/systemd/system/
# sudo systemctl daemon-reload
# sudo systemctl enable mesh_bot_reporting.timer
# sudo systemctl start mesh_bot_reporting.timer
# echo "mesh_bot_reporting.timer installed and enabled"
# echo "Check timer status with: systemctl status mesh_bot_reporting.timer"
# echo "List all timers with: systemctl list-timers"
# echo ""
# # install mesh_bot_w3_server service
# echo "Installing mesh_bot_w3_server.service to run the web3 server..."
# sudo cp etc/mesh_bot_w3_server.service /etc/systemd/system/
# sudo systemctl daemon-reload
# sudo systemctl enable mesh_bot_w3_server.service
# sudo systemctl start mesh_bot_w3_server.service
# echo "mesh_bot_w3_server.service installed and enabled"
# echo "Check service status with: systemctl status mesh_bot_w3_server.service"
# echo ""
# check if running on embedded for final steps
if [[ $(echo "${embedded}" | grep -i "^n") ]]; then
# ask if emoji font should be installed for linux
@@ -315,8 +402,14 @@ if [[ $(echo "${embedded}" | grep -i "^n") ]]; then
printf "sudo journalctl -u %s.service\n" "$service" >> install_notes.txt
printf "sudo systemctl stop %s.service\n" "$service" >> install_notes.txt
printf "sudo systemctl disable %s.service\n" "$service" >> install_notes.txt
printf "Reporting chron job added to run report_generator5.py\n" >> install_notes.txt
printf "chronjob: %s\n" "$chronjob" >> install_notes.txt
printf "sudo systemctl disable %s.service\n" "$service" >> install_notes.txt
printf "\n older chron statment to run the report generator hourly:\n" >> install_notes.txt
printf "0 * * * * /usr/bin/python3 $program_path/etc/report_generator5.py" >> install_notes.txt
printf " to edit crontab run 'crontab -e'\n" >> install_notes.txt
printf "\nmesh_bot_reporting.timer installed to run daily at 4:20 am\n" >> install_notes.txt
printf "Check timer status: systemctl status mesh_bot_reporting.timer\n" >> install_notes.txt
printf "List all timers: systemctl list-timers\n" >> install_notes.txt
printf "View timer logs: journalctl -u mesh_bot_reporting.timer\n" >> install_notes.txt
printf "*** Stay Up to date using 'bash update.sh' ***\n" >> install_notes.txt
if [[ $(echo "${venv}" | grep -i "^y") ]]; then
@@ -347,14 +440,17 @@ else
sudo systemctl daemon-reload
sudo systemctl enable $service.service
sudo systemctl start $service.service
# check if the cron job already exists
if ! crontab -l | grep -q "$chronjob"; then
# add the cron job to run the report_generator5.py script
(crontab -l 2>/dev/null; echo "$chronjob") | crontab -
printf "\nAdded cron job to run report_generator5.py\n"
else
printf "\nCron job already exists, skipping\n"
fi
sudo systemctl daemon-reload
# # check if the cron job already exists
# if ! crontab -l | grep -q "$chronjob"; then
# # add the cron job to run the report_generator5.py script
# (crontab -l 2>/dev/null; echo "$chronjob") | crontab -
# printf "\nAdded cron job to run report_generator5.py\n"
# else
# printf "\nCron job already exists, skipping\n"
# fi
# document the service install
printf "Reference following commands:\n\n" > install_notes.txt
printf "sudo systemctl status %s.service\n" "$service" >> install_notes.txt
printf "sudo systemctl start %s.service\n" "$service" >> install_notes.txt
@@ -363,6 +459,12 @@ else
printf "sudo journalctl -u %s.service\n" "$service" >> install_notes.txt
printf "sudo systemctl stop %s.service\n" "$service" >> install_notes.txt
printf "sudo systemctl disable %s.service\n" "$service" >> install_notes.txt
printf "older crontab to run the report generator hourly:" >> install_notes.txt
printf "0 * * * * /usr/bin/python3 $program_path/etc/report_generator5.py" >> install_notes.txt
printf " to edit crontab run 'crontab -e'" >> install_notes.txt
printf "\nmesh_bot_reporting.timer installed to run daily at 4:20 am\n" >> install_notes.txt
printf "Check timer status: systemctl status mesh_bot_reporting.timer\n" >> install_notes.txt
printf "List all timers: systemctl list-timers\n" >> install_notes.txt
printf "*** Stay Up to date using 'bash update.sh' ***\n" >> install_notes.txt
fi
@@ -374,13 +476,23 @@ exit 0
# sudo systemctl stop mesh_bot
# sudo systemctl disable mesh_bot
# sudo systemctl stop pong_bot
# sudo systemctl disable pong_bot
# sudo systemctl stop mesh_bot_w3_server
# sudo systemctl disable mesh_bot_w3_server
# sudo systemctl stop mesh_bot_reporting
# sudo systemctl disable mesh_bot_reporting
# sudo rm /etc/systemd/system/mesh_bot.service
# sudo rm /etc/systemd/system/mesh_bot_w3.service
# sudo rm /etc/systemd/system/mesh_bot_reporting
# sudo rm /etc/systemd/system/pong_bot.service
# sudo rm /etc/systemd/system/mesh_bot_w3_server.service
# sudo rm /etc/systemd/system/mesh_bot_reporting.service
# sudo rm /etc/systemd/system/mesh_bot_reporting.timer
# sudo systemctl daemon-reload
# sudo systemctl reset-failed
@@ -390,7 +502,14 @@ exit 0
# sudo groupdel meshbot
# sudo userdel meshbot
# sudo rm -rf /opt/meshing-around
# sudo rm -rf /opt/meshing-around/
# If Ollama was installed and you want to remove it:
# sudo systemctl stop ollama
# sudo systemctl disable ollama
# sudo rm /etc/systemd/system/ollama.service
# sudo rm -rf /usr/local/bin/ollama
# sudo rm -rf ~/.ollama
# after install shenannigans

File diff suppressed because it is too large Load Diff

View File

@@ -1,10 +1,8 @@
# Meshtastic Mesh-Bot Modules
This document provides an overview of all modules available in the Mesh-Bot project, including their features, usage, and configuration.
Updated Oct-2025 "ver 1.9.8.4"
This document provides an overview of all modules available in the Mesh-Bot project, including their features, usage, and configuration.
---
## Table of Contents
- [Overview](#overview)
@@ -12,20 +10,21 @@ Updated Oct-2025 "ver 1.9.8.4"
- [Games](#games)
- [BBS (Bulletin Board System)](#bbs-bulletin-board-system)
- [Checklist](#checklist)
- [Inventory & Point of Sale](#inventory--point-of-sale)
- [Location & Weather](#location--weather)
- [Map Command](#map-command)
- [EAS & Emergency Alerts](#eas--emergency-alerts)
- [File Monitoring & News](#file-monitoring--news)
- [Radio Monitoring](#radio-monitoring)
- [Voice Commands (VOX)](#voice-commands-vox)
- [Ollama LLM/AI](#ollama-llmai)
- [Wikipedia Search](#wikipedia-search)
- [Scheduler](#-mesh-bot-scheduler-user-guide)
- [DX Spotter Module](#dx-spotter-module)
- [Mesh Bot Scheduler User Guide](#mesh-bot-scheduler-user-guide)
- [Other Utilities](#other-utilities)
- [Configuration](#configuration)
- [Messaging Settings](#messaging-settings)
- [Troubleshooting](#troubleshooting)
- [Adding your Own](adding_more.md)
- [Configuration Guide](#configuration-guide)
---
## Overview
@@ -129,13 +128,153 @@ more at [meshBBS: How-To & API Documentation](bbstools.md)
## Checklist
### Enhanced Check-in/Check-out System
The checklist module provides asset tracking and accountability features with safety monitoring capabilities.
#### Basic Commands
| Command | Description |
|--------------|-----------------------------------------------|
| `checkin` | Check in a node/asset |
| `checkout` | Check out a node/asset |
| `checklist` | Show checklist database |
| `checklist` | Show active check-ins |
| `purgein` | Delete your check-in record |
| `purgeout` | Delete your check-out record |
Enable in `[checklist]` section of `config.ini`.
#### Advanced Features
- **Safety Monitoring with Time Intervals**
- Check in with an expected interval: `checkin 60 Hunting in tree stand`
- The system will track if you don't check back in within the specified time (in minutes)
- Ideal for solo activities, remote work, or safety accountability
- **Approval Workflow**
- `checklistapprove <id>` - Approve a pending check-in (admin)
- `checklistdeny <id>` - Deny/remove a check-in (admin)
more at [modules/checklist.md](modules/checklist.md)
#### Examples
```
# Basic check-in
checkin Arrived at campsite
# Check-in with 30-minute monitoring interval
checkin 30 Solo hiking on north trail
# Check out when done
checkout Heading back to base
# View all active check-ins
checklist
```
#### Configuration
Enable in `[checklist]` section of `config.ini`:
```ini
[checklist]
enabled = True
checklist_db = data/checklist.db
reverse_in_out = False
```
---
## Inventory & Point of Sale
### Complete Inventory Management System
The inventory module provides a full point-of-sale (POS) system with inventory tracking, cart management, and transaction logging.
#### Item Management Commands
| Command | Description |
|--------------|-----------------------------------------------|
| `itemadd <name> <qty> [price] [loc]` | Add new item to inventory |
| `itemremove <name>` | Remove item from inventory |
| `itemadd <name> <qty> [price] [loc]` | Update item price or quantity |
| `itemsell <name> <qty> [notes]` | Quick sale (bypasses cart) |
| `itemloan <name> <note>` | Loan/checkout an item |
| `itemreturn <transaction_id>` | Reverse a transaction |
| `itemlist` | View all inventory items |
| `itemstats` | View today's sales statistics |
#### Cart Commands
| Command | Description |
|--------------|-----------------------------------------------|
| `cartadd <name> <qty>` | Add item to your cart |
| `cartremove <name>` | Remove item from cart |
| `cartlist` or `cart` | View your cart |
| `cartbuy` or `cartsell` | Complete transaction |
| `cartclear` | Empty your cart |
more at [modules/inventory.py](modules/inventory.py)
#### Features
- **Transaction Tracking**: All sales are logged with timestamps and user information
- **Cart Management**: Build up orders before completing transactions
- **Penny Rounding**: Optional rounding for cash sales (USA mode)
- Cash sales round down
- Taxed sales round up
- **Hot Item Stats**: Track best-selling items
- **Location Tracking**: Optional warehouse/location field for items
- **Transaction History**: Full audit trail of all sales and returns
#### Examples
```
# Add items to inventory
itemadd Radio 149.99 5 Shelf-A
itemadd Battery 12.50 20 Warehouse-B
# View inventory
itemlist
# Add items to cart
cartadd Radio 2
cartadd Battery 4
# View cart
cartlist
# Complete sale
cartsell Customer purchase
# Quick sale without cart
itemsell Battery 1 Emergency sale
# View today's stats
itemstats
# Process a return
itemreturn 123
```
#### Configuration
Enable in `[inventory]` section of `config.ini`:
```ini
[inventory]
enabled = True
inventory_db = data/inventory.db
# Set to True to enable penny rounding for USA cash sales
disable_penny = False
```
#### Database Schema
The system uses SQLite with four tables:
- **items**: Product inventory
- **transactions**: Sales records
- **transaction_items**: Line items for each transaction
- **carts**: Temporary shopping carts
---
@@ -148,7 +287,7 @@ Enable in `[checklist]` section of `config.ini`.
| `wxa` | NOAA alerts |
| `wxalert` | NOAA alerts (expanded) |
| `mwx` | NOAA Coastal Marine Forecast |
| `tide` | NOAA tide info |
| `tide` | Tide info (NOAA/tidepredict for global) |
| `riverflow` | NOAA river flow info |
| `earthquake` | USGS earthquake info |
| `valert` | USGS volcano alerts |
@@ -160,6 +299,8 @@ Enable in `[checklist]` section of `config.ini`.
Configure in `[location]` section of `config.ini`.
**Note**: For global tide predictions outside the US, enable `useTidePredict = True` in `config.ini`. See [xtide.md](xtide.md) for setup details.
The following section documents the `mapHandler` map command for mesh-bot users:
---
@@ -220,11 +361,77 @@ Configure in `[fileMon]` section of `config.ini`.
## Radio Monitoring
The Radio Monitoring module provides several ways to integrate amateur radio software with the mesh network.
### Hamlib Integration
| Command | Description |
|--------------|-----------------------------------------------|
| `radio` | Monitor radio SNR via Hamlib |
Configure in `[radioMon]` section of `config.ini`.
Monitors signal strength (S-meter) from a connected radio via Hamlib's `rigctld` daemon. When the signal exceeds a configured threshold, it broadcasts an alert to the mesh network with frequency and signal strength information.
### WSJT-X Integration
Monitors WSJT-X decode messages (FT8, FT4, WSPR, etc.) via UDP and forwards them to the mesh network. You can optionally filter by specific callsigns.
**Features:**
- Listens to WSJT-X UDP broadcasts (default port 2237)
- Decodes WSJT-X protocol messages
- Filters by watched callsigns (or monitors all if no filter is set)
- Forwards decode messages with SNR information to configured mesh channels
**Example Output:**
```
WSJT-X FT8: CQ K7MHI CN87 (+12dB)
```
### JS8Call Integration
Monitors JS8Call messages via TCP API and forwards them to the mesh network. You can optionally filter by specific callsigns.
**Features:**
- Connects to JS8Call TCP API (default port 2442)
- Listens for directed and activity messages
- Filters by watched callsigns (or monitors all if no filter is set)
- Forwards messages with SNR information to configured mesh channels
**Example Output:**
```
JS8Call from W1ABC: HELLO WORLD (+8dB)
```
### Configuration
Configure all radio monitoring features in the `[radioMon]` section of `config.ini`:
```ini
[radioMon]
# Hamlib monitoring
enabled = False
rigControlServerAddress = localhost:4532
signalDetectionThreshold = -10
# WSJT-X monitoring
wsjtxDetectionEnabled = False
wsjtxUdpServerAddress = 127.0.0.1:2237
wsjtxWatchedCallsigns = K7MHI,W1AW
# JS8Call monitoring
js8callDetectionEnabled = False
js8callServerAddress = 127.0.0.1:2442
js8callWatchedCallsigns = K7MHI,W1AW
# Broadcast settings (shared by all radio monitoring)
sigWatchBroadcastCh = 2
sigWatchBroadcastInterface = 1
```
**Configuration Notes:**
- Leave `wsjtxWatchedCallsigns` or `js8callWatchedCallsigns` empty to monitor all callsigns
- Callsigns are comma-separated, case-insensitive
- Both services can run simultaneously
- Messages are broadcast to the same channels as Hamlib alerts
---
@@ -256,18 +463,78 @@ Enable and configure VOX features in the `[vox]` section of `config.ini`.
Configure in `[ollama]` section of `config.ini`.
More at [LLM Readme](llm.md)
---
## Wikipedia Search
| Command | Description |
|--------------|-----------------------------------------------|
| `wiki:` | Search Wikipedia or local Kiwix server |
| `wiki` | Search Wikipedia or local Kiwix server |
Configure in `[wikipedia]` section of `config.ini`.
---
## DX Spotter Module
The DX Spotter module allows you to fetch and display recent DX cluster spots from [spothole.app](https://spothole.app) directly in your mesh-bot.
### Command
| Command | Description |
|---------|------------------------------|
| `dx` | Show recent DX cluster spots |
### Usage
Send a message to the bot containing the `dx` command. You can add filters to narrow down the results:
- **Basic usage:**
```
dx
```
Returns the latest DX spots.
- **With filters:**
```
dx band=20m mode=SSB
dx xota=WWFF
dx by=K7MHI
```
- `band=`: Filter by band (e.g., 20m, 40m)
- `mode=`: Filter by mode (e.g., SSB, CW, FT8)
- `ota=`: Filter by source/group (e.g., WWFF, POTA, SOTA)
- `of=`: Filter by callsign of the spotted DX
### Example Output
```
K7ABC @14.074 MHz FT8 WWFF KFF-1234 by:N0CALL CN87 Some comment
W1XYZ @7.030 MHz CW SOTA W7W/WE-001 by:K7MHI CN88
```
- Each line shows:
`DX_CALL @FREQUENCY MODE GROUP GROUP_REF by:SPOTTER_CALL SPOTTER_GRID COMMENT`
### Notes
- Returns up to 4 of the most recent spots matching your filters.
- Data is fetched from [spothole.app](https://spothole.app/).
- If no spots are found, you'll see:
`No DX spots found.`
### Configuration
```ini
[radioMon]
dxspotter_enabled = True
```
---
## 📅 Mesh Bot Scheduler User Guide
Automate messages and tasks using the scheduler module.
@@ -276,18 +543,24 @@ Configure in `[scheduler]` section of `config.ini`.
See modules/custom_scheduler.py for advanced scheduling using python
**Purpose:**
`scheduler.py` provides automated scheduling for Mesh Bot, allowing you to send messages, jokes, weather updates, and custom actions at specific times or intervals.
`scheduler.py` provides automated scheduling for Mesh Bot, allowing you to send messages, jokes, weather updates, news, RSS feeds, marine weather, system info, tide info, sun info, and custom actions at specific times or intervals.
**How to Use:**
- The scheduler is configured via your bot's settings or commands, specifying what to send, when, and on which channel/interface.
- Supports daily, weekly, hourly, and minutely schedules, as well as special jobs like jokes and weather.
- Supports daily, weekly, hourly, and minutely schedules, as well as special jobs like jokes, weather, news, RSS feeds, marine weather, system info, tide info, and sun info.
- For advanced automation, you can define your own schedules in `etc/custom_scheduler.py` (copied to `modules/custom_scheduler.py` at install).
**Features:**
- **Basic Scheduling:** Send messages on a set schedule (e.g., every day at 09:00, every Monday at noon, every hour, etc.).
- **Joke Scheduler:** Automatically send jokes at a chosen interval.
- **Weather Scheduler:** Send weather updates at a chosen interval.
- **Custom Scheduler:** Import and run your own scheduled jobs by editing `custom_scheduler.py`.
- **Joke Scheduler:** Automatically send jokes every x min
- **Weather Scheduler:** Send weather updates at time of day, daily.
- **News Scheduler:** Send news updates at specified intervals.
- **RSS Scheduler:** Send RSS feed updates at specified intervals.
- **Marine Weather Scheduler:** Send marine weather forecasts at time of day, daily.
- **System Info Scheduler:** Send system information at specified intervals.
- **Tide Scheduler:** Send tide information at time of day, daily.
- **Sun Scheduler:** Send sun information (sunrise/sunset) at time of day, daily.
- **Custom Scheduler:** run your own scheduled jobs by editing `custom_scheduler.py`.
- **Logging:** All scheduling actions are logged for debugging and monitoring.
**Example Configuration:**
@@ -306,7 +579,6 @@ To send a daily message at 09:00:
- All scheduled jobs run asynchronously as long as the bot is running.
- For troubleshooting, check the logs for scheduler activity and errors.
### Basic Scheduler Options
You can schedule messages or actions using the following options in your configuration:
@@ -348,11 +620,53 @@ You can schedule messages or actions using the following options in your configu
- → Sends a bbslink message every 2 hours.
#### **weather**
- Schedules the bot to send a weather update at the specified interval (in hours).
- Schedules the bot to send a weather update at the specified time of day, daily.
- **Example:**
- Option: `weather`
- Interval: `3`
- → Sends a weather update every 3 hours.
- Time: `08:00`
- → Sends a weather update daily at 8:00a.
#### **news**
- Schedules the bot to send news updates at the specified interval (in hours).
- **Example:**
- Option: `news`
- Interval: `6`
- → Sends news updates every 6 hours.
#### **readrss**
- Schedules the bot to send RSS feed updates at the specified interval (in hours).
- **Example:**
- Option: `readrss`
- Interval: `4`
- → Sends RSS feed updates every 4 hours.
#### **mwx**
- Schedules the bot to send marine weather updates at the specified time of day, daily.
- **Example:**
- Option: `mwx`
- Time: `06:00`
- → Sends marine weather updates daily at 6:00a.
#### **sysinfo**
- Schedules the bot to send system information at the specified interval (in hours).
- **Example:**
- Option: `sysinfo`
- Interval: `12`
- → Sends system information every 12 hours.
#### **tide**
- Schedules the bot to send tide information at the specified time of day, daily.
- **Example:**
- Option: `tide`
- Time: `05:00`
- → Sends tide information daily at 5:00a.
#### **solar**
- Schedules the bot to send sun information (sunrise/sunset) at the specified time of day, daily.
- **Example:**
- Option: `solar`
- Time: `06:00`
- → Sends sun information daily at 6:00a.
---
@@ -368,16 +682,6 @@ You can use any of these options to schedule messages on specific days:
---
### Configuration Fields
- **schedulerValue**: The schedule type (e.g., `day`, `joke`, `weather`, `mon`, etc.)
- **schedulerTime**: The time to run (e.g., `08:00`). Leave blank for interval-based schedules.
- **schedulerInterval**: The interval (e.g., `2` for every 2 hours/days/minutes).
- **schedulerChannel**: The channel number to send to.
- **schedulerInterface**: The device/interface number.
---
## Other Utilities
- `motd` — Message of the day
@@ -538,9 +842,6 @@ If you continue to have issues, review the logs for error messages and consult t
---
### Configuration Guide
The following is documentation for the config.ini file
@@ -720,29 +1021,6 @@ enabled = True
repeater_channels = [2, 3]
```
### Ollama (LLM/AI) Settings
For Ollama to work, the command line `ollama run 'model'` needs to work properly. Ensure you have enough RAM and your GPU is working as expected. The default model for this project is set to `gemma3:270m`. Ollama can also be remote — see [Ollama Server](https://github.com/ollama/ollama/blob/main/docs/faq.md#how-do-i-configure-ollama-server). It works on a Pi 5 8GB with a 40-second or less response time.
```ini
# Enable ollama LLM see more at https://ollama.com
ollama = True # enable the Ollama LLM module
ollamaModel = gemma3:latest # Ollama model to use (defaults to gemma3:270m)
ollamaHostName = http://localhost:11434 # server instance to use (defaults to local machine install)
```
Also see `llm.py` for changing the defaults of:
```ini
# LLM System Variables
rawQuery = True # if True, the input is sent raw to the LLM if False, it is processed by the meshBotAI template
# Used in the meshBotAI template (legacy)
llmEnableHistory = True # enable history for the LLM model to use in responses adds to compute time
llmContext_fromGoogle = True # enable context from google search results helps with responses accuracy
googleSearchResults = 3 # number of google search results to include in the context more results = more compute time
```
Note for LLM in docker with [NVIDIA](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/docker-specialized.html). Needed for the container with ollama running.
### Wikipedia Search Settings
The Wikipedia search module can use either the online Wikipedia API or a local Kiwix server for offline wiki access. Kiwix is especially useful for mesh networks operating in remote or offline environments.
@@ -766,15 +1044,18 @@ To set up a local Kiwix server:
1. Install Kiwix tools: https://kiwix.org/en/ `sudo apt install kiwix-tools -y`
2. Download a Wikipedia ZIM file to `data/`: https://library.kiwix.org/ `wget https://download.kiwix.org/zim/wikipedia/wikipedia_en_100_nopic_2025-09.zim`
3. Run the server: `kiwix-serve --port 8080 wikipedia_en_100_nopic_2025-09.zim`
4. Set `useKiwixServer = True` in your config.ini
4. Set `useKiwixServer = True` in your config.ini with `wikipedia = True`
The bot will automatically extract and truncate content to fit Meshtastic's message size limits (~500 characters).
### Radio Monitoring
A module allowing a Hamlib compatible radio to connect to the bot. When functioning, it will message the configured channel with a message of in use. **Requires hamlib/rigctld to be running as a service.**
Additionally, the module supports monitoring WSJT-X and JS8Call for amateur radio digital modes.
```ini
[radioMon]
# Hamlib monitoring
enabled = True
rigControlServerAddress = localhost:4532
sigWatchBroadcastCh = 2 # channel to broadcast to can be 2,3
@@ -782,8 +1063,30 @@ signalDetectionThreshold = -10 # minimum SNR as reported by radio via hamlib
signalHoldTime = 10 # hold time for high SNR
signalCooldown = 5 # the following are combined to reset the monitor
signalCycleLimit = 5
# WSJT-X monitoring (FT8, FT4, WSPR, etc.)
# Monitors WSJT-X UDP broadcasts and forwards decode messages to mesh
wsjtxDetectionEnabled = False
wsjtxUdpServerAddress = 127.0.0.1:2237 # UDP address and port where WSJT-X broadcasts
wsjtxWatchedCallsigns = # Comma-separated list of callsigns to watch (empty = all)
# JS8Call monitoring
# Connects to JS8Call TCP API and forwards messages to mesh
js8callDetectionEnabled = False
js8callServerAddress = 127.0.0.1:2442 # TCP address and port where JS8Call API listens
js8callWatchedCallsigns = # Comma-separated list of callsigns to watch (empty = all)
# Broadcast settings (shared by Hamlib, WSJT-X, and JS8Call)
sigWatchBroadcastInterface = 1
```
**Setup Notes:**
- **WSJT-X**: Enable UDP Server in WSJT-X settings (File → Settings → Reporting → Enable UDP Server)
- **JS8Call**: Enable TCP Server in JS8Call settings (File → Settings → Reporting → Enable TCP Server API)
- Both services can run simultaneously
- Leave callsign filters empty to monitor all activity
- Callsigns are case-insensitive and comma-separated (e.g., `K7MHI,W1AW`)
### File Monitoring
Some dev notes for ideas of use
@@ -840,39 +1143,5 @@ qrz_hello_string = "send CMD or DM me for more info." # will be sent to all hear
training = True # Training mode will not send the hello message to new nodes, use this to build up database
```
### Scheduler
In the config.ini enable the module
```ini
[scheduler]
enabled = False # enable or disable the scheduler module
interface = 1 # channel to send the message to
channel = 2
message = "MeshBot says Hello! DM for more info."
value = # value can be min,hour,day,mon,tue,wed,thu,fri,sat,sun.
# value can also be joke (everyXmin) or weather (hour) for special scheduled messages
# custom for module/scheduler.py custom schedule examples
interval = # interval to use when time is not set (e.g. every 2 days)
time = # time of day in 24:00 hour format when value is 'day' and interval is not set
```
The basic broadcast message can be set up in config.ini. For advanced usage, see [modules/scheduler.py](modules/scheduler.py) to edit the schedule. See the [schedule documentation](https://schedule.readthedocs.io/en/stable/) for more. It is recommended to back up changes so they don't get lost.
```python
#Send WX every Morning at 08:00 using handle_wxc function to channel 2 on device 1
schedule.every().day.at("08:00").do(lambda: send_message(handle_wxc(0, 1, 'wx'), 2, 0, 1))
#Send a Net Starting Now Message Every Wednesday at 19:00 using send_message function to channel 2 on device 1
schedule.every().wednesday.at("19:00").do(lambda: send_message("Net Starting Now", 2, 0, 1))
```
#### BBS Link
The scheduler also handles the BBS Link Broadcast message, this would be an example of a mesh-admin channel on 8 being used to pass BBS post traffic between two bots as the initiator, one direction pull. The message just needs to have bbslink
```python
# Send bbslink looking for peers every other day at 10:00 using send_message function to channel 8 on device 1
schedule.every(2).days.at("10:00").do(lambda: send_message("bbslink MeshBot looking for peers", 8, 0, 1))
```
```ini
bbslink_enabled = True
bbslink_whitelist = # list of whitelisted nodes numbers ex: 2813308004,4258675309 empty list allows all
```
Happy meshing!

View File

@@ -129,4 +129,79 @@ This will call the default script located at `script/runShell.sh` and return its
---
## Overview Unit Tests
The test_bot.py file contains a comprehensive suite of unit tests for the various modules of the project. The tests are organized using Python's `unittest` framework and cover both core utility modules and all major game modules.
---
## Structure
- **Imports & Setup:**
The script sets up the environment, imports all necessary modules, and suppresses certain warnings for clean test output.
- **TestBot Class:**
All tests are methods of the `TestBot` class, which inherits from `unittest.TestCase`.
---
## Core Module Tests
- **Database & Checklist:**
- `test_load_bbsdb`, `test_bbs_list_messages`, `test_initialize_checklist_database`
- **News & Alerts:**
- `test_init_news_sources`, `test_get_nina_alerts`
- **LLM & Wikipedia:**
- `test_llmTool_get_google`, `test_send_ollama_query`, `test_get_wikipedia_summary`, `test_get_kiwix_summary`
- **Space & Weather:**
- `test_get_moon_phase`, `test_get_sun_times`, `test_hf_band_conditions`
- **Radio & Location:**
- `test_get_hamlib`, `test_get_rss_feed`, `get_openskynetwork`, `test_initalize_qrz_database`
---
## Game Module Tests
Each game module has a dedicated test that simulates a typical user interaction:
- **Tic-Tac-Toe:**
Starts a game and makes one move.
- **Video Poker:**
Starts a session and places a bet.
- **Blackjack:**
Starts a game and places a bet.
- **Hangman:**
Starts a game and guesses a letter.
- **Lemonade Stand:**
Starts a game and buys a box of cups.
- **GolfSim:**
Starts a hole and takes a shot.
- **DopeWars:**
Starts a game, selects a city, and checks the list.
- **MasterMind:**
Starts a game and makes one guess.
- **Quiz:**
Starts a quiz, joins as a player, answers one question, and ends the quiz.
- **Survey:**
Starts a survey, answers one question, and ends the survey.
- **HamTest:**
Starts a ham radio test and answers one question.
---
## Extended API Tests
If the `.checkall` file is present, additional API and data-fetching tests are run for:
- RepeaterBook, ArtSciRepeaters, NOAA tides/weather, USGS earthquakes/volcanoes, satellite passes, and more.
## Notes
- Tests are designed to be **non-destructive** and **idempotent**.
- Some tests require specific data files (e.g., for quiz, survey, hamtest).
- The suite is intended to be run from the main program directory.
Happy hacking!

View File

@@ -8,50 +8,56 @@ This document covers the Bulletin Board System or BBS component of the meshing-
## Table of Contents
1. [BBS Core Functions](#1-bbs-core-functions)
- [Direct Messages (DMs)](#11-direct-messages-dms)
2. [BBS Database Sync: File-Based (Out-of-Band)](#1-bbs-database-sync-file-based-out-of-band)
3. [BBS Over-the-Air (OTA) Sync: Linking](#2-bbs-over-the-air-ota-sync-linking)
4. [Scheduling BBS Sync](#3-scheduling-bbs-sync)
5. [Best Practices](#4-best-practices)
6. [Example: Full Sync Workflow](#5-example-full-sync-workflow)
7. [Troubleshooting](#6-troubleshooting)
8. [API Reference: BBS Sync](#7-api-reference-bbs-sync)
- [Central Message Store](#11-central-message-store)
- [Direct Mail (DM) Messages](#12-direct-mail-dm-messages)
- [BBS Commands](#bbs-commands)
2. [Synchronization bot2bot: Full Sync Workflow](#2-synchronization-bot2bot--full-sync-workflow)
- [BBS Database Sync: File-Based (Out-of-Band)](#21-bbs-database-sync-file-based-out-of-band)
- [BBS Over-the-Air (OTA) Sync: Linking](#22-bbs-over-the-air-ota-sync-linking)
- [Scheduling BBS Auto Sync](#23-scheduling-bbs-auto-sync)
3. [Troubleshooting](#4-troubleshooting)
4. [API Reference: BBS Sync](#5-api-reference-bbs-sync)
5. [Best Practices](#5-best-practices)
## 1. **BBS Core Functions**
The mesh-bot provides a basic message mail system for Meshtastic
## 1.1 **Direct Messages (DMs)**
## 1.1 Central Message Store
### **How DMs Work**
- Direct Messages (DMs) are private messages sent from one node to another.
- DMs are stored separately from public posts in `data/bbsdm.pkl`.
- Each DM entry in the pickle, typically includes: `[id, toNode, message, fromNode, timestamp, threadID, replytoID]`.
- **Shared public message space** for all nodes.
- Classic BBS list with a simple, one-level message tree.
- Messages are stored in `data/bbsdb.pkl`.
- Each entry typically includes:
`[id, subject, body, fromNode, timestamp, threadID, replytoID]`
### **DM Delivery**
- When a DM is posted using `bbs_post_dm(toNode, message, fromNode)`, it is added to the recipient's DM database.
- DMs can be delivered in two ways:
1. **File-Based Sync:**
- The `bbsdm.pkl` file is copied between nodes using SCP, rsync, or other file transfer methods.
- After syncing, the recipient node can check for new DMs using `bbs_check_dm(toNode)`.
2. **Over-the-Air (OTA) Sync:**
- DMs can be exchanged between nodes using the same OTA sync mechanism as other posts.
- The bot will receive (onRX) or detect any packet and deliver the DM/mail to the recipient.
- DMs are only visible to the intended recipient node and are not listed in the public message list.
### Posting to Public
### **DM Commands**
| Command | Description |
|-----------------|---------------------------------------------|
| `bbs_post_dm` | Send a direct message to another node |
| `bbs_check_dm` | Check for new DMs for your node |
| `bbs_delete_dm` | Delete a DM after reading |
To post a public message:
```sh
bbspost $Subject #Message
```
---
## 1.2 Direct Mail (DM) Messages
- **DMs are private messages** sent from one node to another.
- Stored separately from public posts in `data/bbsdm.pkl`.
- Each DM entry typically includes:
`[id, toNode, message, fromNode, timestamp, threadID, replytoID]`
- You can inject DMs directly for automation using the `script/injectDM.py` tool.
### **Message Storage**
The message databases are:
- Messages are stored in `data/bbsdb.pkl` (public posts) and `data/bbsdm.pkl` (direct messages).
- Format: Each message is a list, e.g. `[id, subject, body, fromNode, timestamp, threadID, replytoID]`.
### DM Delivery
- To post a DM, use:
```sh
bbspost @USER #Message
```
- When a DM is posted, it is added to the DM database.
- When the bot detects the recipient node on the network, it delivers the DM and then removes it from local storage.
---
### BBS Commands
| Command | Description |
|--------------|-----------------------------------------------|
@@ -64,12 +70,26 @@ The .. database is
| `bbslink` | Link messages between BBS systems |
---
Enable in `[bbs]` section of `config.ini`.
## 1. **BBS Database Sync: File-Based (Out-of-Band)**
## 2. **Synchronization bot2bot : Full Sync Workflow**
1. **Set up a dedicated sync channel** (e.g., channel bot-admin).
2. **Configure both nodes** with `bbs_link_enabled = True` and add each other to `bbs_link_whitelist`.
3. **Schedule sync** every hour:
- Node A sends `bbslink 0` to Node B on channel 99.
- Node B responds with messages and `bbsack`.
4. **Optionally, use SSH/scp** to copy `bbsdb.pkl` for full out-of-band backup.
## 2.1. **BBS Database Sync: File-Based (Out-of-Band)**
### **Manual/Automated File Sync (e.g., SSH/SCP)**
- **Purpose:** Sync BBS data between nodes by copying `bbsdb.pkl` and `bbsdm.pkl` files.
```ini
[bbs]
# The "api" needs enabled which enables file polling
bbsAPI_enabled = True
```
- **How-To:**
1. **Locate Files:**
- `data/bbsdb.pkl` (public posts)
@@ -88,7 +108,7 @@ Enable in `[bbs]` section of `config.ini`.
---
## 2. **BBS Over-the-Air (OTA) Sync: Linking**
## 2.2. **BBS Over-the-Air (OTA) Sync: Linking**
### **How OTA Sync Works**
- Nodes can exchange BBS messages using special commands over the mesh network.
- Uses `bbslink` and `bbsack` commands for message exchange.
@@ -109,36 +129,43 @@ Enable in `[bbs]` section of `config.ini`.
- For high-reliability sync, configure a dedicated channel (not used for chat).
---
## 3. **Scheduling BBS Sync**
## 2.3. **Scheduling BBS Auto Sync**
### **Using the Bot's Scheduler**
- You can schedule periodic sync requests to a peer node.
- Example: Every hour, send a `bbslink` request to a peer.
see more at [Module Readme](README.md#scheduler)
---
## 4. **Best Practices**
- **Backup:** Regularly back up `bbsdb.pkl` and `bbsdm.pkl`.
- **Security:** Use SSH keys for file transfer; restrict OTA sync to trusted nodes.
- **Reliability:** Use a dedicated channel for BBS sync to avoid chat congestion.
- **Automation:** Use the scheduler for regular syncs, both file-based and OTA.
---
## 5. **Example: Full Sync Workflow**
#### BBS Link
The scheduler also handles the BBS Link Broadcast message, this would be an example of a mesh-admin channel on 8 being used to pass BBS post traffic between two bots as the initiator, one direction pull. The message just needs to have bbslink
1. **Set up a dedicated sync channel** (e.g., channel bot-admin).
2. **Configure both nodes** with `bbs_link_enabled = True` and add each other to `bbs_link_whitelist`.
3. **Schedule sync** every hour:
- Node A sends `bbslink 0` to Node B on channel 99.
- Node B responds with messages and `bbsack`.
4. **Optionally, use SSH/scp** to copy `bbsdb.pkl` for full out-of-band backup.
```ini
[bbs]
bbslink_enabled = True
bbslink_whitelist = # list of whitelisted nodes numbers ex: 2813308004,4258675309 empty list allows all
[scheduler]
enabled = True
interface = 1
channel = 2
value = link
interval = 12 # 12 hours
```
```python
# Custom Schedule Example if using custom for [scheduler]
# Send bbslink looking for peers every 2 days at 10 AM
schedule.every(2).days.at("10:00").do(send_message("bbslink MeshBot looking for peers", schedulerChannel, 0, schedulerInterface))
```
---
## 6. **Troubleshooting**
---
## 4. **Troubleshooting**
- **Messages not syncing?**
- Check `bbs_link_enabled` and whitelist settings.
@@ -149,7 +176,19 @@ see more at [Module Readme](README.md#scheduler)
- Verify file permissions and paths.
- Ensure the bot reloads the database after file copy.
## 7. **API Reference: BBS Sync**
- **Custom file problems?**
- remove the custom_scheduler.py and replace it with [etc/custom_scheduler.py](etc/custom_scheduler.py)
The bbs link command should include `bbslink`
`.do(send_message("bbslink MeshBot looking for peers", schedulerChannel, 0, schedulerInterface))`
```ini
[bbs]
# The "api" needs enabled which enables file polling and use of `script/injectDM.py`
bbsAPI_enabled = True
```
## 5. **API Reference: BBS Sync**
### **Key Functions in Python**
| Function | Purpose | Usage Example |
@@ -185,5 +224,11 @@ Future Use
- Receiving node uses `bbs_receive_compressed()`.
---
### 5. **Best Practices**
- **Backup:** Regularly back up `bbsdb.pkl` and `bbsdm.pkl`.
- **Security:** Use SSH keys for file transfer; restrict OTA sync to trusted nodes.
- **Reliability:** Use a dedicated channel for BBS sync to avoid chat congestion.
- **Automation:** Use the scheduler for regular syncs, both file-based and OTA.
---

View File

@@ -2,7 +2,8 @@
# K7MHI Kelly Keeton 2024
import pickle # pip install pickle
from modules.log import *
from modules.log import logger
from modules.settings import bbs_admin_list, bbs_ban_list, MESSAGE_CHUNK_SIZE, bbs_link_enabled, bbs_link_whitelist, responseDelay
import time
from datetime import datetime
@@ -21,30 +22,32 @@ bbs_dm = []
def load_bbsdb():
global bbs_messages
# load the bbs messages from the database file
try:
with open('data/bbsdb.pkl', 'rb') as f:
new_bbs_messages = pickle.load(f)
if isinstance(new_bbs_messages, list):
for msg in new_bbs_messages:
#example [1, 'Welcome to meshBBS', 'Welcome to the BBS, please post a message!', 0]
msgHash = hash(tuple(msg[1:3])) # Create a hash of the message content (subject and body)
# Check if the message already exists in bbs_messages
msgHash = hash(tuple(msg[1:3]))
if all(hash(tuple(existing_msg[1:3])) != msgHash for existing_msg in bbs_messages):
# if the message is not a duplicate, add it to bbs_messages Maintain the message ID sequence
new_id = len(bbs_messages) + 1
bbs_messages.append([new_id, msg[1], msg[2], msg[3]])
return True # Loaded successfully, regardless of whether new messages were added
return False # File existed but did not contain a valid list of messages (possibly corrupted)
except FileNotFoundError:
# create a new bbsdb.pkl with a welcome message
# template ([messageID, subject, message, fromNode, now, thread, replyto])
bbs_messages = [[1, "Welcome to meshBBS", "Welcome to the BBS, please post a message!",0,time.strftime('%Y-%m-%d %H:%M:%S'),0,0]]
logger.debug("System: bbsdb.pkl not found, creating new one")
bbs_messages = [[1, "Welcome to meshBBS", "Welcome to the BBS, please post a message!",0]]
try:
with open('data/bbsdb.pkl', 'wb') as f:
pickle.dump(bbs_messages, f)
return True
except Exception as e:
logger.error(f"System: Error creating bbsdb.pkl: {e}")
return False
except Exception as e:
logger.error(f"System: Error loading bbsdb.pkl: {e}")
bbs_messages = [[1, "Welcome to meshBBS", "Welcome to the BBS, please post a message!",0]]
return False
def save_bbsdb():
global bbs_messages

396
modules/checklist.md Normal file
View File

@@ -0,0 +1,396 @@
# Enhanced Check-in/Check-out System
## Overview
The enhanced checklist module provides asset tracking and accountability features with advanced safety monitoring capabilities. This system is designed for scenarios where tracking people, equipment, or assets is critical for safety, accountability, or logistics.
## Key Features
### 🔐 Basic Check-in/Check-out
- Simple interface for tracking when people or assets are checked in or out
- Automatic duration calculation
- Location tracking (GPS coordinates if available)
- Notes support for additional context
### ⏰ Safety Monitoring with Time Intervals
- Set expected check-in intervals for safety (minimum 20 min)
- Automatic tracking of overdue check-ins
- Ideal for solo activities, remote work, or high-risk operations
- Get alerts when someone hasn't checked in within their expected timeframe
### ✅ Approval Workflow
- Admin approval system for check-ins
- Deny/remove unauthorized check-ins
- Maintain accountability and control
### 📍 Location Tracking
- Automatic GPS location capture when checking in/out
- View last known location in checklist
- Track movement over time
- **Time Window Monitoring**: Check-in with safety intervals (e.g., `checkin 60 Hunting in tree stand`)
- Tracks if users don't check in within expected timeframe
- Ideal for solo activities, remote work, or safety accountability
- Provides `get_overdue_checkins()` function for alert integration
- **Approval Workflow**:
- `checklistapprove <id>` - Approve pending check-ins (admin)
- `checklistdeny <id>` - Deny/remove check-ins (admin)
- Support for approval-based workflows
- **Enhanced Database Schema**:
- Added `approved` field for approval workflows
- Added `expected_checkin_interval` field for safety monitoring
- Automatic migration for existing databases
#### New Commands:
- `checklistapprove <id>` - Approve a check-in
- `checklistdeny <id>` - Deny a check-in
- Enhanced `checkin [interval] [note]` - Now supports interval parameter
## Configuration
Add to your `config.ini`:
```ini
[checklist]
enabled = True
checklist_db = data/checklist.db
# Set to True to reverse the meaning of checkin/checkout
reverse_in_out = False
```
## Commands Reference
### Basic Commands
#### Check In
```
checkin [interval] [notes]
```
Check in to the system. Optionally specify a monitoring interval in minutes.
**Examples:**
```
checkin Arrived at base camp
checkin 30 Solo hiking on north trail
checkin 60 Working alone in tree stand
checkin Going hunting
```
#### Check Out
```
checkout [notes]
```
Check out from the system. Shows duration since check-in.
**Examples:**
```
checkout Heading back
checkout Mission complete
checkout
```
#### View Checklist
```
checklist
```
Shows all active check-ins with durations.
**Example Response:**
```
ID: Hunter1 checked-In for 01:23:45📝Solo hunting
ID: Tech2 checked-In for 00:15:30📝Equipment repair
```
#### Purge Records
```
purgein # Delete your check-in record
purgeout # Delete your check-out record
```
Use these to manually remove your records if needed.
### Admin Commands
#### Approve Check-in
```
checklistapprove <checkin_id>
```
Approve a pending check-in (requires admin privileges).
**Example:**
```
checklistapprove 123
```
#### Deny Check-in
```
checklistdeny <checkin_id>
```
Deny and remove a check-in (requires admin privileges).
**Example:**
```
checklistdeny 456
```
## Safety Monitoring Feature
### How Time Intervals Work
When checking in with an interval parameter, the system will track whether you check in again or check out within that timeframe.
```
checkin 60 Hunting in remote area
```
This tells the system:
- You're checking in now
- You expect to check in again or check out within 60 minutes
- If 60 minutes pass without activity, you'll be marked as overdue
### Use Cases for Time Intervals
1. **Solo Activities**: Hunting, hiking, or working alone
```
checkin 30 Solo patrol north sector
```
2. **High-Risk Operations**: Tree work, equipment maintenance
```
checkin 45 Climbing tower for antenna work
```
3. **Remote Work**: Working in isolated areas
```
checkin 120 Survey work in remote canyon
```
4. **Check-in Points**: Regular status updates during long operations
```
checkin 15 Descending cliff face
```
### Overdue Check-ins
The system tracks all check-ins with time intervals and can identify who is overdue. The module provides the `get_overdue_checkins()` function that returns a list of overdue users.
**Note**: Automatic alerts for overdue check-ins require integration with the bot's scheduler or alert system. The checklist module provides the detection capability, but sending notifications must be configured separately through the main bot's alert features.
## Practical Examples
### Example 1: Hunting Scenario
Hunter checks in before going into the field:
```
checkin 60 Hunting deer stand #3, north 40
```
System response:
```
Checked✅In: Hunter1 (monitoring every 60min)
```
If the hunter doesn't check out or check in again within 60 minutes, they will appear on the overdue list.
When done hunting:
```
checkout Heading back to camp
```
System response:
```
Checked⌛Out: Hunter1 duration 02:15:30
```
### Example 2: Emergency Response Team
Team leader tracks team members:
```
# Team members check in
checkin 30 Search grid A-1
checkin 30 Search grid A-2
checkin 30 Search grid A-3
```
Team leader views status:
```
checklist
```
Response shows all active searchers with their durations.
### Example 3: Equipment Checkout
Track equipment loans:
```
checkin Radio #5 for field ops
```
When equipment is returned:
```
checkout Equipment returned
```
### Example 4: Site Survey
Field technicians checking in at locations:
```
# At first site
checkin 45 Site survey tower location 1
# Moving to next site (automatically checks out from first)
checkin 45 Site survey tower location 2
```
## Integration with Other Systems
### Geo-Location Awareness
The checklist system automatically captures GPS coordinates when available. This can be used for:
- Tracking last known position
- Geo-fencing applications
- Emergency response coordination
- Asset location management
### Alert Systems
The overdue check-in feature can trigger:
- Notifications to supervisors
- Emergency alerts
- Automated messages to response teams
- Email/SMS notifications (if configured)
### Scheduling Integration
Combine with the scheduler module to:
- Send reminders to check in
- Automatically generate reports
- Schedule periodic check-in requirements
- Send daily summaries
## Best Practices
### For Users
1. **Always Include Context**: Add notes when checking in
```
checkin 30 North trail maintenance
```
Not just:
```
checkin
```
2. **Set Realistic Intervals**: Don't set intervals too short or too long
- Too short: False alarms
- Too long: Defeats safety purpose
3. **Check Out Promptly**: Always check out when done to clear your status
4. **Use Consistent Naming**: If tracking equipment, use consistent names
### For Administrators
1. **Review Checklist Regularly**: Monitor who is checked in
```
checklist
```
2. **Respond to Overdue Situations**: Act on overdue check-ins promptly
3. **Set Clear Policies**: Establish when and how to use the system
4. **Train Users**: Ensure everyone knows how to use time intervals
5. **Test the System**: Regularly verify the system is working
## Safety Scenarios
### Scenario 1: Tree Stand Hunting
```
checkin 60 Hunting from tree stand at north plot
```
If hunter falls or has medical emergency, they'll be marked overdue after 60 minutes.
### Scenario 2: Equipment Maintenance
```
checkin 30 Generator maintenance at remote site
```
If technician encounters danger, overdue status can be detected. Note: Requires alert system integration to send notifications.
### Scenario 3: Hiking
```
checkin 120 Day hike to mountain peak
```
Longer interval for extended activity, but still provides safety net.
### Scenario 4: Watchstanding
```
checkin 240 Night watch duty
```
Regular check-ins every 4 hours ensure person is alert and safe.
## Database Schema
### checkin Table
```sql
CREATE TABLE checkin (
checkin_id INTEGER PRIMARY KEY,
checkin_name TEXT,
checkin_date TEXT,
checkin_time TEXT,
location TEXT,
checkin_notes TEXT,
approved INTEGER DEFAULT 1,
expected_checkin_interval INTEGER DEFAULT 0
)
```
### checkout Table
```sql
CREATE TABLE checkout (
checkout_id INTEGER PRIMARY KEY,
checkout_name TEXT,
checkout_date TEXT,
checkout_time TEXT,
location TEXT,
checkout_notes TEXT
)
```
## Reverse Mode
Setting `reverse_in_out = True` in config swaps the meaning of checkin and checkout commands. This is useful if your organization uses opposite terminology.
With `reverse_in_out = True`:
- `checkout` command performs a check-in
- `checkin` command performs a check-out
## Migration from Basic Checklist
The enhanced checklist is backward compatible with the basic version. Existing check-ins will continue to work, and new features are optional. The database will automatically upgrade to add new columns when first accessed.
## Troubleshooting
### Not Seeing Overdue Alerts
The overdue detection is built into the module, but alerts need to be configured in the main bot scheduler. Check your scheduler configuration.
### Wrong Duration Shown
Duration is calculated from check-in time to current time. If system clock is wrong, durations will be incorrect. Ensure system time is accurate.
### Can't Approve/Deny Check-ins
These are admin-only commands. Check that your node ID is in the `bbs_admin_list`.
## Support
For issues or feature requests, please file an issue on the GitHub repository.

View File

@@ -2,24 +2,56 @@
# K7MHI Kelly Keeton 2024
import sqlite3
from modules.log import *
from modules.log import logger
from modules.settings import checklist_db, reverse_in_out, bbs_ban_list
import time
trap_list_checklist = ("checkin", "checkout", "checklist", "purgein", "purgeout")
trap_list_checklist = ("checkin", "checkout", "checklist", "purgein", "purgeout",
"checklistapprove", "checklistdeny", "checklistadd", "checklistremove")
def initialize_checklist_database():
# create the database
conn = sqlite3.connect(checklist_db)
c = conn.cursor()
# Check if the checkin table exists, and create it if it doesn't
c.execute('''CREATE TABLE IF NOT EXISTS checkin
(checkin_id INTEGER PRIMARY KEY, checkin_name TEXT, checkin_date TEXT, checkin_time TEXT, location TEXT, checkin_notes TEXT)''')
# Check if the checkout table exists, and create it if it doesn't
c.execute('''CREATE TABLE IF NOT EXISTS checkout
(checkout_id INTEGER PRIMARY KEY, checkout_name TEXT, checkout_date TEXT, checkout_time TEXT, location TEXT, checkout_notes TEXT)''')
conn.commit()
conn.close()
logger.debug("System: Ensured data/checklist.db exists with required tables")
try:
conn = sqlite3.connect(checklist_db)
c = conn.cursor()
# Check if the checkin table exists, and create it if it doesn't
logger.debug("System: Checklist: Initializing database...")
c.execute('''CREATE TABLE IF NOT EXISTS checkin
(checkin_id INTEGER PRIMARY KEY, checkin_name TEXT, checkin_date TEXT,
checkin_time TEXT, location TEXT, checkin_notes TEXT,
approved INTEGER DEFAULT 1, expected_checkin_interval INTEGER DEFAULT 0)''')
# Check if the checkout table exists, and create it if it doesn't
c.execute('''CREATE TABLE IF NOT EXISTS checkout
(checkout_id INTEGER PRIMARY KEY, checkout_name TEXT, checkout_date TEXT,
checkout_time TEXT, location TEXT, checkout_notes TEXT)''')
# Add new columns if they don't exist (for migration)
try:
c.execute("ALTER TABLE checkin ADD COLUMN approved INTEGER DEFAULT 1")
except sqlite3.OperationalError:
pass # Column already exists
try:
c.execute("ALTER TABLE checkin ADD COLUMN expected_checkin_interval INTEGER DEFAULT 0")
except sqlite3.OperationalError:
pass # Column already exists
try:
c.execute("ALTER TABLE checkin ADD COLUMN removed INTEGER DEFAULT 0")
except sqlite3.OperationalError:
pass # Column already exists
# Add this to your DB init (if not already present)
try:
c.execute("ALTER TABLE checkout ADD COLUMN removed INTEGER DEFAULT 0")
except sqlite3.OperationalError:
pass # Column already exists
conn.commit()
conn.close()
return True
except Exception as e:
logger.error(f"Checklist: Failed to initialize database: {e}")
return False
def checkin(name, date, time, location, notes):
location = ", ".join(map(str, location))
@@ -54,7 +86,7 @@ def delete_checkin(checkin_id):
def checkout(name, date, time_str, location, notes):
location = ", ".join(map(str, location))
# checkout a user
checkin_record = None # Ensure variable is always defined
conn = sqlite3.connect(checklist_db)
c = conn.cursor()
try:
@@ -74,18 +106,21 @@ def checkout(name, date, time_str, location, notes):
if checkin_record:
c.execute("INSERT INTO checkout (checkout_name, checkout_date, checkout_time, location, checkout_notes) VALUES (?, ?, ?, ?, ?)", (name, date, time_str, location, notes))
# calculate length of time checked in
c.execute("SELECT checkin_time FROM checkin WHERE checkin_id = ?", (checkin_record[0],))
checkin_time = c.fetchone()[0]
checkin_datetime = time.strptime(date + " " + checkin_time, "%Y-%m-%d %H:%M:%S")
c.execute("SELECT checkin_time, checkin_date FROM checkin WHERE checkin_id = ?", (checkin_record[0],))
checkin_time, checkin_date = c.fetchone()
checkin_datetime = time.strptime(checkin_date + " " + checkin_time, "%Y-%m-%d %H:%M:%S")
time_checked_in_seconds = time.time() - time.mktime(checkin_datetime)
timeCheckedIn = time.strftime("%H:%M:%S", time.gmtime(time_checked_in_seconds))
# # remove the checkin record older than the checkout
# c.execute("DELETE FROM checkin WHERE checkin_date < ? OR (checkin_date = ? AND checkin_time < ?)", (date, date, time_str))
except sqlite3.OperationalError as e:
if "no such table" in str(e):
conn.close()
initialize_checklist_database()
c.execute("INSERT INTO checkout (checkout_name, checkout_date, checkout_time, location, checkout_notes) VALUES (?, ?, ?, ?, ?)", (name, date, time_str, location, notes))
# Try again after initializing
return checkout(name, date, time_str, location, notes)
else:
conn.close()
raise
conn.commit()
conn.close()
@@ -106,18 +141,165 @@ def delete_checkout(checkout_id):
conn.close()
return "Checkout deleted." + str(checkout_id)
def approve_checkin(checkin_id):
    """Approve a pending check-in by id.

    Returns a human-readable status string for the mesh reply.
    """
    db = sqlite3.connect(checklist_db)
    cursor = db.cursor()
    try:
        cursor.execute("UPDATE checkin SET approved = 1 WHERE checkin_id = ?", (checkin_id,))
        # rowcount of zero means no record matched the given id
        if cursor.rowcount == 0:
            db.close()
            return f"Check-in ID {checkin_id} not found."
        db.commit()
        db.close()
        return f"✅ Check-in {checkin_id} approved."
    except Exception as e:
        db.close()
        logger.error(f"Checklist: Error approving check-in: {e}")
        return "Error approving check-in."
def deny_checkin(checkin_id):
    """Deny a pending check-in: the record is deleted outright.

    Returns a human-readable status string for the mesh reply.
    """
    db = sqlite3.connect(checklist_db)
    cursor = db.cursor()
    try:
        cursor.execute("DELETE FROM checkin WHERE checkin_id = ?", (checkin_id,))
        # rowcount of zero means no record matched the given id
        if cursor.rowcount == 0:
            db.close()
            return f"Check-in ID {checkin_id} not found."
        db.commit()
        db.close()
        return f"❌ Check-in {checkin_id} denied and removed."
    except Exception as e:
        db.close()
        logger.error(f"Checklist: Error denying check-in: {e}")
        return "Error denying check-in."
def set_checkin_interval(name, interval_minutes):
    """Set the expected check-in interval on a user's most recent active
    check-in (used by the overdue/safety monitoring scheduler).

    Args:
        name: check-in name whose latest active record is updated.
        interval_minutes: expected interval in minutes.

    Returns:
        A human-readable status string for the mesh reply.
    """
    conn = sqlite3.connect(checklist_db)
    c = conn.cursor()
    try:
        # NOTE: the previous implementation used UPDATE ... ORDER BY ... LIMIT,
        # which requires SQLite built with SQLITE_ENABLE_UPDATE_DELETE_LIMIT;
        # stock builds raise OperationalError on that syntax. Select the target
        # row first, then update by primary key instead.
        c.execute("""
            SELECT checkin_id FROM checkin
            WHERE checkin_name = ?
            AND checkin_id NOT IN (
                SELECT checkin_id FROM checkout
                WHERE checkout_name = checkin_name
                AND (checkout_date > checkin_date OR (checkout_date = checkin_date AND checkout_time > checkin_time))
            )
            ORDER BY checkin_date DESC, checkin_time DESC
            LIMIT 1
        """, (name,))
        row = c.fetchone()
        if row is None:
            conn.close()
            return f"No active check-in found for {name}."
        c.execute("UPDATE checkin SET expected_checkin_interval = ? WHERE checkin_id = ?",
                  (interval_minutes, row[0]))
        conn.commit()
        conn.close()
        return f"⏰ Check-in interval set to {interval_minutes} minutes for {name}."
    except Exception as e:
        conn.close()
        logger.error(f"Checklist: Error setting check-in interval: {e}")
        return "Error setting check-in interval."
def get_overdue_checkins():
    """Get list of users who haven't checked in within their expected interval"""
    # Returns a list of dicts (id, name, location, overdue_minutes, interval,
    # checkin_notes) for approved, monitored check-ins past their interval.
    conn = sqlite3.connect(checklist_db)
    c = conn.cursor()
    current_time = time.time()
    try:
        # NOTE(review): the checkout table has no checkin_id column, so
        # "SELECT checkin_id FROM checkout" resolves checkin_id against the
        # OUTER checkin row (correlated subquery). The effect is: exclude any
        # check-in that has a same-name checkout recorded later in time.
        # It works, but the correlation is implicit — confirm intent.
        c.execute("""
            SELECT checkin_id, checkin_name, checkin_date, checkin_time, expected_checkin_interval, location, checkin_notes
            FROM checkin
            WHERE expected_checkin_interval > 0
            AND approved = 1
            AND checkin_id NOT IN (
                SELECT checkin_id FROM checkout
                WHERE checkout_name = checkin_name
                AND (checkout_date > checkin_date OR (checkout_date = checkin_date AND checkout_time > checkin_time))
            )
        """)
        active_checkins = c.fetchall()
        conn.close()
        overdue_list = []
        for checkin_id, name, date, time_str, interval, location, notes in active_checkins:
            # Stored date/time are interpreted as local time via time.mktime.
            checkin_datetime = time.mktime(time.strptime(f"{date} {time_str}", "%Y-%m-%d %H:%M:%S"))
            time_since_checkin = (current_time - checkin_datetime) / 60 # in minutes
            if time_since_checkin > interval:
                overdue_minutes = int(time_since_checkin - interval)
                overdue_list.append({
                    'id': checkin_id,
                    'name': name,
                    'location': location,
                    'overdue_minutes': overdue_minutes,
                    'interval': interval,
                    'checkin_notes': notes
                })
        return overdue_list
    except sqlite3.OperationalError as e:
        conn.close()
        if "no such table" in str(e):
            # One-shot self-heal: create the schema, then retry. If creation
            # fails the retry raises the same error rather than looping.
            initialize_checklist_database()
            return get_overdue_checkins()
        logger.error(f"Checklist: Error getting overdue check-ins: {e}")
        return []
def format_overdue_alert():
    """Format overdue check-ins as an alert message.

    Returns:
        A multi-line alert string listing each overdue user with how long
        they are overdue, or None when nothing is overdue or on error.
    """
    # The docstring previously sat INSIDE the try block, so the function had
    # no real docstring; moved to the proper position above.
    try:
        overdue = get_overdue_checkins()
        logger.debug(f"Overdue check-ins: {overdue}")
        if not overdue:
            return None
        alert = "⚠️ OVERDUE CHECK-INS:\n"
        for entry in overdue:
            # Present the overdue span as hours + minutes for readability.
            hours = entry['overdue_minutes'] // 60
            minutes = entry['overdue_minutes'] % 60
            alert += f"{entry['name']}: {hours}h {minutes}m overdue"
            # Location display intentionally omitted to keep packets short.
            if entry['checkin_notes']:
                alert += f" 📝{entry['checkin_notes']}"
            alert += "\n"
        return alert.rstrip()
    except Exception as e:
        logger.error(f"Checklist: Error formatting overdue alert: {e}")
        return None
def list_checkin():
# list checkins
conn = sqlite3.connect(checklist_db)
c = conn.cursor()
c.execute("""
SELECT * FROM checkin
WHERE checkin_id NOT IN (
SELECT checkin_id FROM checkout
WHERE checkout_date > checkin_date OR (checkout_date = checkin_date AND checkout_time > checkin_time)
)
""")
rows = c.fetchall()
try:
c.execute("""
SELECT * FROM checkin
WHERE removed = 0
AND checkin_id NOT IN (
SELECT checkin_id FROM checkout
WHERE checkout_date > checkin_date OR (checkout_date = checkin_date AND checkout_time > checkin_time)
)
""")
rows = c.fetchall()
except sqlite3.OperationalError as e:
if "no such table" in str(e):
conn.close()
initialize_checklist_database()
return list_checkin()
else:
conn.close()
logger.error(f"Checklist: Error listing checkins: {e}")
return "Error listing checkins."
conn.close()
timeCheckedIn = ""
checkin_list = ""
@@ -132,7 +314,7 @@ def list_checkin():
timeCheckedIn = f"{days}d {hours:02}:{minutes:02}:{seconds:02}"
else:
timeCheckedIn = f"{hours:02}:{minutes:02}:{seconds:02}"
checkin_list += "ID: " + row[1] + " checked-In for " + timeCheckedIn
checkin_list += "ID: " + str(row[0]) + " " + row[1] + " checked-In for " + timeCheckedIn
if row[5] != "":
checkin_list += "📝" + row[5]
if row != rows[-1]:
@@ -149,31 +331,94 @@ def process_checklist_command(nodeID, message, name="none", location="none"):
if str(nodeID) in bbs_ban_list:
logger.warning("System: Checklist attempt from the ban list")
return "unable to process command"
message_lower = message.lower()
parts = message.split()
try:
comment = message.split(" ", 1)[1]
comment = message.split(" ", 1)[1] if len(parts) > 1 else ""
except IndexError:
comment = ""
# handle checklist commands
if ("checkin" in message.lower() and not reverse_in_out) or ("checkout" in message.lower() and reverse_in_out):
return checkin(name, current_date, current_time, location, comment)
elif ("checkout" in message.lower() and not reverse_in_out) or ("checkin" in message.lower() and reverse_in_out):
if ("checkin" in message_lower and not reverse_in_out) or ("checkout" in message_lower and reverse_in_out):
# Check if interval is specified: checkin 60 comment
interval = 0
actual_comment = comment
if comment and parts[1].isdigit():
interval = int(parts[1])
actual_comment = " ".join(parts[2:]) if len(parts) > 2 else ""
result = checkin(name, current_date, current_time, location, actual_comment)
# Set interval if specified
if interval > 0:
set_checkin_interval(name, interval)
result += f" (monitoring every {interval}min)"
return result
elif ("checkout" in message_lower and not reverse_in_out) or ("checkin" in message_lower and reverse_in_out):
return checkout(name, current_date, current_time, location, comment)
elif "purgein" in message.lower():
return delete_checkin(nodeID)
elif "purgeout" in message.lower():
return delete_checkout(nodeID)
elif "?" in message.lower():
elif "purgein" in message_lower:
return mark_checkin_removed_by_name(name)
elif "purgeout" in message_lower:
return mark_checkout_removed_by_name(name)
elif message_lower.startswith("checklistapprove "):
try:
checkin_id = int(parts[1])
return approve_checkin(checkin_id)
except (ValueError, IndexError):
return "Usage: checklistapprove <checkin_id>"
elif message_lower.startswith("checklistdeny "):
try:
checkin_id = int(parts[1])
return deny_checkin(checkin_id)
except (ValueError, IndexError):
return "Usage: checklistdeny <checkin_id>"
elif "?" in message_lower:
if not reverse_in_out:
return ("Command: checklist followed by\n"
"checkout to check out\n"
"purgeout to delete your checkout record\n"
"Example: checkin Arrived at park")
"checkin [interval] [note]\n"
"checkout [note]\n"
"purgein - delete your checkin\n"
"purgeout - delete your checkout\n"
"checklistapprove <id> - approve checkin\n"
"checklistdeny <id> - deny checkin\n"
"Example: checkin 60 Hunting in tree stand")
else:
return ("Command: checklist followed by\n"
"checkin to check out\n"
"purgeout to delete your checkin record\n"
"Example: checkout Leaving park")
elif "checklist" in message.lower():
"checkout [interval] [note]\n"
"checkin [note]\n"
"purgeout - delete your checkout\n"
"purgein - delete your checkin\n"
"Example: checkout 60 Leaving park")
elif "checklist" in message_lower:
return list_checkin()
else:
return "Invalid command."
return "Invalid command."
def mark_checkin_removed_by_name(name):
    """Soft-delete all check-in records for the given name by setting
    their 'removed' flag; returns a status string with the count."""
    db = sqlite3.connect(checklist_db)
    cursor = db.cursor()
    cursor.execute("UPDATE checkin SET removed = 1 WHERE checkin_name = ?", (name,))
    count = cursor.rowcount
    db.commit()
    db.close()
    return f"Marked {count} check-in(s) as removed for {name}."
def mark_checkout_removed_by_name(name):
    """Soft-delete all checkout records for the given name by setting
    their 'removed' flag; returns a status string with the count."""
    db = sqlite3.connect(checklist_db)
    cursor = db.cursor()
    cursor.execute("UPDATE checkout SET removed = 1 WHERE checkout_name = ?", (name,))
    count = cursor.rowcount
    db.commit()
    db.close()
    return f"Marked {count} checkout(s) as removed for {name}."

202
modules/dxspot.py Normal file
View File

@@ -0,0 +1,202 @@
# meshing-around modules/dxspot.py - Handles DX Spotter integration
# Fetches DX spots from Spothole API based on user commands
# 2025 K7MHI Kelly Keeton
import requests
import datetime
from modules.log import logger
from modules.settings import latitudeValue, longitudeValue
trap_list_dxspotter = ["dx"]
def handledxcluster(message, nodeID, deviceID):
    """Handle a "dx" mesh command: parse optional band=/mode=/ota=/of=
    filter tokens, fetch matching spots from Spothole, and format up to
    five of them as short display lines.

    Args:
        message: raw command text from the user.
        nodeID: requesting mesh node id (logging only).
        deviceID: interface/device id (logging only).

    Returns:
        Formatted spot lines, "No DX spots found.", or an error string.
    """
    # get_spothole_spots is defined in this same module; the previous
    # self-import (from modules.dxspot import ...) was redundant.
    if "DX" in message.upper():
        logger.debug(f"System: DXSpotter: Device:{deviceID} Handler: DX Spot Request Received from Node {nodeID}")
        band = None
        mode = None
        source = None
        dx_call = None
        # Parse key=value tokens; split on the FIRST '=' only so values
        # that themselves contain '=' are preserved intact.
        for part in message.split():
            if part.lower().startswith("band="):
                band = part.split("=", 1)[1]
            elif part.lower().startswith("mode="):
                mode = part.split("=", 1)[1]
            elif part.lower().startswith("ota="):
                source = part.split("=", 1)[1]
            elif part.lower().startswith("of="):
                dx_call = part.split("=", 1)[1]
        # Build params dict for the API query; normalize case per API field
        params = {}
        if source:
            params["source"] = source.upper()
        if band:
            params["band"] = band.lower()
        if mode:
            params["mode"] = mode.upper()
        if dx_call:
            params["dx_call"] = dx_call.upper()
        # Fetch spots
        spots = get_spothole_spots(**params)
        if spots:
            response_lines = []
            for spot in spots[:5]:
                callsign = spot.get('dx_call', spot.get('callsign', 'N/A'))
                freq_hz = spot.get('freq', spot.get('frequency', None))
                frequency = f"{float(freq_hz)/1e6:.3f} MHz" if freq_hz else "N/A"
                mode_val = spot.get('mode', 'N/A')
                comment = spot.get('comment', '')
                if len(comment) > 111:  # keep comments short for mesh packets
                    comment = comment[:111] + '...'
                sig = spot.get('sig', '')
                de_grid = spot.get('de_grid', '')
                de_call = spot.get('de_call', '')
                sig_ref_name = spot.get('sig_refs_names', [''])[0] if spot.get('sig_refs_names') else ''
                line = f"{callsign} @{frequency} {mode_val} {sig} {sig_ref_name} by:{de_call} {de_grid} {comment}"
                response_lines.append(line)
            response = "\n".join(response_lines)
        else:
            response = "No DX spots found."
        return response
    return "Error: No DX command found."
def get_spothole_spots(source=None, band=None, mode=None, date=None, dx_call=None, de_continent=None, de_location=None):
    """
    Fetches spots from https://spothole.app/api/v1/spots with optional filters.

    Args:
        source: spot source/program filter, sent to the API.
        band: band filter (e.g. "20m"), sent to the API.
        mode: mode filter (e.g. "SSB"), sent to the API.
        date: YYYY-MM-DD string or datetime.date, sent to the API.
        dx_call: client-side filter on the spotted callsign.
        de_continent: client-side filter on the spotter's continent.
        de_location: EITHER a (lat, lon) pair for a +/- 1 degree box filter
            on the spotter's position, OR a string matched against the
            spot's de_location field. (The previous code crashed on both
            forms: a tuple later hit .upper(), a string hit tuple unpacking.)

    Returns a list of spot dicts; empty list on fetch error.
    """
    url = "https://spothole.app/api/v1/spots"
    params = {}
    fetched_count = 0
    # Add administrative filters if provided
    qrt = False  # Always fetch active spots
    needs_sig = False  # Always need spots with a group like xOTA
    limit = 4
    dedupe = True
    params["dedupe"] = str(dedupe).lower()
    params["limit"] = limit
    params["qrt"] = str(qrt).lower()
    params["needs_sig"] = str(needs_sig).lower()
    params["needs_sig_ref"] = 'true'
    # Only get spots from last 9 hours. Use an aware UTC datetime: naive
    # utcnow().timestamp() is interpreted as LOCAL time and yields a wrong
    # epoch on any host not running in UTC.
    received_since_dt = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(hours=9)
    params["received_since"] = int(received_since_dt.timestamp())
    # Add spot filters if provided
    if source:
        params["source"] = source
    if band:
        params["band"] = band
    if mode:
        params["mode"] = mode
    if date:
        # date should be a string in YYYY-MM-DD or datetime.date
        if isinstance(date, datetime.date):
            params["date"] = date.isoformat()
        else:
            params["date"] = date
    try:
        headers = {"User-Agent": "meshing-around-dxspotter/1.0"}
        # timeout so a stalled API call cannot hang the bot thread
        response = requests.get(url, params=params, headers=headers, timeout=10)
        response.raise_for_status()
        spots = response.json()
    except Exception as e:
        logger.debug(f"Error fetching spots: {e}")
        spots = []
    fetched_count = len(spots)
    # Admin Filters done via config.ini
    de_grid = None  # e.g., "EM00"
    de_dxcc_id = None  # e.g., "291"
    de_call = None  # e.g., "K7MHI"
    dx_itu_zone = None  # e.g., "3"
    dx_cq_zone = None  # e.g., "4"
    dx_dxcc_id = None  # e.g., "291"
    # spotter location filter: only unpack coordinates when de_location is
    # actually a (lat, lon) pair.
    de_latitude = None
    de_longitude = None
    if isinstance(de_location, (tuple, list)) and len(de_location) == 2:
        de_latitude, de_longitude = de_location
    # NOTE(review): a previous branch fell back to latitudeValue/longitudeValue
    # behind a condition that could never be true (both locals were just set
    # to None); that dead code is removed — confirm whether a config-based
    # default location filter was intended.
    if de_latitude and de_longitude:
        lat_range = (de_latitude - 1.0, de_latitude + 1.0)
        lon_range = (de_longitude - 1.0, de_longitude + 1.0)
        spots = [spot for spot in spots if lat_range[0] <= spot.get('de_latitude', 0) <= lat_range[1] and
                 lon_range[0] <= spot.get('de_longitude', 0) <= lon_range[1]]
    # grid filter
    if de_grid:
        spots = [spot for spot in spots if spot.get('de_grid', '').upper() == de_grid.upper()]
    # DXCC Filters
    if de_dxcc_id:
        spots = [spot for spot in spots if str(spot.get('de_dxcc_id', '')) == str(de_dxcc_id)]
    # By reporting callsign
    if de_call:
        spots = [spot for spot in spots if spot.get('de_call', '').upper() == de_call.upper()]
    # DX spotted in zone
    if dx_itu_zone:
        spots = [spot for spot in spots if str(spot.get('dx_itu_zone', '')) == str(dx_itu_zone)]
    if dx_cq_zone:
        spots = [spot for spot in spots if str(spot.get('dx_cq_zone', '')) == str(dx_cq_zone)]
    if dx_dxcc_id:
        spots = [spot for spot in spots if str(spot.get('dx_dxcc_id', '')) == str(dx_dxcc_id)]
    # User Runtime Filters
    # Filter by dx_call if provided
    if dx_call:
        spots = [spot for spot in spots if spot.get('dx_call', '').upper() == dx_call.upper()]
    # Filter by de_continent if provided
    if de_continent:
        spots = [spot for spot in spots if spot.get('de_continent', '').upper() == de_continent.upper()]
    # String de_location matches the spot's reported location; a coordinate
    # pair was already applied above as a lat/lon box.
    if isinstance(de_location, str) and de_location:
        spots = [spot for spot in spots if spot.get('de_location', '').upper() == de_location.upper()]
    logger.debug(f"System: Spothole Returning {len(spots)} spots after filtering (fetched {fetched_count})")
    return spots
def handle_post_dxspot():
    """Post a hard-coded test spot to Spothole and report the outcome.

    Returns:
        A human-readable status string.
    """
    # Aware UTC time: naive utcnow().timestamp() is interpreted as local
    # time and yields a wrong epoch on any non-UTC host.
    time = int(datetime.datetime.now(datetime.timezone.utc).timestamp())
    freq = 14200000  # 14.2 MHz, in Hz
    comment = "Test spot please ignore"
    de_spot = "N0CALL"
    dx_spot = "N0CALL"
    spot = {"dx_call": dx_spot, "time": time, "freq": freq, "comment": comment, "de_call": de_spot}
    try:
        success = post_spothole_spot(spot)
        if success:
            return "Spot posted successfully."
        else:
            return "Failed to post spot."
    except Exception as e:
        logger.debug(f"Error in handle_post_dxspot: {e}")
        return "Error occurred while posting spot."
def post_spothole_spot(spot):
    """
    Posts a new spot to https://spothole.app/api/v1/spot.
    Returns True on HTTP success, False on any failure.
    """
    endpoint = "https://spothole.app/api/v1/spot"
    request_headers = {
        "Content-Type": "application/json",
        "User-Agent": "meshing-around-dxspotter/1.0",
    }
    try:
        reply = requests.post(endpoint, json=spot, headers=request_headers, timeout=10)
        reply.raise_for_status()
        logger.debug(f"Spot posted successfully: {reply.json()}")
        return True
    except Exception as e:
        # Best-effort: any network/HTTP/JSON problem is logged, not raised.
        logger.debug(f"Error posting spot: {e}")
        return False

View File

@@ -1,7 +1,17 @@
# File monitor module for the meshing-around bot
# 2024 Kelly Keeton K7MHI
from modules.log import *
from modules.log import logger
from modules.settings import (
file_monitor_file_path,
news_file_path,
news_random_line_only,
allowXcmd,
bbs_admin_list,
xCmd2factorEnabled,
xCmd2factor_timeout,
enable_runShellCmd
)
import asyncio
import random
import os
@@ -178,6 +188,9 @@ def initNewsSources():
if file.endswith('_news.txt'):
source = file[:-9] # remove _news.txt
newsSourcesList.append(source)
return True
logger.info("FileMon: No news sources found")
return False
#initialize the headlines on startup
initNewsSources()

View File

@@ -9,8 +9,10 @@
- [Tic-Tac-Toe](#tic-tac-toe-game-module)
- [MasterMind](#mastermind-game-module)
- [Video Poker](#video-poker-game-module)
- [Hangman](#hangman-game-module)
- [Quiz](#quiz-game-module)
- [Survey](#survey--module-game)
- [Word of the Day Game](#word-of-the-day-game--rules--features)
---
@@ -538,4 +540,182 @@ Place your Bet, or (L)eave Table.
"turtle",
"lizard",
"snake"
]
]
# Hangman Game Module
A classic word-guessing game for the Meshtastic mesh-bot. Try to guess the hidden word one letter at a time before you run out of chances!
## How to Play
- **Start the Game:**
Send the command `hangman` via DM to the bot to begin a new game.
- **Objective:**
Guess the secret word by suggesting letters, one at a time. Each incorrect guess brings you closer to losing!
- **Game Flow:**
1. **New Game:**
- The bot picks a random word and shows you its masked form (e.g., `_ _ _ _ _`).
- You'll see your total games played and games won.
2. **Guessing:**
- Type a single letter to guess.
- Correct guesses reveal all instances of that letter in the word.
- Incorrect guesses are tracked; you have 6 chances before the game ends.
- The bot shows your progress, wrong guesses, and a hangman emoji status.
3. **Winning & Losing:**
- Guess all letters before reaching 6 wrong guesses to win!
- If you lose, the bot reveals the word and starts a new game.
- **Commands:**
- Enter a single letter to guess.
- Start a new game by sending `hangman` again.
## Example Session
```
_ _ _ _ _ _ _
Guess a letter
🥳
Total Games: 1, Won: 1
M E S H T A S T I C
Guess a letter
```
## Notes
- The word list is loaded from `data/hangman.json` if available, or uses a built-in default list. [\"apple\",\"banana\",\"cherry\"]
- Game stats are tracked per player.
- Only one game session per player at a time.
- Play via DM for best experience.
## Data Files
- `data/hangman.json`: List of words for Hangman.
Example:
```
[
"apple",
"banana",
"cherry"
]
```
## Credits
- Written for Meshtastic mesh-bot by ZR1RF Johannes le Roux 2025
# Quiz Game Module
This module implements a multiplayer quiz game for the Meshtastic mesh-bot.
## How to Play
- **Start the Game:**
The quizmaster starts the quiz session (usually with `/quiz start` or similar command).
- **Join the Game:**
Players join by sending `/quiz join` or by answering a question while a quiz is active.
- **Answer Questions:**
- Use `Q: <answer>` to answer the current question.
- For multiple choice, answer with `A`, `B`, `C`, etc.
- For free-text, type the answer after `Q: `.
- Use `Q: ?` to request the next question.
- **Leave the Game:**
Players can leave at any time with `/quiz leave`.
- **Stop the Game:**
The quizmaster stops the quiz session (e.g., `/quiz stop`). Final scores and the top 3 players are announced.
## Rules & Features
- Only the quizmaster can start or stop the quiz.
- Players can join or leave at any time while the quiz is active.
- Questions are loaded from quiz_questions.json and can be multiple choice or free-text.
- Players earn 1 point for each correct answer.
- The first player to answer each question correctly is noted.
- The top 3 players are displayed at the end of the quiz.
- The quizmaster can broadcast messages to all players.
## Example Commands
- Start quiz:
`/quiz start`
- Join quiz:
`/quiz join`
- Answer a question:
`Q: B`
`Q: Paris`
- Next question:
`Q: ?`
- Leave quiz:
`/quiz leave`
- Stop quiz:
`/quiz stop`
## Notes
- Only one quiz can be active at a time.
- Players can only answer each question once.
- The quizmaster is defined by the `bbs_admin_list` variable.
- Questions must be formatted correctly in the JSON file for the game to function.
---
**Written for Meshtastic mesh-bot by K7MHI Kelly Keeton 2025**
---
---
# Survey Module "game"
This module implements a survey system for the Meshtastic mesh-bot.
## How to Play
- **Start the Survey:**
Users start a survey by specifying the survey name (e.g., `/survey start example`).
The survey will prompt the user with the first question.
- **Answer Questions:**
- For multiple choice: reply with a letter (A, B, C, ...).
- For integer: reply with a number.
- For text: reply with your answer as text.
After each answer, the next question is shown automatically.
- **End the Survey:**
The survey ends automatically after the last question, or the user can send `end` to finish early.
Responses are saved to a CSV file.
## Rules & Features
- Surveys are defined in JSON files in surveys (e.g., `example_survey.json`).
- Each survey can have multiple choice, integer, or text questions.
- User responses are saved to a CSV file named `<survey_name>_responses.csv` in the same directory.
- Users can only answer each question once per survey session.
- Survey results can be summarized and reported by the bot.
## Example Commands
- Start a survey:
`/survey start example`
- Answer a multiple choice question:
`A`
- Answer an integer question:
`42`
- Answer a text question:
`My favorite color is blue.`
- End the survey early:
`end`
- Get survey results (admin):
`/survey results example`
## Notes
- Only surveys listed in the surveys directory with the `_survey.json` suffix are available.
- Each user's responses are tracked separately.
- Results are summarized and can be displayed by the bot.
---
**Written for Meshtastic mesh-bot by K7MHI Kelly Keeton 2025**

View File

@@ -2,12 +2,12 @@
# Adapted for Meshtastic mesh-bot by K7MHI Kelly Keeton 2024
from random import choices, shuffle
from modules.log import *
from modules.log import logger
from modules.settings import jackTracker
import time
import pickle
jack_starting_cash = 100 # Replace 100 with your desired starting cash value
from modules.settings import jackTracker
SUITS = ("♥️", "♦️", "♠️", "♣️")
RANKS = (

View File

@@ -4,7 +4,7 @@
import random
import time
import pickle
from modules.log import *
from modules.log import logger
# Global variables
total_days = 7 # number of days or rotations the player has to play

View File

@@ -4,7 +4,7 @@
import random
import time
import pickle
from modules.log import *
from modules.log import logger
# Clubs setup
driver_distances = list(range(230, 280, 5))

View File

@@ -9,7 +9,7 @@
import json
import random
import os
from modules.log import *
from modules.log import logger
class HamTest:
def __init__(self):

View File

@@ -1,4 +1,7 @@
# Written for Meshtastic mesh-bot by ZR1RF Johannes le Roux 2025
from modules.log import logger, getPrettyTime
import os
import json
import random
class Hangman:
@@ -118,6 +121,25 @@ class Hangman:
def __init__(self):
    # Per-player game state, keyed by player/node id.
    self.game = {}
    # Preserve the built-in list as a fallback. NOTE(review): WORDS is a
    # class attribute defined elsewhere in the Hangman class — confirm it
    # exists before __init__ runs.
    self.DEFAULT_WORDS = self.WORDS
    # Try to load hangman.json if it exists
    hangman_json_path = os.path.join('data', 'hangman.json')
    if os.path.exists(hangman_json_path):
        try:
            with open(hangman_json_path, 'r') as f:
                words = json.load(f)
            # Ensure it's a list of strings
            if isinstance(words, list) and all(isinstance(w, str) for w in words):
                self.WORDS = words
            else:
                # Malformed content (wrong type): keep the defaults.
                self.WORDS = self.DEFAULT_WORDS
        except (FileNotFoundError, json.JSONDecodeError):
            # File vanished between exists() and open(), or invalid JSON.
            logger.warning("Failed to load hangman.json, using default words. example JSON: [\"apple\",\"banana\",\"cherry\"]")
            self.WORDS = self.DEFAULT_WORDS
    else:
        self.WORDS = self.DEFAULT_WORDS
def new_game(self, id):
games = won = 0

View File

@@ -3,7 +3,8 @@
# As a Ham, is this obscuring the meaning of the joke? Or is it enhancing it?
from dadjokes import Dadjoke # pip install dadjokes
import random
from modules.log import *
from modules.log import logger, getPrettyTime
from modules.settings import dad_jokes_emojiJokes, dad_jokes_enabled
lameJokes = [
"Why don't scientists trust atoms? Because they make up everything!",
@@ -177,10 +178,12 @@ def sendWithEmoji(message):
i += 1
return ' '.join(words)
def tell_joke(nodeID=0, vox=False):
def tell_joke(nodeID=0, vox=False, test=False):
dadjoke = Dadjoke()
if test:
return sendWithEmoji(dadjoke.joke)
try:
if dad_jokes_emojiJokes or vox:
if dad_jokes_emojiJokes:
renderedLaugh = sendWithEmoji(dadjoke.joke)
else:
renderedLaugh = dadjoke.joke

View File

@@ -6,8 +6,8 @@ from random import randrange, uniform # random numbers
from types import SimpleNamespace # namespaces support
import pickle # pickle file support
import time # time functions
from modules.log import * # mesh-bot logging
from modules.log import logger # mesh-bot logging
from modules.system import lemonadeTracker # player tracking
import locale # culture specific locale
import math # math functions
import re # regular expressions
@@ -23,7 +23,6 @@ lemonadeLemons = [{'nodeID': 0, 'cost': 4.00, 'count': 8, 'min': 2.00, 'unit': 0
lemonadeSugar = [{'nodeID': 0, 'cost': 3.00, 'count': 15, 'min': 1.50, 'unit': 0.00}]
lemonadeWeeks = [{'nodeID': 0, 'current': 1, 'total': lemon_total_weeks, 'sales': 99, 'potential': 0, 'unit': 0.00, 'price': 0.00, 'total_sales': 0}]
lemonadeScore = [{'nodeID': 0, 'value': 0.00, 'total': 0.00}]
from modules.settings import lemonadeTracker
def get_sales_amount(potential, unit, price):
"""Gets the sales amount.

View File

@@ -12,7 +12,7 @@ Game Rules:
"""
import pickle
from modules.log import *
from modules.log import logger, getPrettyTime
from datetime import datetime, timedelta
from geopy.distance import geodesic

View File

@@ -4,8 +4,9 @@
import random
import time
import pickle
from modules.log import *
from modules.settings import mindTracker
from modules.log import logger
from modules.system import mindTracker
def chooseDifficultyMMind(message):
usrInput = message.lower()
msg = ''

View File

@@ -11,7 +11,8 @@
import json
import os
import random
from modules.log import *
from modules.log import logger
from modules.settings import bbs_admin_list
QUIZ_JSON = os.path.join(os.path.dirname(__file__), '../', '../', 'data', 'quiz_questions.json')
QUIZMASTER_ID = bbs_admin_list

View File

@@ -1,13 +1,13 @@
# Tic-Tac-Toe game for Meshtastic mesh-bot
# Board positions chosen by numbers 1-9
# 2025
from modules.log import *
import random
import time
import modules.settings as my_settings
# to (max), molly and jake, I miss you both so much.
if disable_emojis_in_games:
if my_settings.disable_emojis_in_games:
X = "X"
O = "O"
else:
@@ -65,7 +65,7 @@ class TicTacToe:
row = ""
for j in range(3):
pos = i * 3 + j
if disable_emojis_in_games:
if my_settings.disable_emojis_in_games:
cell = b[pos] if b[pos] != " " else str(pos + 1)
else:
cell = b[pos] if b[pos] != " " else f" {str(pos + 1)} "
@@ -74,7 +74,6 @@ class TicTacToe:
row += " | "
board_str += row
if i < 2:
#board_str += "\n-+-+-\n"
board_str += "\n"
return board_str + "\n"

View File

@@ -3,12 +3,12 @@
import random
import time
import pickle
from modules.log import *
from modules.log import logger, getPrettyTime
vpStartingCash = 20
from modules.settings import vpTracker
# Define the Card class
class CardVP:
global vpTracker
card_values = { # value of the ace is high until it needs to be low
2: 2,
@@ -296,154 +296,159 @@ def loadHSVp():
return 0
def playVideoPoker(nodeID, message):
global vpTracker, vpStartingCash
msg = ""
try:
# Initialize the player
if getLastCmdVp(nodeID) is None or getLastCmdVp(nodeID) == "":
# create new player if not in tracker
logger.debug(f"System: VideoPoker: New Player {nodeID}")
vpTracker.append({'nodeID': nodeID, 'cmd': 'new', 'time': time.time(), 'cash': vpStartingCash, 'player': None, 'deck': None, 'highScore': 0, 'drawCount': 0})
return f"You have {vpStartingCash} coins, \nWhats your bet?"
# Gather the player's bet
if getLastCmdVp(nodeID) == "new" or getLastCmdVp(nodeID) == "gameOver":
# Initialize shuffled Deck and Player
player = PlayerVP()
deck = DeckVP()
deck.shuffle()
drawCount = 1
bet = 0
msg = ''
# Initialize the player
if getLastCmdVp(nodeID) is None or getLastCmdVp(nodeID=nodeID) == "":
# create new player if not in tracker
logger.debug(f"System: VideoPoker: New Player {nodeID}")
vpTracker.append({'nodeID': nodeID, 'cmd': 'new', 'time': time.time(), 'cash': vpStartingCash, 'player': None, 'deck': None, 'highScore': 0, 'drawCount': 0})
return f"You have {vpStartingCash} coins, \nWhats your bet?"
# Gather the player's bet
if getLastCmdVp(nodeID) == "new" or getLastCmdVp(nodeID) == "gameOver":
# Initialize shuffled Deck and Player
player = PlayerVP()
deck = DeckVP()
deck.shuffle()
drawCount = 1
bet = 0
msg = ''
# load the player bankroll from tracker
for i in range(len(vpTracker)):
if vpTracker[i]['nodeID'] == nodeID:
player.bankroll = vpTracker[i]['cash']
vpTracker[i]['time'] = time.time()
# Detect if message is a bet
try:
bet = int(message)
except ValueError:
msg += f"Please enter a valid bet, 1 to 5 coins. you have {player.bankroll} coins."
# Check if bet is valid
if bet > player.bankroll:
msg += f"You can only bet the money you have. {player.bankroll} coins, No strip poker here..."
elif bet < 1:
msg += "You must bet at least 1 coin.🪙"
elif bet > 5:
msg += "The 🎰 coin slot only fits 5 coins max."
# if msg contains an error, return it
if msg is not None and msg != '':
return msg
else:
# Take the bet
player.bet(str(message))
# Bet placed, start the game
setLastCmdVp(nodeID, "playing")
# save player and deck to tracker
# load the player bankroll from tracker
for i in range(len(vpTracker)):
if vpTracker[i]['nodeID'] == nodeID:
vpTracker[i]['player'] = player
vpTracker[i]['deck'] = deck
vpTracker[i]['cash'] = player.bankroll
player.bankroll = vpTracker[i]['cash']
vpTracker[i]['time'] = time.time()
# Play the game
if getLastCmdVp(nodeID) == "playing":
msg = ''
player.draw_cards(deck)
msg += player.show_hand()
# give hint to player
msg += player.score_hand(resetHand=False)
# save player and deck to tracker
for i in range(len(vpTracker)):
if vpTracker[i]['nodeID'] == nodeID:
vpTracker[i]['player'] = player
vpTracker[i]['deck'] = deck
vpTracker[i]['drawCount'] = drawCount
# Detect if message is a bet
try:
bet = int(message)
except ValueError:
msg += f"Please enter a valid bet, 1 to 5 coins. you have {player.bankroll} coins."
msg += f"\nDeal new card? \nex: 1,3,4 or (N)o,(A)ll (H)and"
setLastCmdVp(nodeID, "redraw")
return msg
if getLastCmdVp(nodeID) == "redraw":
msg = ''
# load the player and deck from tracker
for i in range(len(vpTracker)):
if vpTracker[i]['nodeID'] == nodeID:
player = vpTracker[i]['player']
deck = vpTracker[i]['deck']
drawCount = vpTracker[i]['drawCount']
# Check if bet is valid
if bet > player.bankroll:
msg += f"You can only bet the money you have. {player.bankroll} coins, No strip poker here..."
elif bet < 1:
msg += "You must bet at least 1 coin.🪙"
elif bet > 5:
msg += "The 🎰 coin slot only fits 5 coins max."
# if msg contains an error, return it
if msg is not None and msg != '':
return msg
else:
# Take the bet
player.bet(str(message))
# Bet placed, start the game
setLastCmdVp(nodeID, "playing")
# if player wants to redraw cards, and not done already
if message.lower().startswith("n"):
setLastCmdVp(nodeID, "endGame")
if message.lower().startswith("h"):
msg = player.show_hand()
return msg
else:
if drawCount <= 1:
msg = player.redraw(deck, message)
if msg.startswith("ex:"):
# if returned error message, return it
return msg
drawCount += 1
# save player and deck to tracker
for i in range(len(vpTracker)):
if vpTracker[i]['nodeID'] == nodeID:
vpTracker[i]['player'] = player
vpTracker[i]['deck'] = deck
vpTracker[i]['drawCount'] = drawCount
if drawCount == 2:
# this is the last draw will carry on to endGame for scoring
msg = player.redraw(deck, message) + f"\n"
vpTracker[i]['cash'] = player.bankroll
# Play the game
if getLastCmdVp(nodeID) == "playing":
msg = ''
player.draw_cards(deck)
msg += player.show_hand()
# give hint to player
msg += player.score_hand(resetHand=False)
# save player and deck to tracker
for i in range(len(vpTracker)):
if vpTracker[i]['nodeID'] == nodeID:
vpTracker[i]['player'] = player
vpTracker[i]['deck'] = deck
vpTracker[i]['drawCount'] = drawCount
msg += f"\nDeal new card? \nex: 1,3,4 or (N)o,(A)ll (H)and"
setLastCmdVp(nodeID, "redraw")
return msg
if getLastCmdVp(nodeID) == "redraw":
msg = ''
# load the player and deck from tracker
for i in range(len(vpTracker)):
if vpTracker[i]['nodeID'] == nodeID:
player = vpTracker[i]['player']
deck = vpTracker[i]['deck']
drawCount = vpTracker[i]['drawCount']
# if player wants to redraw cards, and not done already
if message.lower().startswith("n"):
setLastCmdVp(nodeID, "endGame")
if message.lower().startswith("h"):
msg = player.show_hand()
return msg
else:
if drawCount <= 1:
msg = player.redraw(deck, message)
if msg.startswith("ex:"):
# if returned error message, return it
return msg
# redraw done
setLastCmdVp(nodeID, "endGame")
drawCount += 1
# save player and deck to tracker
for i in range(len(vpTracker)):
if vpTracker[i]['nodeID'] == nodeID:
vpTracker[i]['player'] = player
vpTracker[i]['deck'] = deck
vpTracker[i]['drawCount'] = drawCount
if drawCount == 2:
# this is the last draw will carry on to endGame for scoring
msg = player.redraw(deck, message) + f"\n"
if msg.startswith("ex:"):
# if returned error message, return it
return msg
# redraw done
setLastCmdVp(nodeID, "endGame")
else:
# show redrawn hand
return msg
else:
# show redrawn hand
return msg
else:
# redraw already done
setLastCmdVp(nodeID, "endGame")
if getLastCmdVp(nodeID) == "endGame":
# load the player and deck from tracker
for i in range(len(vpTracker)):
if vpTracker[i]['nodeID'] == nodeID:
player = vpTracker[i]['player']
deck = vpTracker[i]['deck']
# redraw already done
setLastCmdVp(nodeID, "endGame")
if getLastCmdVp(nodeID) == "endGame":
# load the player and deck from tracker
for i in range(len(vpTracker)):
if vpTracker[i]['nodeID'] == nodeID:
player = vpTracker[i]['player']
deck = vpTracker[i]['deck']
msg += player.score_hand()
msg += player.score_hand()
if player.bankroll < 1:
player.bankroll = vpStartingCash
msg += f"\nLooks 💸 like you're out of money. 💳 resetting ballance 🏧"
elif player.bankroll > vpTracker[i]['highScore']:
vpTracker[i]['highScore'] = player.bankroll
msg += " 🎉HighScore!"
# save high score
saveHSVp(nodeID, vpTracker[i]['highScore'])
if player.bankroll < 1:
player.bankroll = vpStartingCash
msg += f"\nLooks 💸 like you're out of money. 💳 resetting ballance 🏧"
elif player.bankroll > vpTracker[i]['highScore']:
vpTracker[i]['highScore'] = player.bankroll
msg += " 🎉HighScore!"
# save high score
saveHSVp(nodeID, vpTracker[i]['highScore'])
msg += f"\nPlace your Bet, or (L)eave Table."
msg += f"\nPlace your Bet, or (L)eave Table."
setLastCmdVp(nodeID, "gameOver")
# reset player and deck in tracker
for i in range(len(vpTracker)):
if vpTracker[i]['nodeID'] == nodeID:
vpTracker[i]['player'] = None
vpTracker[i]['deck'] = None
vpTracker[i]['drawCount'] = 0
# save bankroll
vpTracker[i]['cash'] = player.bankroll
setLastCmdVp(nodeID, "gameOver")
# reset player and deck in tracker
for i in range(len(vpTracker)):
if vpTracker[i]['nodeID'] == nodeID:
vpTracker[i]['player'] = None
vpTracker[i]['deck'] = None
vpTracker[i]['drawCount'] = 0
# save bankroll
vpTracker[i]['cash'] = player.bankroll
return msg
return msg
# At the end of the try block, if nothing returned yet:
return msg if msg else 'No action taken.'
except Exception as e:
logger.warning(f"System: VideoPoker: Error {e}")
return 'No Game in progress'

View File

@@ -1,6 +1,6 @@
# python word of the day game module for meshing-around bot
# 2025 K7MHI Kelly Keeton
from modules.log import *
from modules.log import logger, getPrettyTime
import random
import json
import os

View File

@@ -2,15 +2,16 @@
# K7MHI Kelly Keeton 2024
import json # pip install json
from geopy.geocoders import Nominatim # pip install geopy
import maidenhead as mh # pip install maidenhead
#from geopy.geocoders import Nominatim # pip install geopy
#import maidenhead as mh # pip install maidenhead
import requests # pip install requests
import bs4 as bs # pip install beautifulsoup4
import xml.dom.minidom
from modules.log import *
#import xml.dom.minidom
from modules.log import logger
from modules.settings import urlTimeoutSeconds, NO_ALERTS, myRegionalKeysDE
trap_list_location_eu = ("ukalert", "ukwx", "ukflood")
trap_list_location_de = ("dealert", "dewx", "deflood")
trap_list_location_eu = ("ukalert",)
trap_list_location_de = ("dealert",)
def get_govUK_alerts(lat, lon):
try:
@@ -22,7 +23,7 @@ def get_govUK_alerts(lat, lon):
alert = soup.find('h2', class_='govuk-heading-m', id='alert-status')
except Exception as e:
logger.warning("Error getting UK alerts: " + str(e))
return NO_ALERTS
return
if alert:
return "🚨" + alert.get_text(strip=True)
@@ -47,7 +48,7 @@ def get_nina_alerts():
return NO_ALERTS
def get_wxUKgov():
# get UK weather warnings
# get UK weather warnings, these look icky
url = 'https://www.metoffice.gov.uk/weather/guides/rss'
url = 'https://www.metoffice.gov.uk/public/data/PWSCache/WarningsRSS/Region/nw'
try:
@@ -71,7 +72,62 @@ def get_wxUKgov():
def get_floodUKgov():
# get UK flood warnings
# get UK flood warnings, there is so much I need a locals help
url = 'https://environment.data.gov.uk/flood-widgets/rss/feed-England.xml'
return NO_ALERTS
def get_crimeUKgov(lat, lon):
    """
    Fetches recent street crime data from UK Police API for given lat/lon.

    Args:
        lat, lon: coordinates to query (decimal degrees).

    Returns:
        A newline-joined summary of up to three crimes, or NO_ALERTS on any
        failure or when no data is returned. -- pay for use?
    """
    # Local import: this module does not import datetime at the top level,
    # so the original code raised NameError the first time it was called.
    import datetime
    # The police API is organised by month; query the current month
    date = datetime.datetime.now().strftime("%Y-%m")
    url = f'https://data.police.uk/api/crimes-street/all-crime?date={date}&lat={lat}&lng={lon}'
    try:
        response = requests.get(url, timeout=urlTimeoutSeconds)
        if not response.ok or not response.text.strip():
            return NO_ALERTS
        crimes = response.json()
        if not crimes:
            return NO_ALERTS
        # Summarize the first few crimes
        summaries = []
        for crime in crimes[:3]:
            category = crime.get("category", "Unknown")
            # outcome_status can be null in the API payload; guard before .get()
            outcome = (crime.get("outcome_status") or {}).get("category", "No outcome")
            location = crime.get("location", {}).get("street", {}).get("name", "Unknown location")
            summaries.append(f"{category.title()} at {location} ({outcome})")
        return "\n".join(summaries)
    except Exception as e:
        logger.warning(f"Error fetching UK crime data: {e}")
        return NO_ALERTS
def get_crime_stopsUKgov(lat, lon):
    """
    Fetches recent stop-and-search data from UK Police API for given lat/lon.

    Args:
        lat, lon: coordinates to query (decimal degrees).

    Returns:
        A newline-joined summary of up to three stops, or NO_ALERTS on any
        failure or when no data is returned. -- pay for use?
    """
    # Local import: this module does not import datetime at the top level,
    # so the original code raised NameError the first time it was called.
    import datetime
    # The police API is organised by month; query the current month
    date = datetime.datetime.now().strftime("%Y-%m")
    url = f'https://data.police.uk/api/stops-street?date={date}&lat={lat}&lng={lon}'
    try:
        response = requests.get(url, timeout=urlTimeoutSeconds)
        if not response.ok or not response.text.strip():
            return NO_ALERTS
        stops = response.json()
        if not stops:
            return NO_ALERTS
        # Summarize the first few stops
        summaries = []
        for stop in stops[:3]:  # Limit to first 3 stops for brevity
            summary = (
                f"Date: {stop.get('datetime', 'N/A')}, "
                f"Outcome: {stop.get('outcome', 'N/A')}, "
                f"Ethnicity: {stop.get('self_defined_ethnicity', 'N/A')}, "
                f"Gender: {stop.get('gender', 'N/A')}, "
                f"Location: {stop.get('location', {}).get('street', {}).get('name', 'N/A')}"
            )
            summaries.append(summary)
        return "\n".join(summaries)
    except Exception as e:
        # Log instead of failing silently, matching get_crimeUKgov above
        logger.warning(f"Error fetching UK stop-and-search data: {e}")
        return NO_ALERTS

View File

@@ -7,7 +7,7 @@
# https://pythonhosted.org/RPIO/
import RPIO
from modules.log import *
from modules.log import logger, getPrettyTime
trap_list_gpio = ("gpio", "pin", "relay", "switch", "pwm")
# set up input channel without pull-up

423
modules/inventory.md Normal file
View File

@@ -0,0 +1,423 @@
# Inventory & Point of Sale System
## Overview
The inventory module provides a simple point-of-sale (POS) system for mesh networks, enabling inventory management, sales tracking, and cart-based transactions. This system is ideal for:
- Emergency supply management
- Event merchandise sales
- Community supply tracking
- Remote location inventory
- Asset management
- Field operations logistics
- Tool lending in makerspaces or ham swaps
- Tracking and lending shared items like Legos or kits
> **Tool Lending & Shared Item Tracking:**
> The system supports lending out tools or kits (e.g., in a makerspace or ham swap) using the `itemloan` and `itemreturn` commands. You can also track bulk or set-based items like Legos, manage their locations, and log checkouts and returns for community sharing or events.
## Features
### 🏪 Simple POS System
- **Item Management**: Add, remove, and update inventory items
- **Cart System**: Build orders before completing transactions
- **Transaction Logging**: Full audit trail of all sales and returns
- **Price Tracking**: Track price changes over time
- **Location Tracking**: Optional warehouse/location field for items
### 💰 Financial Features
- **Penny Rounding**: USA cash sales support
- Cash sales round down to nearest nickel
- Taxed sales round up to nearest nickel
- **Daily Statistics**: Track sales performance
- **Hot Item Detection**: Identify best-selling products
- **Revenue Tracking**: Daily sales totals
### 📊 Reporting
- **Inventory Value**: Total inventory worth
- **Sales Reports**: Daily transaction summaries
- **Best Sellers**: Most popular items
**Cart System:**
- `cartadd <name> <qty>` - Add to cart
- `cartremove <name>` - Remove from cart
- `cartlist` / `cart` - View cart
- `cartbuy` / `cartsell [notes]` - Complete transaction
- `cartclear` - Empty cart
**Item Management:**
- `itemadd <name> <price> <qty> [loc]` - Add new item
- `itemremove <name>` - Remove item
- `itemreset <name> [price=X] [qty=Y]` - Update item
- `itemsell <name> <qty> [notes]` - Quick sale
- `itemloan <name> <note>` - Loan/checkout an item
- `itemreturn <transaction_id>` - Reverse transaction
- `itemlist` - View all inventory
- `itemstats` - Daily statistics
## Configuration
Add to your `config.ini`:
```ini
[inventory]
enabled = True
inventory_db = data/inventory.db
# Set to True to disable penny precision and round to nickels (USA cash sales)
# When True: cash sales round down, taxed sales round up to nearest $0.05
# When False (default): normal penny precision ($0.01)
disable_penny = False
```
## Commands Reference
### Item Management
#### Add Item
```
itemadd <name> <price> <quantity> [location]
```
Adds a new item to inventory.
**Examples:**
```
itemadd Radio 149.99 5 Shelf-A
itemadd Battery 12.50 20 Warehouse
itemadd Water 1.00 100
```
#### Remove Item
```
itemremove <name>
```
Removes an item from inventory (also removes from all carts).
**Examples:**
```
itemremove Radio
itemremove "First Aid Kit"
```
#### Update Item
```
itemreset <name> [price=X] [qty=Y]
```
Updates item price and/or quantity.
**Examples:**
```
itemreset Radio price=139.99
itemreset Battery qty=50
itemreset Water price=0.95 qty=200
```
#### Quick Sale
```
itemsell <name> <quantity> [notes]
```
Sell directly without using cart (for quick transactions).
**Examples:**
```
itemsell Battery 2
itemsell Water 10 Emergency supply
itemsell Radio 1 Field unit sale
```
#### Return Transaction
```
itemreturn <transaction_id>
```
Reverse a transaction and return items to inventory.
**Examples:**
```
itemreturn 123
itemreturn 45
```
#### List Inventory
```
itemlist
```
Shows all items with prices, quantities, and total inventory value.
**Example Response:**
```
📦 Inventory:
Radio: $149.99 x 5 @ Shelf-A = $749.95
Battery: $12.50 x 20 @ Warehouse = $250.00
Water: $1.00 x 100 = $100.00
Total Value: $1,099.95
```
#### Statistics
```
itemstats
```
Shows today's sales performance.
**Example Response:**
```
📊 Today's Stats:
Sales: 15
Revenue: $423.50
Hot Item: Battery (8 sold)
```
### Cart System
#### Add to Cart
```
cartadd <name> <quantity>
```
Add items to your shopping cart.
**Examples:**
```
cartadd Radio 2
cartadd Battery 4
cartadd Water 12
```
#### Remove from Cart
```
cartremove <name>
```
Remove items from cart.
**Examples:**
```
cartremove Radio
cartremove Battery
```
#### View Cart
```
cart
cartlist
```
Display your current cart contents and total.
**Example Response:**
```
🛒 Your Cart:
Radio: $149.99 x 2 = $299.98
Battery: $12.50 x 4 = $50.00
Total: $349.98
```
#### Complete Transaction
```
cartbuy [notes]
cartsell [notes]
```
Process the cart as a transaction. Use `cartbuy` for purchases (adds to inventory) or `cartsell` for sales (removes from inventory).
**Examples:**
```
cartsell Customer purchase
cartbuy Restocking supplies
cartsell Event merchandise
```
#### Clear Cart
```
cartclear
```
Empty your shopping cart without completing a transaction.
## Use Cases
### 1. Event Merchandise Sales
Perfect for festivals, hamfests, or community events:
```
# Setup inventory
itemadd Tshirt 20.00 50 Booth-A
itemadd Hat 15.00 30 Booth-A
itemadd Sticker 5.00 100 Booth-B
# Customer transaction
cartadd Tshirt 2
cartadd Hat 1
cartsell Festival sale
# Check daily performance
itemstats
```
### 2. Emergency Supply Tracking
Track supplies during disaster response:
```
# Add emergency supplies
itemadd Water 0.00 500 Warehouse-1
itemadd MRE 0.00 200 Warehouse-1
itemadd Blanket 0.00 100 Warehouse-2
# Distribute supplies
itemsell Water 50 Red Cross distribution
itemsell MRE 20 Family shelter
# Check remaining inventory
itemlist
```
### 3. Field Equipment Management
Manage tools and equipment in remote locations:
```
# Track equipment
itemadd Generator 500.00 3 Base-Camp
itemadd Radio 200.00 10 Equipment-Room
itemadd Battery 15.00 50 Supply-Closet
# Equipment checkout
itemsell Generator 1 Field deployment
itemsell Radio 5 Survey team
# Monitor inventory
itemlist
itemstats
```
### 4. Community Supply Exchange
Facilitate supply exchanges within a community:
```
# Add community items
itemadd Seeds 2.00 100 Community-Garden
itemadd Firewood 10.00 20 Storage-Shed
# Member transactions
cartadd Seeds 5
cartadd Firewood 2
cartsell Member-123 purchase
```
## Penny Rounding (USA Mode)
When `disable_penny = True` is set in the configuration, the system rounds cash totals to the nearest nickel instead of using penny precision.
### Cash Sales (Round Down)
- $10.47 → $10.45
- $10.48 → $10.45
- $10.49 → $10.45
### Taxed Sales (Round Up)
- $10.47 → $10.50
- $10.48 → $10.50
- $10.49 → $10.50
This follows common USA practice where pennies are not used in cash transactions.
## Database Schema
The system uses SQLite with four tables:
### items
```sql
CREATE TABLE items (
item_id INTEGER PRIMARY KEY AUTOINCREMENT,
item_name TEXT UNIQUE NOT NULL,
item_price REAL NOT NULL,
item_quantity INTEGER NOT NULL DEFAULT 0,
location TEXT,
created_date TEXT,
updated_date TEXT
)
```
### transactions
```sql
CREATE TABLE transactions (
transaction_id INTEGER PRIMARY KEY AUTOINCREMENT,
transaction_type TEXT NOT NULL,
transaction_date TEXT NOT NULL,
transaction_time TEXT NOT NULL,
user_name TEXT,
total_amount REAL NOT NULL,
notes TEXT
)
```
### transaction_items
```sql
CREATE TABLE transaction_items (
id INTEGER PRIMARY KEY AUTOINCREMENT,
transaction_id INTEGER NOT NULL,
item_id INTEGER NOT NULL,
quantity INTEGER NOT NULL,
price_at_sale REAL NOT NULL,
FOREIGN KEY (transaction_id) REFERENCES transactions(transaction_id),
FOREIGN KEY (item_id) REFERENCES items(item_id)
)
```
### carts
```sql
CREATE TABLE carts (
cart_id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL,
item_id INTEGER NOT NULL,
quantity INTEGER NOT NULL,
added_date TEXT,
FOREIGN KEY (item_id) REFERENCES items(item_id)
)
```
## Security Considerations
- Users on the `bbs_ban_list` cannot use inventory commands
- Each user has their own cart (identified by node ID)
- Transactions are logged with user information for accountability
- All database operations use parameterized queries to prevent SQL injection
## Tips and Best Practices
1. **Regular Inventory Checks**: Use `itemlist` regularly to monitor stock levels
2. **Descriptive Notes**: Add notes to transactions for better tracking
3. **Location Tags**: Use consistent location naming for better organization
4. **Daily Reviews**: Check `itemstats` at the end of each day
5. **Transaction IDs**: Keep track of transaction IDs for potential returns
6. **Quantity Updates**: Use `itemreset` to adjust inventory after physical counts
7. **Cart Cleanup**: Use `cartclear` if you change your mind before completing a sale
## Troubleshooting
### Item Already Exists
If you get "Item already exists" when using `itemadd`, use `itemreset` instead to update the existing item.
### Insufficient Quantity
If you see "Insufficient quantity" error, check available stock with `itemlist` before attempting the sale.
### Transaction Not Found
If `itemreturn` fails, verify the transaction ID exists. Use recent transaction logs to find valid IDs.
### Cart Not Showing Items
Each user has their own cart. Make sure you're using your own node to view your cart.
## Support
For issues or feature requests, please file an issue on the GitHub repository.

747
modules/inventory.py Normal file
View File

@@ -0,0 +1,747 @@
# Inventory and Point of Sale module for the bot
# K7MHI Kelly Keeton 2024
# Enhanced POS system with cart management and inventory tracking
import sqlite3
import time
from decimal import Decimal, ROUND_DOWN, ROUND_HALF_UP, ROUND_UP

from modules.log import logger
from modules.settings import inventory_db, disable_penny, bbs_ban_list
# Command keywords this module registers with the bot's message router.
trap_list_inventory = ("item", "itemlist", "itemloan", "itemsell", "itemreturn", "itemadd", "itemremove",
                       "itemreset", "itemstats", "cart", "cartadd", "cartremove", "cartlist",
                       "cartbuy", "cartsell", "cartclear")
def initialize_inventory_database():
    """Initialize the inventory database with all necessary tables.

    Creates (if absent) the four tables used by this module: items,
    transactions, transaction_items, and carts.

    Returns:
        True on success, False if any sqlite operation failed.
    """
    try:
        conn = sqlite3.connect(inventory_db)
        c = conn.cursor()
        # Items table - stores inventory items
        logger.debug("System: Inventory: Initializing database...")
        c.execute('''CREATE TABLE IF NOT EXISTS items
                     (item_id INTEGER PRIMARY KEY AUTOINCREMENT,
                      item_name TEXT UNIQUE NOT NULL,
                      item_price REAL NOT NULL,
                      item_quantity INTEGER NOT NULL DEFAULT 0,
                      location TEXT,
                      created_date TEXT,
                      updated_date TEXT)''')
        # Transactions table - stores sales/purchases
        c.execute('''CREATE TABLE IF NOT EXISTS transactions
                     (transaction_id INTEGER PRIMARY KEY AUTOINCREMENT,
                      transaction_type TEXT NOT NULL,
                      transaction_date TEXT NOT NULL,
                      transaction_time TEXT NOT NULL,
                      user_name TEXT,
                      total_amount REAL NOT NULL,
                      notes TEXT)''')
        # Transaction items table - stores items in each transaction
        c.execute('''CREATE TABLE IF NOT EXISTS transaction_items
                     (id INTEGER PRIMARY KEY AUTOINCREMENT,
                      transaction_id INTEGER NOT NULL,
                      item_id INTEGER NOT NULL,
                      quantity INTEGER NOT NULL,
                      price_at_sale REAL NOT NULL,
                      FOREIGN KEY (transaction_id) REFERENCES transactions(transaction_id),
                      FOREIGN KEY (item_id) REFERENCES items(item_id))''')
        # Carts table - stores temporary shopping carts (one cart per user_id)
        c.execute('''CREATE TABLE IF NOT EXISTS carts
                     (cart_id INTEGER PRIMARY KEY AUTOINCREMENT,
                      user_id TEXT NOT NULL,
                      item_id INTEGER NOT NULL,
                      quantity INTEGER NOT NULL,
                      added_date TEXT,
                      FOREIGN KEY (item_id) REFERENCES items(item_id))''')
        conn.commit()
        conn.close()
        logger.info("Inventory: Database initialized successfully")
        return True
    except Exception as e:
        logger.error(f"Inventory: Failed to initialize database: {e}")
        return False
def round_price(amount, is_taxed_sale=False):
    """Round a price according to the penny-rounding configuration.

    Args:
        amount: numeric price to round.
        is_taxed_sale: when penny rounding is enabled, True rounds UP to the
            next nickel (taxed sale) and False rounds DOWN (cash sale).

    Returns:
        float rounded to $0.01 normally, or to $0.05 when disable_penny is
        set in modules.settings.
    """
    if not disable_penny:
        # Normal mode: penny precision, ties round away from zero
        return float(Decimal(str(amount)).quantize(Decimal('0.01'), rounding=ROUND_HALF_UP))
    # Penny rounding logic: snap to a nickel boundary
    decimal_amount = Decimal(str(amount))
    if is_taxed_sale:
        # Round up for taxed sales ($10.47 -> $10.50). The previous
        # ROUND_HALF_UP rounded $10.47 DOWN to $10.45, contradicting the
        # documented "taxed sales round up" behavior.
        return float(decimal_amount.quantize(Decimal('0.05'), rounding=ROUND_UP))
    else:
        # Round down for cash sales ($10.47 -> $10.45)
        return float(decimal_amount.quantize(Decimal('0.05'), rounding=ROUND_DOWN))
def add_item(name, price, quantity=0, location=""):
    """Add a new item to inventory.

    Args:
        name: unique item name (item_name has a UNIQUE constraint).
        price: unit price.
        quantity: initial stock count (default 0).
        location: optional warehouse/location tag.

    Returns:
        A user-facing status string (success or error description).
    """
    conn = sqlite3.connect(inventory_db)
    c = conn.cursor()
    current_date = time.strftime("%Y-%m-%d")
    try:
        # Check if item already exists
        c.execute("SELECT item_id FROM items WHERE item_name = ?", (name,))
        existing = c.fetchone()
        if existing:
            conn.close()
            return f"Item '{name}' already exists. Use itemreset to update."
        c.execute("""INSERT INTO items (item_name, item_price, item_quantity, location, created_date, updated_date)
                     VALUES (?, ?, ?, ?, ?, ?)""",
                  (name, price, quantity, location, current_date, current_date))
        conn.commit()
        conn.close()
        return f"✅ Item added: {name} - ${price:.2f} - Qty: {quantity}"
    except sqlite3.OperationalError as e:
        # First use: the schema may not exist yet. Close this connection
        # before retrying (the original leaked it), and retry only if the
        # initialization actually succeeded — otherwise the original could
        # recurse indefinitely on a persistent failure.
        conn.close()
        if "no such table" in str(e) and initialize_inventory_database():
            return add_item(name, price, quantity, location)
        logger.error(f"Inventory: Error adding item: {e}")
        return "Error adding item."
    except Exception as e:
        conn.close()
        logger.error(f"Inventory: Error adding item: {e}")
        return "Error adding item."
def remove_item(name):
    """Delete the named item from inventory.

    Returns:
        A user-facing status string; unknown names report "not found".
    """
    connection = sqlite3.connect(inventory_db)
    cursor = connection.cursor()
    try:
        cursor.execute("DELETE FROM items WHERE item_name = ?", (name,))
        deleted_rows = cursor.rowcount
        if deleted_rows == 0:
            # Nothing matched the given name; no commit needed
            connection.close()
            return f"Item '{name}' not found."
        connection.commit()
        connection.close()
        return f"🗑️ Item removed: {name}"
    except Exception as e:
        connection.close()
        logger.error(f"Inventory: Error removing item: {e}")
        return "Error removing item."
def reset_item(name, price=None, quantity=None):
    """Update item price and/or quantity.

    Args:
        name: existing item name.
        price: new unit price, or None to leave unchanged.
        quantity: new stock count, or None to leave unchanged.

    Returns:
        A user-facing status string.
    """
    conn = sqlite3.connect(inventory_db)
    c = conn.cursor()
    current_date = time.strftime("%Y-%m-%d")
    try:
        # Check if item exists
        c.execute("SELECT item_price, item_quantity FROM items WHERE item_name = ?", (name,))
        item = c.fetchone()
        if not item:
            conn.close()
            return f"Item '{name}' not found."
        # Build the SET clause dynamically from whichever fields were given.
        # Only column names are interpolated; values go through ? parameters.
        updates = []
        params = []
        if price is not None:
            updates.append("item_price = ?")
            params.append(price)
        if quantity is not None:
            updates.append("item_quantity = ?")
            params.append(quantity)
        if not updates:
            conn.close()
            return "No updates specified."
        # Always stamp the modification date
        updates.append("updated_date = ?")
        params.append(current_date)
        params.append(name)
        query = f"UPDATE items SET {', '.join(updates)} WHERE item_name = ?"
        c.execute(query, params)
        conn.commit()
        conn.close()
        # Echo back only the fields that actually changed
        update_msg = []
        if price is not None:
            update_msg.append(f"Price: ${price:.2f}")
        if quantity is not None:
            update_msg.append(f"Qty: {quantity}")
        return f"🔄 Item updated: {name} - {' - '.join(update_msg)}"
    except Exception as e:
        conn.close()
        logger.error(f"Inventory: Error resetting item: {e}")
        return "Error updating item."
def sell_item(name, quantity, user_name="", notes=""):
    """Sell an item: decrement stock and record a SALE transaction.

    Args:
        name: item name to sell.
        quantity: units to sell; must not exceed current stock.
        user_name: optional identifier recorded on the transaction.
        notes: optional free-text note recorded on the transaction.

    Returns:
        A user-facing status string (sale summary or error description).
    """
    conn = sqlite3.connect(inventory_db)
    c = conn.cursor()
    current_date = time.strftime("%Y-%m-%d")
    current_time = time.strftime("%H:%M:%S")
    try:
        # Get item details
        c.execute("SELECT item_id, item_price, item_quantity FROM items WHERE item_name = ?", (name,))
        item = c.fetchone()
        if not item:
            conn.close()
            return f"Item '{name}' not found."
        item_id, price, current_qty = item
        if current_qty < quantity:
            conn.close()
            return f"Insufficient quantity. Available: {current_qty}"
        # Calculate total with rounding (taxed-sale mode, so nickel rounding
        # goes up when disable_penny is enabled in settings)
        total = round_price(price * quantity, is_taxed_sale=True)
        # Create transaction
        c.execute("""INSERT INTO transactions (transaction_type, transaction_date, transaction_time,
                     user_name, total_amount, notes)
                     VALUES (?, ?, ?, ?, ?, ?)""",
                  ("SALE", current_date, current_time, user_name, total, notes))
        transaction_id = c.lastrowid
        # Add transaction item (price_at_sale preserves the price history)
        c.execute("""INSERT INTO transaction_items (transaction_id, item_id, quantity, price_at_sale)
                     VALUES (?, ?, ?, ?)""",
                  (transaction_id, item_id, quantity, price))
        # Update inventory
        c.execute("UPDATE items SET item_quantity = item_quantity - ?, updated_date = ? WHERE item_id = ?",
                  (quantity, current_date, item_id))
        conn.commit()
        conn.close()
        return f"💰 Sale: {quantity}x {name} - Total: ${total:.2f}"
    except Exception as e:
        conn.close()
        logger.error(f"Inventory: Error selling item: {e}")
        return "Error processing sale."
def return_item(transaction_id):
    """Return items from a transaction (reverse the sale or loan).

    Restores each item's quantity, then deletes the transaction and its
    transaction_items rows — so a returned loan no longer appears in
    get_loans_for_items(), and the audit record for this transaction is
    removed rather than marked reversed.

    Args:
        transaction_id: id of the transaction to reverse.

    Returns:
        A user-facing status string.
    """
    conn = sqlite3.connect(inventory_db)
    c = conn.cursor()
    current_date = time.strftime("%Y-%m-%d")
    try:
        # Get transaction details
        c.execute("SELECT transaction_type FROM transactions WHERE transaction_id = ?", (transaction_id,))
        transaction = c.fetchone()
        if not transaction:
            conn.close()
            return f"Transaction {transaction_id} not found."
        transaction_type = transaction[0]
        # Get items in transaction
        c.execute("""SELECT ti.item_id, ti.quantity, i.item_name
                     FROM transaction_items ti
                     JOIN items i ON ti.item_id = i.item_id
                     WHERE ti.transaction_id = ?""", (transaction_id,))
        items = c.fetchall()
        if not items:
            conn.close()
            return f"No items found for transaction {transaction_id}."
        # Return items to inventory
        for item_id, quantity, item_name in items:
            c.execute("UPDATE items SET item_quantity = item_quantity + ?, updated_date = ? WHERE item_id = ?",
                      (quantity, current_date, item_id))
        # Remove transaction and transaction_items
        c.execute("DELETE FROM transactions WHERE transaction_id = ?", (transaction_id,))
        c.execute("DELETE FROM transaction_items WHERE transaction_id = ?", (transaction_id,))
        conn.commit()
        conn.close()
        # Word the reply to match the original transaction type
        if transaction_type == "LOAN":
            return f"↩️ Loan {transaction_id} returned. Item(s) back in inventory."
        else:
            return f"↩️ Transaction {transaction_id} reversed. Items returned to inventory."
    except Exception as e:
        conn.close()
        logger.error(f"Inventory: Error returning item: {e}")
        return "Error processing return."
def loan_item(name, user_name="", note=""):
    """Loan an item (checkout/loan to someone, record transaction).

    One unit per loan; the transaction amount is recorded as 0.0 since no
    money changes hands. The note is displayed by list_items() while the
    loan remains active (i.e. until return_item() deletes the transaction).

    Args:
        name: item name to loan out.
        user_name: optional identifier recorded on the transaction.
        note: free-text note (e.g. who borrowed it).

    Returns:
        A user-facing status string including the transaction id needed
        for itemreturn.
    """
    conn = sqlite3.connect(inventory_db)
    c = conn.cursor()
    current_date = time.strftime("%Y-%m-%d")
    current_time = time.strftime("%H:%M:%S")
    try:
        # Get item details
        c.execute("SELECT item_id, item_price, item_quantity FROM items WHERE item_name = ?", (name,))
        item = c.fetchone()
        if not item:
            conn.close()
            return f"Item '{name}' not found."
        item_id, price, current_qty = item
        if current_qty < 1:
            conn.close()
            return f"Insufficient quantity. Available: {current_qty}"
        # Create loan transaction (quantity always 1 for now)
        c.execute("""INSERT INTO transactions (transaction_type, transaction_date, transaction_time,
                     user_name, total_amount, notes)
                     VALUES (?, ?, ?, ?, ?, ?)""",
                  ("LOAN", current_date, current_time, user_name, 0.0, note))
        transaction_id = c.lastrowid
        # Add transaction item
        c.execute("""INSERT INTO transaction_items (transaction_id, item_id, quantity, price_at_sale)
                     VALUES (?, ?, ?, ?)""",
                  (transaction_id, item_id, 1, price))
        # Update inventory
        c.execute("UPDATE items SET item_quantity = item_quantity - 1, updated_date = ? WHERE item_id = ?",
                  (current_date, item_id))
        conn.commit()
        conn.close()
        return f"🔖 Loaned: {name} (note: {note}) [Transaction #{transaction_id}]"
    except Exception as e:
        conn.close()
        logger.error(f"Inventory: Error loaning item: {e}")
        return "Error processing loan."
def get_loans_for_items():
    """Map each currently-loaned item name to a list of its loan notes.

    A loan counts as active while its transaction row still exists
    (return_item deletes the row). Returns {} when nothing is on loan or
    on any database error.
    """
    connection = sqlite3.connect(inventory_db)
    cursor = connection.cursor()
    try:
        # Find all active loans (not returned)
        cursor.execute("""
        SELECT i.item_name, t.notes
        FROM transactions t
        JOIN transaction_items ti ON t.transaction_id = ti.transaction_id
        JOIN items i ON ti.item_id = i.item_id
        WHERE t.transaction_type = 'LOAN'
        """)
        records = cursor.fetchall()
        connection.close()
        by_item = {}
        for item_name, note in records:
            if item_name not in by_item:
                by_item[item_name] = []
            by_item[item_name].append(note)
        return by_item
    except Exception as e:
        connection.close()
        logger.error(f"Inventory: Error fetching loans: {e}")
        return {}
def list_items():
    """List all items in inventory, with loan info if any"""
    conn = sqlite3.connect(inventory_db)
    cursor = conn.cursor()
    try:
        cursor.execute("SELECT item_name, item_price, item_quantity, location FROM items ORDER BY item_name")
        stock = cursor.fetchall()
        conn.close()
        if not stock:
            return "No items in inventory."
        # Annotate each item line with any outstanding loan notes.
        loans = get_loans_for_items()
        lines = ["📦 Inventory:"]
        grand_total = 0
        for item_name, unit_price, qty, location in stock:
            line_value = unit_price * qty
            grand_total += line_value
            place = f" @ {location}" if location else ""
            loan_tags = "".join(f" [loan: {note}]" for note in loans.get(item_name, []))
            lines.append(f"{item_name}: ${unit_price:.2f} x {qty}{place} = ${line_value:.2f}{loan_tags}")
        lines.append(f"\nTotal Value: ${grand_total:.2f}")
        return "\n".join(lines).rstrip()
    except Exception as e:
        conn.close()
        logger.error(f"Inventory: Error listing items: {e}")
        return "Error listing items."
def get_stats():
    """Get sales statistics"""
    conn = sqlite3.connect(inventory_db)
    cursor = conn.cursor()
    try:
        today = time.strftime("%Y-%m-%d")
        # Count and sum today's SALE transactions.
        cursor.execute("""SELECT COUNT(*), SUM(total_amount)
            FROM transactions
            WHERE transaction_type = 'SALE' AND transaction_date = ?""",
            (today,))
        summary_row = cursor.fetchone()
        sales_count = summary_row[0] or 0
        revenue = summary_row[1] or 0
        # Best seller today by total quantity sold.
        cursor.execute("""SELECT i.item_name, SUM(ti.quantity) as total_qty
            FROM transaction_items ti
            JOIN transactions t ON ti.transaction_id = t.transaction_id
            JOIN items i ON ti.item_id = i.item_id
            WHERE t.transaction_date = ? AND t.transaction_type = 'SALE'
            GROUP BY i.item_name
            ORDER BY total_qty DESC
            LIMIT 1""", (today,))
        best_seller = cursor.fetchone()
        conn.close()
        lines = [
            "📊 Today's Stats:",
            f"Sales: {sales_count}",
            f"Revenue: ${revenue:.2f}",
        ]
        if best_seller:
            lines.append(f"Hot Item: {best_seller[0]} ({best_seller[1]} sold)")
        else:
            lines.append("Hot Item: None")
        return "\n".join(lines)
    except Exception as e:
        conn.close()
        logger.error(f"Inventory: Error getting stats: {e}")
        return "Error getting stats."
def add_to_cart(user_id, item_name, quantity):
    """Add item to user's cart"""
    conn = sqlite3.connect(inventory_db)
    cursor = conn.cursor()
    today = time.strftime("%Y-%m-%d")
    try:
        # Resolve the item and its available stock.
        cursor.execute("SELECT item_id, item_quantity FROM items WHERE item_name = ?", (item_name,))
        row = cursor.fetchone()
        if row is None:
            conn.close()
            return f"Item '{item_name}' not found."
        item_id, available_qty = row
        # The stock check covers what is already in the cart plus the new amount.
        cursor.execute("SELECT quantity FROM carts WHERE user_id = ? AND item_id = ?", (user_id, item_id))
        in_cart = cursor.fetchone()
        requested_total = quantity + (in_cart[0] if in_cart else 0)
        if requested_total > available_qty:
            conn.close()
            return f"Insufficient quantity. Available: {available_qty}"
        if in_cart:
            cursor.execute("UPDATE carts SET quantity = ? WHERE user_id = ? AND item_id = ?",
                (requested_total, user_id, item_id))
        else:
            cursor.execute("INSERT INTO carts (user_id, item_id, quantity, added_date) VALUES (?, ?, ?, ?)",
                (user_id, item_id, quantity, today))
        conn.commit()
        conn.close()
        return f"🛒 Added to cart: {quantity}x {item_name}"
    except Exception as e:
        conn.close()
        logger.error(f"Inventory: Error adding to cart: {e}")
        return "Error adding to cart."
def remove_from_cart(user_id, item_name):
    """Remove item from user's cart"""
    conn = sqlite3.connect(inventory_db)
    cursor = conn.cursor()
    try:
        # Subquery resolves the item name to its id; deletes nothing when
        # the item is unknown or not in this user's cart.
        cursor.execute("""DELETE FROM carts
            WHERE user_id = ? AND item_id = (SELECT item_id FROM items WHERE item_name = ?)""",
            (user_id, item_name))
        if cursor.rowcount == 0:
            conn.close()
            return f"Item '{item_name}' not in cart."
        conn.commit()
        conn.close()
        return f"🗑️ Removed from cart: {item_name}"
    except Exception as e:
        conn.close()
        logger.error(f"Inventory: Error removing from cart: {e}")
        return "Error removing from cart."
def list_cart(user_id):
    """List items in user's cart"""
    conn = sqlite3.connect(inventory_db)
    cursor = conn.cursor()
    try:
        cursor.execute("""SELECT i.item_name, i.item_price, c.quantity
            FROM carts c
            JOIN items i ON c.item_id = i.item_id
            WHERE c.user_id = ?""", (user_id,))
        cart_rows = cursor.fetchall()
        conn.close()
        if not cart_rows:
            return "🛒 Cart is empty."
        lines = ["🛒 Your Cart:"]
        running_total = 0
        for item_name, unit_price, qty in cart_rows:
            line_total = unit_price * qty
            running_total += line_total
            lines.append(f"{item_name}: ${unit_price:.2f} x {qty} = ${line_total:.2f}")
        # Apply the configured rounding/tax policy to the displayed total.
        running_total = round_price(running_total, is_taxed_sale=True)
        lines.append(f"\nTotal: ${running_total:.2f}")
        return "\n".join(lines)
    except Exception as e:
        conn.close()
        logger.error(f"Inventory: Error listing cart: {e}")
        return "Error listing cart."
def checkout_cart(user_id, user_name="", transaction_type="SALE", notes=""):
    """Process cart as a transaction.

    Creates one transactions row, one transaction_items row per cart line,
    adjusts item stock (subtract for SALE, add for BUY), and clears the cart.

    :param user_id: cart owner key (node id as a string)
    :param user_name: display name recorded on the transaction
    :param transaction_type: "SALE" (stock out) or "BUY" (stock in)
    :param notes: free-form note stored with the transaction
    :return: status message for the user
    """
    conn = sqlite3.connect(inventory_db)
    c = conn.cursor()
    current_date = time.strftime("%Y-%m-%d")
    current_time = time.strftime("%H:%M:%S")
    try:
        # Get cart items with current stock for validation
        c.execute("""SELECT i.item_id, i.item_name, i.item_price, c.quantity, i.item_quantity
            FROM carts c
            JOIN items i ON c.item_id = i.item_id
            WHERE c.user_id = ?""", (user_id,))
        cart_items = c.fetchall()
        if not cart_items:
            conn.close()
            return "Cart is empty."
        # Verify sufficient stock — but only for SALE. A BUY *adds* quantity
        # to inventory, so current stock levels must not block it (the old
        # unconditional check wrongly rejected restocking beyond on-hand qty).
        if transaction_type == "SALE":
            for item_id, name, price, cart_qty, stock_qty in cart_items:
                if stock_qty < cart_qty:
                    conn.close()
                    return f"Insufficient quantity for '{name}'. Available: {stock_qty}"
        # Calculate total; tax/rounding policy applies only to sales
        total = sum(price * qty for _, _, price, qty, _ in cart_items)
        total = round_price(total, is_taxed_sale=(transaction_type == "SALE"))
        # Create the parent transaction record
        c.execute("""INSERT INTO transactions (transaction_type, transaction_date, transaction_time,
            user_name, total_amount, notes)
            VALUES (?, ?, ?, ?, ?, ?)""",
            (transaction_type, current_date, current_time, user_name, total, notes))
        transaction_id = c.lastrowid
        # Process each item
        for item_id, name, price, quantity, _ in cart_items:
            # Record the unit price at time of sale for later returns/audits
            c.execute("""INSERT INTO transaction_items (transaction_id, item_id, quantity, price_at_sale)
                VALUES (?, ?, ?, ?)""",
                (transaction_id, item_id, quantity, price))
            # Update inventory (subtract for SALE, add for BUY)
            if transaction_type == "SALE":
                c.execute("UPDATE items SET item_quantity = item_quantity - ?, updated_date = ? WHERE item_id = ?",
                    (quantity, current_date, item_id))
            else:  # BUY
                c.execute("UPDATE items SET item_quantity = item_quantity + ?, updated_date = ? WHERE item_id = ?",
                    (quantity, current_date, item_id))
        # Clear cart only after everything above succeeded
        c.execute("DELETE FROM carts WHERE user_id = ?", (user_id,))
        conn.commit()
        conn.close()
        emoji = "💰" if transaction_type == "SALE" else "📦"
        return f"{emoji} Transaction #{transaction_id} completed: ${total:.2f}"
    except Exception as e:
        conn.close()
        logger.error(f"Inventory: Error processing cart: {e}")
        return "Error processing cart."
def clear_cart(user_id):
    """Clear user's cart"""
    conn = sqlite3.connect(inventory_db)
    cursor = conn.cursor()
    try:
        # Delete every cart row for this user; an empty cart is a no-op.
        cursor.execute("DELETE FROM carts WHERE user_id = ?", (user_id,))
        conn.commit()
        return "🗑️ Cart cleared."
    except Exception as e:
        logger.error(f"Inventory: Error clearing cart: {e}")
        return "Error clearing cart."
    finally:
        conn.close()
def process_inventory_command(nodeID, message, name="none"):
    """Process inventory and POS commands.

    Dispatches a plain-text command from a mesh node to the inventory /
    cart helpers above. Carts are keyed by the sending node id (as string).

    :param nodeID: sender's node id; also used as the cart key and checked
        against the ban list
    :param message: raw command text, e.g. "itemadd widget 5 1.50 shelfA"
    :param name: sender's display name, recorded on sell/loan transactions
    :return: human-readable response string
    """
    # Check ban list
    if str(nodeID) in bbs_ban_list:
        logger.warning("System: Inventory attempt from the ban list")
        return "Unable to process command"
    message_lower = message.lower()
    parts = message.split()
    try:
        # Help command
        # NOTE(review): a '?' ANYWHERE in the message triggers help — even
        # mid-note (e.g. "itemloan drill back tomorrow?"); confirm intended.
        if "?" in message_lower:
            return get_inventory_help()
        # Item management commands
        if message_lower.startswith("itemadd "):
            # itemadd <name> <qty> [price] [location]
            if len(parts) < 3:
                return "Usage: itemadd <name> <qty> [price] [location]"
            item_name = parts[1]
            try:
                quantity = int(parts[2])
            except ValueError:
                return "Invalid quantity."
            price = 0.0
            location = ""
            if len(parts) > 3:
                try:
                    price = float(parts[3])
                    location = " ".join(parts[4:]) if len(parts) > 4 else ""
                except ValueError:
                    # If price is omitted, treat parts[3] as location
                    price = 0.0
                    location = " ".join(parts[3:])
            return add_item(item_name, price, quantity, location)
        elif message_lower.startswith("itemremove "):
            # Item names may contain spaces, so rejoin the remainder.
            item_name = " ".join(parts[1:])
            return remove_item(item_name)
        elif message_lower.startswith("itemreset "):
            # itemreset name [price=X] [quantity=Y]
            if len(parts) < 2:
                return "Usage: itemreset <name> [price=X] [quantity=Y]"
            item_name = parts[1]
            price = None
            quantity = None
            # Keyword-style arguments; unrecognized tokens are ignored.
            for part in parts[2:]:
                if part.startswith("price="):
                    try:
                        price = float(part.split("=")[1])
                    except ValueError:
                        return "Invalid price value."
                elif part.startswith("quantity=") or part.startswith("qty="):
                    try:
                        quantity = int(part.split("=")[1])
                    except ValueError:
                        return "Invalid quantity value."
            return reset_item(item_name, price, quantity)
        elif message_lower.startswith("itemsell "):
            # itemsell name quantity [notes]
            if len(parts) < 3:
                return "Usage: itemsell <name> <quantity> [notes]"
            item_name = parts[1]
            try:
                quantity = int(parts[2])
                notes = " ".join(parts[3:]) if len(parts) > 3 else ""
                return sell_item(item_name, quantity, name, notes)
            except ValueError:
                return "Invalid quantity."
        elif message_lower.startswith("itemreturn "):
            # itemreturn transaction_id
            if len(parts) < 2:
                return "Usage: itemreturn <transaction_id>"
            try:
                transaction_id = int(parts[1])
                return return_item(transaction_id)
            except ValueError:
                return "Invalid transaction ID."
        elif message_lower.startswith("itemloan "):
            # itemloan <name> <note>
            if len(parts) < 3:
                return "Usage: itemloan <name> <note>"
            item_name = parts[1]
            note = " ".join(parts[2:])
            return loan_item(item_name, name, note)
        elif message_lower == "itemlist":
            return list_items()
        elif message_lower == "itemstats":
            return get_stats()
        # Cart commands
        elif message_lower.startswith("cartadd "):
            # cartadd name quantity
            if len(parts) < 3:
                return "Usage: cartadd <name> <quantity>"
            item_name = parts[1]
            try:
                quantity = int(parts[2])
                return add_to_cart(str(nodeID), item_name, quantity)
            except ValueError:
                return "Invalid quantity."
        elif message_lower.startswith("cartremove "):
            item_name = " ".join(parts[1:])
            return remove_from_cart(str(nodeID), item_name)
        elif message_lower == "cartlist" or message_lower == "cart":
            return list_cart(str(nodeID))
        elif message_lower.startswith("cartbuy") or message_lower.startswith("cartsell"):
            # NOTE(review): this substring test scans the WHOLE message, so
            # "cartsell buy-back" would be classified BUY — verify intent.
            transaction_type = "BUY" if "buy" in message_lower else "SALE"
            notes = " ".join(parts[1:]) if len(parts) > 1 else ""
            return checkout_cart(str(nodeID), name, transaction_type, notes)
        elif message_lower == "cartclear":
            return clear_cart(str(nodeID))
        else:
            return "Invalid command. Send 'item?' for help."
    except Exception as e:
        logger.error(f"Inventory: Error processing command: {e}")
        return "Error processing command."
def get_inventory_help():
"""Return help text for inventory commands"""
return (
"📦 Inventory Commands:\n"
" itemadd <name> <qty> [price] [loc]\n"
" itemremove <name>\n"
" itemreset name> <qty> [price] [loc]\n"
" itemsell <name> <qty> [notes]\n"
" itemloan <name> <note>\n"
" itemreturn <transaction_id>\n"
" itemlist\n"
" itemstats\n"
"\n"
"🛒 Cart Commands:\n"
" cartadd <name> <qty>\n"
" cartremove <name>\n"
" cartlist\n"
" cartbuy/cartsell [notes]\n"
" cartclear\n"
)

88
modules/llm.md Normal file
View File

@@ -0,0 +1,88 @@
# How do I use this thing?
This is not yet a full turnkey setup.
For Ollama to work, the command line `ollama run 'model'` needs to work properly. Ensure you have enough RAM and that your GPU is working as expected. The default model for this project is `gemma3:270m`. Ollama can also run remotely ([Ollama Server](https://github.com/ollama/ollama/blob/main/docs/faq.md#how-do-i-configure-ollama-server)); this setup works on a Raspberry Pi 5 (8GB) with a response time of 40 seconds or less.
# Ollama local
```bash
# bash
curl -fsSL https://ollama.com/install.sh | sh
# docker
docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -e OLLAMA_API_BASE_URL=http://host.docker.internal:11434 open-webui/open-webui
```
## Update /etc/systemd/system/ollama.service
https://github.com/ollama/ollama/issues/703
```ini
#service file addition not config.ini
# [Service]
Environment="OLLAMA_HOST=0.0.0.0:11434"
```
## validation
http://IP:11434
`Ollama is running`
## Docs
Note: for running the LLM in Docker with an [NVIDIA GPU](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/docker-specialized.html), the NVIDIA Container Toolkit is required for the container running Ollama.
---
# OpenWebUI (docker)
```bash
## ollama in docker
docker run -d -p 3000:8080 --gpus all -v open-webui:/app/backend/data --name open-webui ghcr.io/open-webui/open-webui:cuda
## external ollama
docker run -d -p 3000:8080 -e OLLAMA_BASE_URL=https://IP:11434 -v open-webui:/app/backend/data --name open-webui --restart always ghcr.io/open-webui/open-webui:main
```
Wait for the engine to build, then update the bot's config.ini:
```ini
# Use OpenWebUI instead of direct Ollama API (enables advanced RAG features)
useOpenWebUI = False
# OpenWebUI server URL (e.g., http://localhost:3000)
openWebUIURL = http://localhost:3000
# OpenWebUI API key/token (required when useOpenWebUI is True)
openWebUIAPIKey = sk-xxxx (see below for help)
```
## Validation
http://IP:3000
make a new admin user.
validate you have models imported or that the system is working for query.
make a new user for the bot
## API Key
- upper right settings for the user
- settings -> account
- get/create the API key for the user
## Troubleshooting
- make sure the OpenWebUI works from the bot node and loads (try lynx etc)
- make sure the model in config.ini is also loaded in OpenWebUI and you can use it
- make sure **OpenWebUI** can reach **Ollama IP** it should auto import the models
- Prefer IP addresses over hostnames like `localhost`, which may not resolve correctly from inside Docker.
- Check OpenWebUI and Ollama are working
- Go to Admin Settings within Open WebUI.
- Connections tab
- Ollama connection and click on the Manage (wrench icon)
- download models directly from the Ollama library
- **Once the model is downloaded or imported, it will become available for use within Open WebUI, allowing you to interact with it through the chat interface**
## Docs
[OpenWebUI Quick Start](https://docs.openwebui.com/getting-started/quick-start/)
[OpenWebUI API](https://docs.openwebui.com/getting-started/api-endpoints)
[OpenWebUI Ollama](https://docs.openwebui.com/getting-started/quick-start/starting-with-ollama/)
[Blog OpenWebUI on Pi](https://pimylifeup.com/raspberry-pi-open-webui/)
https://docs.openwebui.com/tutorials/tips/rag-tutorial#tutorial-configuring-rag-with-open-webui-documentation
https://docs.openwebui.com/features/plugin/
---

View File

@@ -2,30 +2,29 @@
# LLM Module for meshing-around
# This module is used to interact with LLM API to generate responses to user input
# K7MHI Kelly Keeton 2024
from modules.log import *
from modules.log import logger
from modules.settings import (llmModel, ollamaHostName, rawLLMQuery,
llmUseWikiContext, useOpenWebUI, openWebUIURL, openWebUIAPIKey, cmdBang, urlTimeoutSeconds, use_kiwix_server)
# Ollama Client
# https://github.com/ollama/ollama/blob/main/docs/faq.md#how-do-i-configure-ollama-server
import requests
import json
from datetime import datetime
if not rawLLMQuery:
# this may be removed in the future
from googlesearch import search # pip install googlesearch-python
if llmUseWikiContext or use_kiwix_server:
from modules.wiki import get_wikipedia_summary, get_kiwix_summary
# LLM System Variables
ollamaAPI = ollamaHostName + "/api/generate"
openWebUIChatAPI = openWebUIURL + "/api/chat/completions"
openWebUIOllamaProxy = openWebUIURL + "/ollama/api/generate"
tokens = 450 # max charcters for the LLM response, this is the max length of the response also in prompts
requestTruncation = True # if True, the LLM "will" truncate the response
openaiAPI = "https://api.openai.com/v1/completions" # not used, if you do push a enhancement!
requestTruncation = True # if True, the LLM "will" truncate the response
DEBUG_LLM = False # enable debug logging for LLM queries
# Used in the meshBotAI template
llmEnableHistory = True # enable last message history for the LLM model
llmContext_fromGoogle = True # enable context from google search results adds to compute time but really helps with responses accuracy
googleSearchResults = 3 # number of google search results to include in the context more results = more compute time
antiFloodLLM = []
llmChat_history = {}
trap_list_llm = ("ask:", "askai")
@@ -51,24 +50,6 @@ meshBotAI = """
"""
if llmContext_fromGoogle:
meshBotAI = meshBotAI + """
CONTEXT
The following is the location of the user
{location_name}
The following is for context around the prompt to help guide your response.
{context}
"""
else:
meshBotAI = meshBotAI + """
CONTEXT
The following is the location of the user
{location_name}
"""
if llmEnableHistory:
meshBotAI = meshBotAI + """
HISTORY
@@ -100,22 +81,6 @@ def llmTool_math_calculator(expression):
except Exception as e:
return f"Error in calculation: {e}"
def llmTool_get_google(query, num_results=3):
"""
Example tool function to perform a Google search and return results.
:param query: The search query string.
:param num_results: Number of search results to return.
:return: A list of search result titles and descriptions.
"""
results = []
try:
googleSearch = search(query, advanced=True, num_results=num_results)
for result in googleSearch:
results.append(f"{result.title}: {result.description}")
return results
except Exception as e:
return [f"Error in Google search: {e}"]
llmFunctions = [
{
@@ -140,54 +105,176 @@ llmFunctions = [
"required": ["expression"]
}
},
{
"name": "llmTool_get_google",
"description": "Perform a Google search and return results.",
"parameters": {
"type": "object",
"properties": {
"query": {
"type": "string",
"description": "The search query string."
},
"num_results": {
"type": "integer",
"description": "Number of search results to return.",
"default": 3
}
},
"required": ["query"]
}
}
]
def get_google_context(input, num_results):
# Get context from Google search results
googleResults = []
def get_wiki_context(input):
"""
Get context from Wikipedia/Kiwix for RAG enhancement
:param input: The user query
:return: Wikipedia summary or empty string if not available
"""
try:
googleSearch = search(input, advanced=True, num_results=num_results)
if googleSearch:
for result in googleSearch:
googleResults.append(f"{result.title} {result.description}")
else:
googleResults = ['no other context provided']
# Extract potential search terms from the input
# Try to identify key topics/entities for Wikipedia search
search_terms = extract_search_terms(input)
wiki_context = []
for term in search_terms[:2]: # Limit to 2 searches to avoid excessive API calls
if use_kiwix_server:
summary = get_kiwix_summary(term, truncate=False)
else:
summary = get_wikipedia_summary(term, truncate=False)
if summary and "error" not in summary.lower() or "html://" not in summary or "ambiguous" not in summary.lower():
wiki_context.append(f"Wikipedia context for '{term}': {summary}")
return '\n'.join(wiki_context) if wiki_context else ''
except Exception as e:
logger.debug(f"System: LLM Query: context gathering failed, likely due to network issues")
googleResults = ['no other context provided']
return googleResults
logger.debug(f"System: LLM Query: Wiki context gathering failed: {e}")
return ''
def llm_extract_topic(input):
    """
    Use LLM to extract the main topic as a single word or short phrase.
    Always uses raw mode and supports both Ollama and OpenWebUI.
    :param input: The user query
    :return: List with one topic string, or empty list on failure
    """
    prompt = (
        "Summarize the following query into a single word or short phrase that best represents the main topic, "
        "for use as a Wikipedia search term. Only return the word or phrase, nothing else:\n"
        f"{input}"
    )
    try:
        # Route through whichever backend is configured; both paths return
        # plain response text.
        if useOpenWebUI and openWebUIAPIKey:
            result = send_openwebui_query(prompt, max_tokens=10)
        else:
            llmQuery = {"model": llmModel, "prompt": prompt, "stream": False, "max_tokens": 10}
            result = send_ollama_query(llmQuery)
        # Keep only the first line and strip quotes/punctuation the model
        # may wrap around the topic.
        topic = result.strip().split('\n')[0]
        topic = topic.strip(' "\'.,!?;:')
        if topic:
            return [topic]
    except Exception as e:
        logger.debug(f"LLM topic extraction failed: {e}")
    # Empty list signals the caller to fall back to heuristic extraction.
    return []
def extract_search_terms(input):
    """
    Extract potential search terms from user input.
    Enhanced: Try LLM-based topic extraction first, fallback to heuristic.
    :param input: The user query
    :return: List of potential search terms (at most 3)
    """
    # Remove common command prefixes (e.g. "ask:", "askai")
    for trap in trap_list_llm:
        if input.lower().startswith(trap):
            input = input[len(trap):].strip()
            break
    # Try LLM-based extraction first
    terms = llm_extract_topic(input)
    if terms:
        return terms
    # Fallback: Simple heuristic (existing code) — consecutive capitalized
    # words longer than 2 chars are grouped into proper-noun phrases.
    words = input.split()
    search_terms = []
    temp_phrase = []
    for word in words:
        clean_word = word.strip('.,!?;:')
        if clean_word and clean_word[0].isupper() and len(clean_word) > 2:
            temp_phrase.append(clean_word)
        elif temp_phrase:
            # A lowercase word ends the current phrase.
            search_terms.append(' '.join(temp_phrase))
            temp_phrase = []
    if temp_phrase:
        search_terms.append(' '.join(temp_phrase))
    if not search_terms:
        # No capitalized phrases found: fall back to the whole query.
        search_terms = [input.strip()]
    if DEBUG_LLM:
        logger.debug(f"Extracted search terms: {search_terms}")
    return search_terms[:3]  # Limit to 3 terms
def send_openwebui_query(prompt, model=None, max_tokens=450, context=''):
    """
    Send query to OpenWebUI API for chat completion.
    Uses the OpenAI-compatible /api/chat/completions endpoint with bearer
    token auth (openWebUIAPIKey).
    :param prompt: The user prompt
    :param model: Model name (optional, defaults to llmModel)
    :param max_tokens: Max tokens for response
    :param context: Additional context to include (sent as a system message)
    :return: Response text or an error-marker string ("⛔️ ...")
    """
    if model is None:
        model = llmModel
    headers = {
        'Authorization': f'Bearer {openWebUIAPIKey}',
        'Content-Type': 'application/json'
    }
    messages = []
    if context:
        # RAG context rides along as a system message ahead of the prompt.
        messages.append({
            "role": "system",
            "content": f"Use the following context to help answer questions:\n{context}"
        })
    messages.append({
        "role": "user",
        "content": prompt
    })
    data = {
        "model": model,
        "messages": messages,
        "max_tokens": max_tokens,
        "stream": False
    }
    # Debug logging
    if DEBUG_LLM:
        logger.debug(f"OpenWebUI payload: {json.dumps(data)}")
        logger.debug(f"OpenWebUI endpoint: {openWebUIChatAPI}")
    try:
        # Timeout is 5x the base URL timeout — presumably because LLM
        # completions are much slower than ordinary HTTP fetches (confirm).
        result = requests.post(openWebUIChatAPI, headers=headers, json=data, timeout=urlTimeoutSeconds * 5)
        if DEBUG_LLM:
            logger.debug(f"OpenWebUI response status: {result.status_code}")
            logger.debug(f"OpenWebUI response text: {result.text}")
        if result.status_code == 200:
            result_json = result.json()
            # OpenWebUI returns OpenAI-compatible format
            if 'choices' in result_json and len(result_json['choices']) > 0:
                response = result_json['choices'][0]['message']['content']
                return response.strip()
            else:
                logger.warning(f"System: OpenWebUI API returned unexpected format")
                return "⛔️ Response Error"
        else:
            logger.warning(f"System: OpenWebUI API returned status code {result.status_code}")
            return f"⛔️ Request Error"
    except requests.exceptions.RequestException as e:
        logger.warning(f"System: OpenWebUI API request failed: {e}")
        return f"⛔️ Request Error"
def send_ollama_query(llmQuery):
# Send the query to the Ollama API and return the response
result = requests.post(ollamaAPI, data=json.dumps(llmQuery))
if result.status_code == 200:
result_json = result.json()
result = result_json.get("response", "")
# deepseek has added <think> </think> tags to the response
if "<think>" in result:
result = result.split("</think>")[1]
else:
raise Exception(f"HTTP Error: {result.status_code}")
return result
try:
result = requests.post(ollamaAPI, data=json.dumps(llmQuery), timeout= urlTimeoutSeconds * 5)
if result.status_code == 200:
result_json = result.json()
result = result_json.get("response", "")
# deepseek has added <think> </think> tags to the response
if "<think>" in result:
result = result.split("</think>")[1]
else:
logger.warning(f"System: LLM Query: Ollama API returned status code {result.status_code}")
return f"⛔️ Request Error"
return result
except requests.exceptions.RequestException as e:
logger.warning(f"System: LLM Query: Ollama API request failed: {e}")
return f"⛔️ Request Error"
def send_ollama_tooling_query(prompt, functions, model=None, max_tokens=450):
"""
@@ -213,24 +300,28 @@ def send_ollama_tooling_query(prompt, functions, model=None, max_tokens=450):
else:
raise Exception(f"HTTP Error: {result.status_code} - {result.text}")
def llm_query(input, nodeID=0, location_name=None):
def llm_query(input, nodeID=0, location_name=None, init=False):
global antiFloodLLM, llmChat_history
googleResults = []
wikiContext = ''
# if this is the first initialization of the LLM the query of " " should bring meshbotAIinit OTA shouldnt reach this?
# This is for LLM like gemma and others now?
if input == " " and rawLLMQuery:
if init and rawLLMQuery:
logger.warning("System: These LLM models lack a traditional system prompt, they can be verbose and not very helpful be advised.")
input = meshbotAIinit
else:
elif init:
input = input.strip()
# classic model for gemma2, deepseek-r1, etc
logger.debug(f"System: Using classic LLM model framework, ideally for gemma2, deepseek-r1, etc")
logger.debug(f"System: Using SYSTEM model framework, ideally for gemma2, deepseek-r1, etc")
if not location_name:
location_name = "no location provided "
# Remove command bang if present
if cmdBang and input.startswith('!'):
input = input.strip('!').strip()
# remove askai: and ask: from the input
# Remove any trap words from the start of the input
for trap in trap_list_llm:
if input.lower().startswith(trap):
input = input[len(trap):].strip()
@@ -245,34 +336,84 @@ def llm_query(input, nodeID=0, location_name=None):
else:
antiFloodLLM.append(nodeID)
if llmContext_fromGoogle and not rawLLMQuery:
googleResults = get_google_context(input, googleSearchResults)
# Get Wikipedia/Kiwix context if enabled (RAG)
if llmUseWikiContext and input != meshbotAIinit:
# get_wiki_context returns a string, but we want to count the items before joining
search_terms = extract_search_terms(input)
wiki_context_list = []
for term in search_terms[:2]:
if not use_kiwix_server:
summary = get_wiki_context(term)
else:
summary = get_wiki_context(term)
if summary and "error" not in summary.lower():
wiki_context_list.append(f"Wikipedia context for '{term}': {summary}")
wikiContext = '\n'.join(wiki_context_list) if wiki_context_list else ''
if wikiContext:
logger.debug(f"System: using Wikipedia/Kiwix context for LLM query got {len(wiki_context_list)} results")
history = llmChat_history.get(nodeID, ["", ""])
if googleResults:
logger.debug(f"System: Google-Enhanced LLM Query: {input} From:{nodeID}")
else:
logger.debug(f"System: LLM Query: {input} From:{nodeID}")
response = ""
result = ""
location_name += f" at the current time of {datetime.now().strftime('%Y-%m-%d %H:%M:%S %Z')}"
try:
if rawLLMQuery:
# sanitize the input to remove tool call syntax
if '```' in input:
logger.warning("System: LLM Query: Code markdown detected, removing for raw query")
input = input.replace('```bash', '').replace('```python', '').replace('```', '')
modelPrompt = input
else:
# Build the query from the template
modelPrompt = meshBotAI.format(input=input, context='\n'.join(googleResults), location_name=location_name, llmModel=llmModel, history=history)
# Use OpenWebUI if enabled
if useOpenWebUI and openWebUIAPIKey:
logger.debug(f"System: LLM Query: Using OpenWebUI API for LLM query {input} From:{nodeID}")
llmQuery = {"model": llmModel, "prompt": modelPrompt, "stream": False, "max_tokens": tokens}
# Query the model via Ollama web API
result = send_ollama_query(llmQuery)
# Combine all context sources
combined_context = []
if wikiContext:
combined_context.append(wikiContext)
context_str = '\n\n'.join(combined_context)
# For OpenWebUI, we send a cleaner prompt
if rawLLMQuery:
result = send_openwebui_query(input, context=context_str, max_tokens=tokens)
else:
# Use the template for non-raw queries
modelPrompt = meshBotAI.format(
input=input,
context=context_str if combined_context else 'no other context provided',
location_name=location_name,
llmModel=llmModel,
history=history
)
result = send_openwebui_query(modelPrompt, max_tokens=tokens)
else:
logger.debug(f"System: LLM Query: Using Ollama API for LLM query {input} From:{nodeID}")
# Use standard Ollama API
if rawLLMQuery:
# sanitize the input to remove tool call syntax
if '```' in input:
logger.warning("System: LLM Query: Code markdown detected, removing for raw query")
input = input.replace('```bash', '').replace('```python', '').replace('```', '')
modelPrompt = input
# Add wiki context to raw queries if available
if wikiContext:
modelPrompt = f"Context:\n{wikiContext}\n\nQuestion: {input}"
else:
# Build the query from the template
all_context = []
if wikiContext:
all_context.append(wikiContext)
context_text = '\n'.join(all_context) if all_context else 'no other context provided'
modelPrompt = meshBotAI.format(
input=input,
context=context_text,
location_name=location_name,
llmModel=llmModel,
history=history
)
llmQuery = {"model": llmModel, "prompt": modelPrompt, "stream": False, "max_tokens": tokens}
# Query the model via Ollama web API
result = send_ollama_query(llmQuery)
#logger.debug(f"System: LLM Response: " + result.strip().replace('\n', ' '))
except Exception as e:
@@ -284,13 +425,17 @@ def llm_query(input, nodeID=0, location_name=None):
response = result.strip().replace('\n', ' ')
if rawLLMQuery and requestTruncation and len(response) > 450:
#retryy loop to truncate the response
# retry loop to truncate the response
logger.warning(f"System: LLM Query: Response exceeded {tokens} characters, requesting truncation")
truncateQuery = {"model": llmModel, "prompt": truncatePrompt + response, "stream": False, "max_tokens": tokens}
truncateResult = send_ollama_query(truncateQuery)
truncate_prompt_full = truncatePrompt + response
if useOpenWebUI and openWebUIAPIKey:
truncateResult = send_openwebui_query(truncate_prompt_full, max_tokens=tokens)
else:
truncateQuery = {"model": llmModel, "prompt": truncate_prompt_full, "stream": False, "max_tokens": tokens}
truncateResult = send_ollama_query(truncateQuery)
# cleanup for message output
response = result.strip().replace('\n', ' ')
response = truncateResult.strip().replace('\n', ' ')
# done with the query, remove the user from the anti flood list
antiFloodLLM.remove(nodeID)

View File

@@ -6,14 +6,15 @@ from geopy.geocoders import Nominatim # pip install geopy
import maidenhead as mh # pip install maidenhead
import requests # pip install requests
import bs4 as bs # pip install beautifulsoup4
import xml.dom.minidom
import xml.dom.minidom # used for parsing XML
import xml.parsers.expat # used for parsing XML
from datetime import datetime
from modules.log import *
from modules.log import logger
import modules.settings as my_settings
import math
import csv
import os
trap_list_location = ("whereami", "wx", "wxa", "wxalert", "rlist", "ea", "ealert", "riverflow", "valert", "earthquake", "howfar", "map",)
def where_am_i(lat=0, lon=0, short=False, zip=False):
@@ -23,7 +24,7 @@ def where_am_i(lat=0, lon=0, short=False, zip=False):
if int(float(lat)) == 0 and int(float(lon)) == 0:
logger.error("Location: No GPS data, try sending location")
return NO_DATA_NOGPS
return my_settings.NO_DATA_NOGPS
# initialize Nominatim API
geolocator = Nominatim(user_agent="mesh-bot")
@@ -43,7 +44,7 @@ def where_am_i(lat=0, lon=0, short=False, zip=False):
whereIam = location.raw['address'].get('postcode', '')
return whereIam
if float(lat) == latitudeValue and float(lon) == longitudeValue:
if float(lat) == my_settings.latitudeValue and float(lon) == my_settings.longitudeValue:
# redacted address when no GPS and using default location
location = geolocator.reverse(str(lat) + ", " + str(lon))
address = location.raw['address']
@@ -72,7 +73,7 @@ def where_am_i(lat=0, lon=0, short=False, zip=False):
return whereIam
except Exception as e:
logger.debug("Location:Error fetching location data with whereami, likely network error")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
def getRepeaterBook(lat=0, lon=0):
grid = mh.to_maiden(float(lat), float(lon))
@@ -90,7 +91,7 @@ def getRepeaterBook(lat=0, lon=0):
try:
msg = ''
user_agent = {'User-agent': 'Mozilla/5.0'}
response = requests.get(repeater_url, headers=user_agent, timeout=urlTimeoutSeconds)
response = requests.get(repeater_url, headers=user_agent, timeout=my_settings.urlTimeoutSeconds)
if response.status_code!=200:
logger.error(f"Location:Error fetching repeater data from {repeater_url} with status code {response.status_code}")
soup = bs.BeautifulSoup(response.text, 'html.parser')
@@ -129,13 +130,13 @@ def getArtSciRepeaters(lat=0, lon=0):
#grid = mh.to_maiden(float(lat), float(lon))
repeaters = []
zipCode = where_am_i(lat, lon, zip=True)
if zipCode == NO_DATA_NOGPS or zipCode == ERROR_FETCHING_DATA:
if zipCode == my_settings.NO_DATA_NOGPS or zipCode == my_settings.ERROR_FETCHING_DATA:
return zipCode
if zipCode.isnumeric():
try:
artsci_url = f"http://www.artscipub.com/mobile/showstate.asp?zip={zipCode}"
response = requests.get(artsci_url, timeout=urlTimeoutSeconds)
response = requests.get(artsci_url, timeout=my_settings.urlTimeoutSeconds)
if response.status_code!=200:
logger.error(f"Location:Error fetching data from {artsci_url} with status code {response.status_code}")
soup = bs.BeautifulSoup(response.text, 'html.parser')
@@ -174,19 +175,20 @@ def getArtSciRepeaters(lat=0, lon=0):
return msg
def get_NOAAtide(lat=0, lon=0):
# get tide data from NOAA for lat/lon
station_id = ""
location = lat,lon
if float(lat) == 0 and float(lon) == 0:
lat = latitudeValue
lon = longitudeValue
lat = my_settings.latitudeValue
lon = my_settings.longitudeValue
station_lookup_url = "https://api.tidesandcurrents.noaa.gov/mdapi/prod/webapi/tidepredstations.json?lat=" + str(lat) + "&lon=" + str(lon) + "&radius=50"
try:
station_data = requests.get(station_lookup_url, timeout=urlTimeoutSeconds)
station_data = requests.get(station_lookup_url, timeout=my_settings.urlTimeoutSeconds)
if station_data.ok:
station_json = station_data.json()
else:
logger.error("Location:Error fetching tide station table from NOAA")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
if station_json['stationList'] == [] or station_json['stationList'] is None:
logger.error("Location:No tide station found")
@@ -196,26 +198,26 @@ def get_NOAAtide(lat=0, lon=0):
except (requests.exceptions.RequestException, json.JSONDecodeError):
logger.error("Location:Error fetching tide station table from NOAA")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
station_url = "https://api.tidesandcurrents.noaa.gov/api/prod/datagetter?date=today&time_zone=lst_ldt&datum=MLLW&product=predictions&interval=hilo&format=json&station=" + station_id
if use_metric:
if my_settings.use_metric:
station_url += "&units=metric"
else:
station_url += "&units=english"
try:
tide_data = requests.get(station_url, timeout=urlTimeoutSeconds)
tide_data = requests.get(station_url, timeout=my_settings.urlTimeoutSeconds)
if tide_data.ok:
tide_json = tide_data.json()
else:
logger.error("Location:Error fetching tide data from NOAA")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
except (requests.exceptions.RequestException, json.JSONDecodeError):
logger.error("Location:Error fetching tide data from NOAA")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
tide_data = tide_json['predictions']
@@ -225,7 +227,7 @@ def get_NOAAtide(lat=0, lon=0):
tide_table = "Tide Data for " + tide_date + "\n"
for tide in tide_data:
tide_time = tide['t'].split(" ")[1]
if not zuluTime:
if not my_settings.zuluTime:
# convert to 12 hour clock
if int(tide_time.split(":")[0]) > 12:
tide_time = str(int(tide_time.split(":")[0]) - 12) + ":" + tide_time.split(":")[1] + " PM"
@@ -237,16 +239,17 @@ def get_NOAAtide(lat=0, lon=0):
tide_table = tide_table[:-1]
return tide_table
def get_NOAAweather(lat=0, lon=0, unit=0):
def get_NOAAweather(lat=0, lon=0, unit=0, report_days=None):
# get weather report from NOAA for forecast detailed
weather = ""
location = lat,lon
if float(lat) == 0 and float(lon) == 0:
lat = latitudeValue
lon = longitudeValue
lat = my_settings.latitudeValue
lon = my_settings.longitudeValue
if report_days is None:
report_days = my_settings.forecastDuration
# get weather data from NOAA units for metric unit = 1 is metric
if use_metric:
if my_settings.use_metric:
unit = 1
logger.debug("Location: new API metric units not implemented yet")
@@ -254,29 +257,29 @@ def get_NOAAweather(lat=0, lon=0, unit=0):
weather_api = "https://api.weather.gov/points/" + str(lat) + "," + str(lon)
# extract the "forecast": property from the JSON response
try:
weather_data = requests.get(weather_api, timeout=urlTimeoutSeconds)
weather_data = requests.get(weather_api, timeout=my_settings.urlTimeoutSeconds)
if not weather_data.ok:
logger.warning("Location:Error fetching weather data from NOAA for location")
return ERROR_FETCHING_DATA
except (requests.exceptions.RequestException):
logger.warning("Location:Error fetching weather data from NOAA for location")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
except Exception:
logger.warning(f"Location:Error fetching weather data error: {Exception}")
return my_settings.ERROR_FETCHING_DATA
# get the forecast URL from the JSON response
weather_json = weather_data.json()
forecast_url = weather_json['properties']['forecast']
try:
forecast_data = requests.get(forecast_url, timeout=urlTimeoutSeconds)
forecast_data = requests.get(forecast_url, timeout=my_settings.urlTimeoutSeconds)
if not forecast_data.ok:
logger.warning("Location:Error fetching weather forecast from NOAA")
return ERROR_FETCHING_DATA
except (requests.exceptions.RequestException):
logger.warning("Location:Error fetching weather forecast from NOAA")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
except Exception:
logger.warning(f"Location:Error fetching weather data error: {Exception}")
return my_settings.ERROR_FETCHING_DATA
# from periods, get the detailedForecast from number of days in NOAAforecastDuration
forecast_json = forecast_data.json()
forecast = forecast_json['properties']['periods']
for day in forecast[:forecastDuration]:
for day in forecast[:report_days]:
# abreviate the forecast
weather += abbreviate_noaa(day['name']) + ": " + abbreviate_noaa(day['detailedForecast']) + "\n"
@@ -286,7 +289,7 @@ def get_NOAAweather(lat=0, lon=0, unit=0):
# get any alerts and return the count
alerts = getWeatherAlertsNOAA(lat, lon)
if alerts == ERROR_FETCHING_DATA or alerts == NO_DATA_NOGPS or alerts == NO_ALERTS:
if alerts == my_settings.ERROR_FETCHING_DATA or alerts == my_settings.NO_DATA_NOGPS or alerts == my_settings.NO_ALERTS:
alert = ""
alert_num = 0
else:
@@ -395,36 +398,36 @@ def getWeatherAlertsNOAA(lat=0, lon=0, useDefaultLatLon=False):
alerts = ""
location = lat,lon
if useDefaultLatLon:
lat = latitudeValue
lon = longitudeValue
lat = my_settings.latitudeValue
lon = my_settings.longitudeValue
if float(lat) == 0 and float(lon) == 0 and not useDefaultLatLon:
return NO_DATA_NOGPS
return my_settings.NO_DATA_NOGPS
alert_url = "https://api.weather.gov/alerts/active.atom?point=" + str(lat) + "," + str(lon)
#alert_url = "https://api.weather.gov/alerts/active.atom?area=WA"
#logger.debug("Location:Fetching weather alerts from NOAA for " + str(lat) + ", " + str(lon))
try:
alert_data = requests.get(alert_url, timeout=urlTimeoutSeconds)
alert_data = requests.get(alert_url, timeout=my_settings.urlTimeoutSeconds)
if not alert_data.ok:
logger.warning("Location:Error fetching weather alerts from NOAA")
return ERROR_FETCHING_DATA
except (requests.exceptions.RequestException):
logger.warning("Location:Error fetching weather alerts from NOAA")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
except Exception:
logger.warning(f"Location:Error fetching weather data error: {Exception}")
return my_settings.ERROR_FETCHING_DATA
alerts = ""
alertxml = xml.dom.minidom.parseString(alert_data.text)
for i in alertxml.getElementsByTagName("entry"):
title = i.getElementsByTagName("title")[0].childNodes[0].nodeValue
area_desc = i.getElementsByTagName("cap:areaDesc")[0].childNodes[0].nodeValue
if enableExtraLocationWx:
if my_settings.enableExtraLocationWx:
alerts += f"{title}. {area_desc.replace(' ', '')}\n"
else:
alerts += f"{title}\n"
if alerts == "" or alerts == None:
return NO_ALERTS
return my_settings.NO_ALERTS
# trim off last newline
if alerts[-1] == "\n":
@@ -437,23 +440,23 @@ def getWeatherAlertsNOAA(lat=0, lon=0, useDefaultLatLon=False):
alerts = abbreviate_noaa(alerts)
# return the first ALERT_COUNT alerts
data = "\n".join(alerts.split("\n")[:numWxAlerts]), alert_num
data = "\n".join(alerts.split("\n")[:my_settings.numWxAlerts]), alert_num
return data
wxAlertCacheNOAA = ""
def alertBrodcastNOAA():
# get the latest weather alerts and broadcast them if there are any
global wxAlertCacheNOAA
currentAlert = getWeatherAlertsNOAA(latitudeValue, longitudeValue)
currentAlert = getWeatherAlertsNOAA(my_settings.latitudeValue, my_settings.longitudeValue)
# check if any reason to discard the alerts
if currentAlert == ERROR_FETCHING_DATA or currentAlert == NO_DATA_NOGPS:
if currentAlert == my_settings.ERROR_FETCHING_DATA or currentAlert == my_settings.NO_DATA_NOGPS:
return False
elif currentAlert == NO_ALERTS:
elif currentAlert == my_settings.NO_ALERTS:
wxAlertCacheNOAA = ""
return False
if ignoreEASenable:
if my_settings.ignoreEASenable:
# check if the alert is in the ignoreEAS list
for word in ignoreEASwords:
for word in my_settings.ignoreEASwords:
if word.lower() in currentAlert[0].lower():
logger.debug(f"Location:Ignoring NOAA Alert: {currentAlert[0]} containing {word}")
return False
@@ -471,21 +474,21 @@ def getActiveWeatherAlertsDetailNOAA(lat=0, lon=0):
alerts = ""
location = lat,lon
if float(lat) == 0 and float(lon) == 0:
lat = latitudeValue
lon = longitudeValue
lat = my_settings.latitudeValue
lon = my_settings.longitudeValue
alert_url = "https://api.weather.gov/alerts/active.atom?point=" + str(lat) + "," + str(lon)
#alert_url = "https://api.weather.gov/alerts/active.atom?area=WA"
#logger.debug("Location:Fetching weather alerts detailed from NOAA for " + str(lat) + ", " + str(lon))
try:
alert_data = requests.get(alert_url, timeout=urlTimeoutSeconds)
alert_data = requests.get(alert_url, timeout=my_settings.urlTimeoutSeconds)
if not alert_data.ok:
logger.warning("Location:Error fetching weather alerts from NOAA")
return ERROR_FETCHING_DATA
except (requests.exceptions.RequestException):
logger.warning("Location:Error fetching weather alerts from NOAA")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
except Exception:
logger.warning(f"Location:Error fetching weather data error: {Exception}")
return my_settings.ERROR_FETCHING_DATA
alerts = ""
alertxml = xml.dom.minidom.parseString(alert_data.text)
@@ -505,10 +508,10 @@ def getActiveWeatherAlertsDetailNOAA(lat=0, lon=0):
alerts = abbreviate_noaa(alerts)
# trim the alerts to the first ALERT_COUNT
alerts = alerts.split("\n***\n")[:numWxAlerts]
alerts = alerts.split("\n***\n")[:my_settings.numWxAlerts]
if alerts == "" or alerts == ['']:
return NO_ALERTS
return my_settings.NO_ALERTS
# trim off last newline
if alerts[-1] == "\n":
@@ -530,13 +533,13 @@ def getIpawsAlert(lat=0, lon=0, shortAlerts = False):
# get the alerts from FEMA
try:
alert_data = requests.get(alert_url, timeout=urlTimeoutSeconds)
alert_data = requests.get(alert_url, timeout=my_settings.urlTimeoutSeconds)
if not alert_data.ok:
logger.warning(f"System: iPAWS fetching IPAWS alerts from FEMA (HTTP {alert_data.status_code})")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
except Exception as e:
logger.warning(f"System: iPAWS fetching IPAWS alerts from FEMA failed: {e}")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
# main feed bulletins
alertxml = xml.dom.minidom.parseString(alert_data.text)
@@ -558,13 +561,13 @@ def getIpawsAlert(lat=0, lon=0, shortAlerts = False):
continue
# check if it matches your list
if stateFips not in myStateFIPSList:
#logger.debug(f"Skipping FEMA record link {link} with stateFIPS code of: {stateFips} because it doesn't match our StateFIPSList {myStateFIPSList}")
if stateFips not in my_settings.myStateFIPSList:
#logger.debug(f"Skipping FEMA record link {link} with stateFIPS code of: {stateFips} because it doesn't match our StateFIPSList {my_settings.myStateFIPSList}")
continue # skip to next entry
try:
# get the linked alert data from FEMA
linked_data = requests.get(link, timeout=urlTimeoutSeconds)
linked_data = requests.get(link, timeout=my_settings.urlTimeoutSeconds)
if not linked_data.ok or not linked_data.text.strip():
# if the linked data is not ok, skip this alert
#logger.warning(f"System: iPAWS Error fetching linked alert data from {link}")
@@ -616,14 +619,14 @@ def getIpawsAlert(lat=0, lon=0, shortAlerts = False):
continue
# check if the alert is for the SAME location, if wanted keep alert
if (sameVal in mySAMEList) or (geocode_value in mySAMEList) or mySAMEList == ['']:
if (sameVal in my_settings.mySAMEList) or (geocode_value in my_settings.mySAMEList) or my_settings.mySAMEList == ['']:
ignore_alert = False
if ignoreFEMAenable:
if my_settings.ignoreFEMAenable:
ignore_alert = any(
word.lower() in headline.lower()
for word in ignoreFEMAwords)
for word in my_settings.ignoreFEMAwords)
if ignore_alert:
logger.debug(f"System: Filtering FEMA Alert by WORD: {headline} containing one of {ignoreFEMAwords} at {areaDesc}")
logger.debug(f"System: Filtering FEMA Alert by WORD: {headline} containing one of {my_settings.ignoreFEMAwords} at {areaDesc}")
if ignore_alert:
continue
@@ -643,16 +646,16 @@ def getIpawsAlert(lat=0, lon=0, shortAlerts = False):
# return the numWxAlerts of alerts
if len(alerts) > 0:
for alertItem in alerts[:numWxAlerts]:
for alertItem in alerts[:my_settings.numWxAlerts]:
if shortAlerts:
alert += abbreviate_noaa(f"🚨FEMA Alert: {alertItem['headline']}")
else:
alert += abbreviate_noaa(f"🚨FEMA Alert: {alertItem['headline']}\n{alertItem['description']}")
# add a newline if not the last alert
if alertItem != alerts[:numWxAlerts][-1]:
if alertItem != alerts[:my_settings.numWxAlerts][-1]:
alert += "\n"
else:
alert = NO_ALERTS
alert = my_settings.NO_ALERTS
return alert
@@ -665,22 +668,22 @@ def get_flood_noaa(lat=0, lon=0, uid=None):
headers = {'accept': 'application/json'}
if not uid:
logger.warning(f"Location:No flood gauge data found for UID {uid}")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
try:
response = requests.get(api_url + str(uid), headers=headers, timeout=urlTimeoutSeconds)
response = requests.get(api_url + str(uid), headers=headers, timeout=my_settings.urlTimeoutSeconds)
if not response.ok:
logger.warning(f"Location:Error fetching flood gauge data from NOAA for {uid} (HTTP {response.status_code})")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
data = response.json()
if not data or 'status' not in data:
logger.warning(f"Location:No flood gauge data found for UID {uid}")
return "No flood gauge data found"
except requests.exceptions.RequestException as e:
logger.warning(f"Location:Error fetching flood gauge data from: {api_url}{uid} ({e})")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
except Exception as e:
logger.warning(f"Location:Unexpected error: {e}")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
# extract values from JSON safely
try:
@@ -696,35 +699,35 @@ def get_flood_noaa(lat=0, lon=0, uid=None):
return flood_data
except Exception as e:
logger.debug(f"Location:Error extracting flood gauge data from NOAA for {uid}: {e}")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
def get_volcano_usgs(lat=0, lon=0):
alerts = ''
if lat == 0 and lon == 0:
lat = latitudeValue
lon = longitudeValue
lat = my_settings.latitudeValue
lon = my_settings.longitudeValue
# get the latest volcano alert from USGS from CAP feed
usgs_volcano_url = "https://volcanoes.usgs.gov/hans-public/api/volcano/getCapElevated"
try:
volcano_data = requests.get(usgs_volcano_url, timeout=urlTimeoutSeconds)
volcano_data = requests.get(usgs_volcano_url, timeout=my_settings.urlTimeoutSeconds)
if not volcano_data.ok:
logger.warning("System: Issue with fetching volcano alerts from USGS")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
except (requests.exceptions.RequestException):
logger.warning("System: Issue with fetching volcano alerts from USGS")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
volcano_json = volcano_data.json()
# extract alerts from main feed
if volcano_json and isinstance(volcano_json, list):
for alert in volcano_json:
# check ignore list
if ignoreUSGSEnable:
for word in ignoreUSGSwords:
if my_settings.ignoreUSGSEnable:
for word in my_settings.ignoreUSGSwords:
if word.lower() in alert['volcano_name_appended'].lower():
logger.debug(f"System: Ignoring USGS Alert: {alert['volcano_name_appended']} containing {word}")
continue
# check if the alert lat long is within the range of bot latitudeValue and longitudeValue
if (alert['latitude'] >= latitudeValue - 10 and alert['latitude'] <= latitudeValue + 10) and (alert['longitude'] >= longitudeValue - 10 and alert['longitude'] <= longitudeValue + 10):
if (alert['latitude'] >= my_settings.latitudeValue - 10 and alert['latitude'] <= my_settings.latitudeValue + 10) and (alert['longitude'] >= my_settings.longitudeValue - 10 and alert['longitude'] <= my_settings.longitudeValue + 10):
volcano_name = alert['volcano_name_appended']
alert_level = alert['alert_level']
color_code = alert['color_code']
@@ -737,9 +740,9 @@ def get_volcano_usgs(lat=0, lon=0):
continue
else:
logger.debug("Location:Error fetching volcano data from USGS")
return NO_ALERTS
return my_settings.NO_ALERTS
if alerts == "":
return NO_ALERTS
return my_settings.NO_ALERTS
# trim off last newline
if alerts[-1] == "\n":
alerts = alerts[:-1]
@@ -750,13 +753,13 @@ def get_volcano_usgs(lat=0, lon=0):
def get_nws_marine(zone, days=3):
# forecast from NWS coastal products
try:
marine_pz_data = requests.get(zone, timeout=urlTimeoutSeconds)
marine_pz_data = requests.get(zone, timeout=my_settings.urlTimeoutSeconds)
if not marine_pz_data.ok:
logger.warning("Location:Error fetching NWS Marine PZ data")
return ERROR_FETCHING_DATA
except (requests.exceptions.RequestException):
logger.warning("Location:Error fetching NWS Marine PZ data")
return ERROR_FETCHING_DATA
logger.warning(f"Location:Error fetching NWS Marine data (HTTP {marine_pz_data.status_code})")
return my_settings.ERROR_FETCHING_DATA
except requests.exceptions.RequestException as e:
logger.warning(f"Location:Error fetching NWS Marine data: {e}")
return my_settings.ERROR_FETCHING_DATA
marine_pz_data = marine_pz_data.text
todayDate = datetime.now().strftime("%Y%m%d")
@@ -766,13 +769,13 @@ def get_nws_marine(zone, days=3):
expires_date = expires[:8]
if expires_date < todayDate:
logger.debug("Location: NWS Marine PZ data expired")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
except Exception as e:
logger.debug(f"Location: NWS Marine PZ data parse error: {e}")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
else:
logger.debug("Location: NWS Marine PZ data not valid or empty")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
# process the marine forecast data
marine_pzz_lines = marine_pz_data.split("\n")
@@ -794,7 +797,7 @@ def get_nws_marine(zone, days=3):
day_blocks.append(current_block.strip())
# Only keep up to pzDays blocks
for block in day_blocks[:days]:
for block in day_blocks[:my_settings.coastalForecastDays]:
marine_pz_report += block + "\n"
# remove last newline
@@ -808,13 +811,13 @@ def get_nws_marine(zone, days=3):
# abbreviate the report
marine_pz_report = abbreviate_noaa(marine_pz_report)
if marine_pz_report == "":
return NO_DATA_NOGPS
return my_settings.NO_DATA_NOGPS
return marine_pz_report
def checkUSGSEarthQuake(lat=0, lon=0):
if lat == 0 and lon == 0:
lat = latitudeValue
lon = longitudeValue
lat = my_settings.latitudeValue
lon = my_settings.longitudeValue
radius = 100 # km
magnitude = 1.5
history = 7 # days
@@ -824,20 +827,20 @@ def checkUSGSEarthQuake(lat=0, lon=0):
quake_count = 0
# fetch the earthquake data from USGS
try:
quake_data = requests.get(USGSquake_url, timeout=urlTimeoutSeconds)
quake_data = requests.get(USGSquake_url, timeout=my_settings.urlTimeoutSeconds)
if not quake_data.ok:
logger.warning("Location:Error fetching earthquake data from USGS")
return NO_ALERTS
return my_settings.NO_ALERTS
if not quake_data.text.strip():
return NO_ALERTS
return my_settings.NO_ALERTS
try:
quake_xml = xml.dom.minidom.parseString(quake_data.text)
except Exception as e:
logger.warning(f"Location: USGS earthquake API returned invalid XML: {e}")
return NO_ALERTS
return my_settings.NO_ALERTS
except (requests.exceptions.RequestException):
logger.warning("Location:Error fetching earthquake data from USGS")
return NO_ALERTS
return my_settings.NO_ALERTS
quake_xml = xml.dom.minidom.parseString(quake_data.text)
quake_count = len(quake_xml.getElementsByTagName("event"))
@@ -853,7 +856,7 @@ def checkUSGSEarthQuake(lat=0, lon=0):
description_text = event.getElementsByTagName("description")[0].getElementsByTagName("text")[0].childNodes[0].nodeValue
largest_mag = round(largest_mag, 1)
if quake_count == 0:
return NO_ALERTS
return my_settings.NO_ALERTS
else:
return f"{quake_count} 🫨quakes in last {history} days within {radius} km. Largest: {largest_mag}M\n{description_text}"
@@ -867,7 +870,7 @@ def distance(lat=0,lon=0,nodeID=0, reset=False):
r = 6371 # Radius of earth in kilometers # haversine formula
if lat == 0 and lon == 0:
return NO_DATA_NOGPS
return my_settings.NO_DATA_NOGPS
if nodeID == 0:
return "No NodeID provided"
@@ -899,7 +902,7 @@ def distance(lat=0,lon=0,nodeID=0, reset=False):
c = 2 * math.asin(math.sqrt(a))
distance_km = c * r
if use_metric:
if my_settings.use_metric:
msg += f"{distance_km:.2f} km"
else:
distance_miles = distance_km * 0.621371
@@ -917,7 +920,7 @@ def distance(lat=0,lon=0,nodeID=0, reset=False):
time_diff = datetime.now() - last_point['time']
if time_diff.total_seconds() > 60:
hours = time_diff.total_seconds() / 3600
if use_metric:
if my_settings.use_metric:
speed = distance_km / hours
speed_str = f"{speed:.2f} km/h"
else:
@@ -941,7 +944,7 @@ def distance(lat=0,lon=0,nodeID=0, reset=False):
total_distance_km += c * r
# add the distance from last point to current point
total_distance_km += distance_km
if use_metric:
if my_settings.use_metric:
msg += f", Total: {total_distance_km:.2f} km"
else:
total_distance_miles = total_distance_km * 0.621371
@@ -974,7 +977,7 @@ def distance(lat=0,lon=0,nodeID=0, reset=False):
area = area * (6378137 ** 2) / 2.0
area = abs(area) / 1e6 # convert to square kilometers
if use_metric:
if my_settings.use_metric:
msg += f", Area: {area:.2f} sq.km (approx)"
else:
area_miles = area * 0.386102
@@ -1006,7 +1009,7 @@ def distance(lat=0,lon=0,nodeID=0, reset=False):
def get_openskynetwork(lat=0, lon=0):
# get the latest aircraft data from OpenSky Network in the area
if lat == 0 and lon == 0:
return NO_ALERTS
return my_settings.NO_ALERTS
# setup a bounding box of 50km around the lat/lon
box_size = 0.45 # approx 50km
# return limits for aircraft search
@@ -1019,18 +1022,19 @@ def get_openskynetwork(lat=0, lon=0):
# fetch the aircraft data from OpenSky Network
opensky_url = f"https://opensky-network.org/api/states/all?lamin={lamin}&lomin={lomin}&lamax={lamax}&lomax={lomax}"
try:
aircraft_data = requests.get(opensky_url, timeout=urlTimeoutSeconds)
aircraft_data = requests.get(opensky_url, timeout=my_settings.urlTimeoutSeconds)
if not aircraft_data.ok:
logger.warning("Location:Error fetching aircraft data from OpenSky Network")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
except (requests.exceptions.RequestException):
logger.warning("Location:Error fetching aircraft data from OpenSky Network")
return ERROR_FETCHING_DATA
return my_settings.ERROR_FETCHING_DATA
aircraft_json = aircraft_data.json()
if 'states' not in aircraft_json or not aircraft_json['states']:
return NO_ALERTS
return my_settings.NO_ALERTS
aircraft_list = aircraft_json['states']
aircraft_report = ""
logger.debug(f"Location: OpenSky Network: Found {len(aircraft_list)} possible aircraft in area")
for aircraft in aircraft_list:
if len(aircraft_report.split("\n")) >= search_limit:
break
@@ -1055,7 +1059,7 @@ def get_openskynetwork(lat=0, lon=0):
if aircraft_report.endswith("\n"):
aircraft_report = aircraft_report[:-1]
aircraft_report = abbreviate_noaa(aircraft_report)
return aircraft_report if aircraft_report else NO_ALERTS
return aircraft_report if aircraft_report else my_settings.NO_ALERTS
def log_locationData_toMap(userID, location, message):
"""

View File

@@ -1,11 +1,11 @@
import logging
from logging.handlers import TimedRotatingFileHandler
from modules.settings import *
import modules.settings as my_settings
# if LOGGING_LEVEL is not set in settings.py, default to DEBUG
if not LOGGING_LEVEL:
LOGGING_LEVEL = "DEBUG"
if not my_settings.LOGGING_LEVEL:
my_settings.LOGGING_LEVEL = "DEBUG"
LOGGING_LEVEL = getattr(logging, LOGGING_LEVEL)
LOGGING_LEVEL = getattr(logging, my_settings.LOGGING_LEVEL)
class CustomFormatter(logging.Formatter):
grey = '\x1b[38;21m'
@@ -70,16 +70,16 @@ stdout_handler.setFormatter(CustomFormatter(logFormat))
# Add handlers to the logger
logger.addHandler(stdout_handler)
if syslog_to_file:
if my_settings.syslog_to_file:
# Create file handler for logging to a file
file_handler_sys = TimedRotatingFileHandler('logs/meshbot.log', when='midnight', backupCount=log_backup_count, encoding='utf-8')
file_handler_sys = TimedRotatingFileHandler('logs/meshbot.log', when='midnight', backupCount=my_settings.log_backup_count, encoding='utf-8')
file_handler_sys.setLevel(LOGGING_LEVEL) # DEBUG used by default for system logs to disk
file_handler_sys.setFormatter(plainFormatter(logFormat))
logger.addHandler(file_handler_sys)
if log_messages_to_file:
if my_settings.log_messages_to_file:
# Create file handler for logging to a file
file_handler = TimedRotatingFileHandler('logs/messages.log', when='midnight', backupCount=log_backup_count, encoding='utf-8')
file_handler = TimedRotatingFileHandler('logs/messages.log', when='midnight', backupCount=my_settings.log_backup_count, encoding='utf-8')
file_handler.setLevel(logging.INFO) # INFO used for messages to disk
file_handler.setFormatter(logging.Formatter(msgLogFormat))
msgLogger.addHandler(file_handler)

View File

@@ -1,18 +1,29 @@
# Module to respomnd to new nodes we havent seen before with a hello message
# K7MHI Kelly Keeton 2024
import os
import sqlite3
from modules.log import *
from modules.log import logger
from modules.settings import qrz_db
def initalize_qrz_database():
# create the database
conn = sqlite3.connect(qrz_db)
c = conn.cursor()
# Check if the qrz table exists, and create it if it doesn't
c.execute('''CREATE TABLE IF NOT EXISTS qrz
(qrz_id INTEGER PRIMARY KEY, qrz_call TEXT, qrz_name TEXT, qrz_qth TEXT, qrz_notes TEXT)''')
conn.commit()
conn.close()
try:
# If the database file doesn't exist, it will be created by sqlite3.connect
if not os.path.exists(qrz_db):
logger.info(f"QRZ database file '{qrz_db}' not found. Creating new database.")
conn = sqlite3.connect(qrz_db)
c = conn.cursor()
# Create the table if it doesn't exist
c.execute('''CREATE TABLE IF NOT EXISTS qrz
(qrz_id INTEGER PRIMARY KEY, qrz_call TEXT, qrz_name TEXT, qrz_qth TEXT, qrz_notes TEXT)''')
conn.commit()
return True
except sqlite3.Error as e:
logger.error(f"Error initializing QRZ database: {e}")
return False
finally:
if 'conn' in locals():
conn.close()
def never_seen_before(nodeID):
# check if we have seen this node before and sent a hello message

View File

@@ -3,14 +3,107 @@
# depends on rigctld running externally as a network service
# also can use VOX detection with a microphone and vosk speech to text to send voice messages to mesh network
# requires vosk and sounddevice python modules. will auto download needed. more from https://alphacephei.com/vosk/models and unpack
# 2024 Kelly Keeton K7MHI
# 2025 Kelly Keeton K7MHI
# WSJT-X and JS8Call UDP Monitoring
# Based on WSJT-X UDP protocol specification
# Reference: https://github.com/ckuhtz/ham/blob/main/mcast/recv_decode.py
from modules.log import *
import asyncio
import socket
import struct
import json
from modules.log import logger
from modules.settings import (
radio_detection_enabled,
rigControlServerAddress,
signalDetectionThreshold,
signalHoldTime,
signalCooldown,
signalCycleLimit,
voxDetectionEnabled,
useLocalVoxModel,
localVoxModelPath,
voxLanguage,
voxInputDevice,
voxTrapList,
voxOnTrapList,
voxEnableCmd,
ERROR_FETCHING_DATA
)
# module global variables
# verbose debug logging for trap words function
debugVoxTmsg = False
# --- WSJT-X and JS8Call Settings Initialization ---
wsjtxMsgQueue = [] # Queue for WSJT-X detected messages
js8callMsgQueue = [] # Queue for JS8Call detected messages
wsjtx_enabled = False
js8call_enabled = False
wsjtx_udp_port = 2237
js8call_udp_port = 2442
watched_callsigns = []
wsjtx_udp_address = '127.0.0.1'
js8call_tcp_address = '127.0.0.1'
js8call_tcp_port = 2442
# WSJT-X UDP Protocol Message Types
WSJTX_HEARTBEAT = 0
WSJTX_STATUS = 1
WSJTX_DECODE = 2
WSJTX_CLEAR = 3
WSJTX_REPLY = 4
WSJTX_QSO_LOGGED = 5
WSJTX_CLOSE = 6
WSJTX_REPLAY = 7
WSJTX_HALT_TX = 8
WSJTX_FREE_TEXT = 9
WSJTX_WSPR_DECODE = 10
WSJTX_LOCATION = 11
WSJTX_LOGGED_ADIF = 12
try:
from modules.settings import (
wsjtx_detection_enabled,
wsjtx_udp_server_address,
wsjtx_watched_callsigns,
js8call_detection_enabled,
js8call_server_address,
js8call_watched_callsigns
)
wsjtx_enabled = wsjtx_detection_enabled
js8call_enabled = js8call_detection_enabled
# Use a local list to collect callsigns before assigning to watched_callsigns
callsigns = []
if wsjtx_enabled:
if ':' in wsjtx_udp_server_address:
wsjtx_udp_address, port_str = wsjtx_udp_server_address.split(':')
wsjtx_udp_port = int(port_str)
if wsjtx_watched_callsigns:
callsigns.extend([cs.strip() for cs in wsjtx_watched_callsigns.split(',') if cs.strip()])
if js8call_enabled:
if ':' in js8call_server_address:
js8call_tcp_address, port_str = js8call_server_address.split(':')
js8call_tcp_port = int(port_str)
if js8call_watched_callsigns:
callsigns.extend([cs.strip() for cs in js8call_watched_callsigns.split(',') if cs.strip()])
# Clean up and deduplicate callsigns, uppercase for matching
watched_callsigns = list({cs.upper() for cs in callsigns})
except ImportError:
logger.debug("RadioMon: WSJT-X/JS8Call settings not configured")
except Exception as e:
logger.warning(f"RadioMon: Error loading WSJT-X/JS8Call settings: {e}")
if radio_detection_enabled:
# used by hamlib detection
@@ -100,6 +193,9 @@ def get_freq_common_name(freq):
def get_hamlib(msg="f"):
# get data from rigctld server
if "socket" not in globals():
logger.warning("RadioMon: 'socket' module not imported. Hamlib disabled.")
return ERROR_FETCHING_DATA
try:
rigControlSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
rigControlSocket.settimeout(2)
@@ -243,4 +339,265 @@ async def voxMonitor():
except Exception as e:
logger.error(f"RadioMon: Error in VOX monitor: {e}")
def decode_wsjtx_packet(data):
    """Decode WSJT-X UDP packet according to the protocol specification.

    Parses the Qt QDataStream framing (big-endian) used by WSJT-X UDP
    broadcasts.

    Args:
        data: raw bytes of one UDP datagram.

    Returns:
        dict describing a Decode (type 2) or QSO Logged (type 5) message,
        or None for any other message type or a malformed/truncated packet.
    """
    try:
        # WSJT-X uses Qt's QDataStream format (big-endian)
        magic = struct.unpack('>I', data[0:4])[0]
        if magic != 0xADBCCBDA:
            # Not a WSJT-X datagram at all.
            return None
        # Schema number is read but otherwise unused here.
        schema_version = struct.unpack('>I', data[4:8])[0]
        msg_type = struct.unpack('>I', data[8:12])[0]
        offset = 12
        # Helper to read Qt QString (4-byte length + UTF-8 data).
        # On truncation it returns ("", offset) without advancing past the
        # bad region, so callers see an empty string rather than an exception.
        def read_qstring(data, offset):
            if offset + 4 > len(data):
                return "", offset
            length = struct.unpack('>I', data[offset:offset+4])[0]
            offset += 4
            if length == 0xFFFFFFFF: # Null string
                return "", offset
            if offset + length > len(data):
                return "", offset
            text = data[offset:offset+length].decode('utf-8', errors='ignore')
            return text, offset + length
        # Decode DECODE message (type 2)
        if msg_type == WSJTX_DECODE:
            # Read fields according to WSJT-X protocol
            wsjtx_id, offset = read_qstring(data, offset)
            # Read other decode fields: new, time, snr, delta_time, delta_frequency, mode, message
            if offset + 1 > len(data):
                return None
            new = struct.unpack('>?', data[offset:offset+1])[0]
            offset += 1
            if offset + 4 > len(data):
                return None
            # Time is an unsigned 32-bit value (QTime milliseconds since midnight
            # per the WSJT-X protocol — TODO confirm against NetworkMessage.hpp).
            time_val = struct.unpack('>I', data[offset:offset+4])[0]
            offset += 4
            if offset + 4 > len(data):
                return None
            snr = struct.unpack('>i', data[offset:offset+4])[0]
            offset += 4
            if offset + 8 > len(data):
                return None
            delta_time = struct.unpack('>d', data[offset:offset+8])[0]
            offset += 8
            if offset + 4 > len(data):
                return None
            delta_frequency = struct.unpack('>I', data[offset:offset+4])[0]
            offset += 4
            mode, offset = read_qstring(data, offset)
            message, offset = read_qstring(data, offset)
            return {
                'type': 'decode',
                'id': wsjtx_id,
                'new': new,
                'time': time_val,
                'snr': snr,
                'delta_time': delta_time,
                'delta_frequency': delta_frequency,
                'mode': mode,
                'message': message
            }
        # Decode QSO_LOGGED message (type 5)
        elif msg_type == WSJTX_QSO_LOGGED:
            wsjtx_id, offset = read_qstring(data, offset)
            # Read QSO logged fields
            # NOTE(review): Qt serializes QDateTime as julian-day (qint64) +
            # msec-of-day (quint32) + timespec (quint8); reading two plain
            # quint64 values here may leave `offset` misaligned before
            # dx_call — verify against the WSJT-X NetworkMessage spec.
            if offset + 8 > len(data):
                return None
            date_off = struct.unpack('>Q', data[offset:offset+8])[0]
            offset += 8
            if offset + 8 > len(data):
                return None
            time_off = struct.unpack('>Q', data[offset:offset+8])[0]
            offset += 8
            dx_call, offset = read_qstring(data, offset)
            dx_grid, offset = read_qstring(data, offset)
            return {
                'type': 'qso_logged',
                'id': wsjtx_id,
                'dx_call': dx_call,
                'dx_grid': dx_grid
            }
        # Any other message type is ignored by this monitor.
        return None
    except Exception as e:
        logger.debug(f"RadioMon: Error decoding WSJT-X packet: {e}")
        return None
def check_callsign_match(message, callsigns):
    """Return True if any watched callsign appears in the message.

    Matching is done on whole whitespace-delimited words so that a watch
    for 'K7MHI' does not fire on 'K7'. Compound forms with portable /
    mobile markers ('K7MHI/P', 'K7MHI-7', 'DX/K7MHI', 'DX-K7MHI') are
    also recognized. Comparison is case-insensitive.

    Args:
        message: decoded text to scan.
        callsigns: iterable of callsigns to watch for; an empty list
            disables filtering (every message matches).

    Returns:
        bool: True on a match (or when no filter is configured).
    """
    # An empty watch list means "accept everything".
    if not callsigns:
        return True
    tokens = message.upper().split()
    for watched in callsigns:
        target = watched.upper()
        # Exact word hit.
        if target in tokens:
            return True
        # Compound forms: callsign followed by, or preceded by, '/' or '-'.
        leading = (target + '/', target + '-')
        trailing = ('/' + target, '-' + target)
        if any(tok.startswith(leading) or tok.endswith(trailing) for tok in tokens):
            return True
    return False
async def wsjtxMonitor():
    """Monitor WSJT-X UDP broadcasts for decode messages.

    Binds a non-blocking UDP socket on the configured address/port
    (module globals wsjtx_udp_address / wsjtx_udp_port), decodes each
    datagram with decode_wsjtx_packet(), and appends matching decode
    texts to the shared wsjtxMsgQueue for the broadcast watchdog.
    Runs forever; only returns early when monitoring is disabled.
    """
    if not wsjtx_enabled:
        logger.warning("RadioMon: WSJT-X monitoring called but not enabled")
        return
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # SO_REUSEADDR lets the listener restart without waiting for the
        # old binding to be released.
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind((wsjtx_udp_address, wsjtx_udp_port))
        # Non-blocking so the asyncio loop is never stalled by recvfrom().
        sock.setblocking(False)
        logger.info(f"RadioMon: WSJT-X UDP listener started on {wsjtx_udp_address}:{wsjtx_udp_port}")
        if watched_callsigns:
            logger.info(f"RadioMon: Watching for callsigns: {', '.join(watched_callsigns)}")
        while True:
            try:
                data, addr = sock.recvfrom(4096)
                decoded = decode_wsjtx_packet(data)
                # Only 'decode' messages are forwarded; QSO_LOGGED and
                # other types are ignored here.
                if decoded and decoded['type'] == 'decode':
                    message = decoded['message']
                    mode = decoded['mode']
                    snr = decoded['snr']
                    # Check if message contains watched callsigns
                    if check_callsign_match(message, watched_callsigns):
                        msg_text = f"WSJT-X {mode}: {message} (SNR: {snr:+d}dB)"
                        logger.info(f"RadioMon: {msg_text}")
                        wsjtxMsgQueue.append(msg_text)
            except BlockingIOError:
                # No data available
                await asyncio.sleep(0.1)
            except Exception as e:
                # Per-packet failures are logged and the loop continues.
                logger.debug(f"RadioMon: Error in WSJT-X monitor loop: {e}")
                await asyncio.sleep(1)
    except Exception as e:
        # Bind/setup failure: monitor exits permanently.
        logger.error(f"RadioMon: Error starting WSJT-X monitor: {e}")
async def js8callMonitor():
    """Monitor the JS8Call TCP API for directed/activity messages.

    Maintains a persistent TCP connection to the JS8Call JSON API
    (module globals js8call_tcp_address / js8call_tcp_port), reading
    newline-delimited JSON. RX.DIRECTED and RX.ACTIVITY messages whose
    TEXT matches a watched callsign are appended to js8callMsgQueue.
    Reconnects automatically (5s after a drop, 10s after a connect
    error). Runs forever; only returns early when disabled.
    """
    if not js8call_enabled:
        logger.warning("RadioMon: JS8Call monitoring called but not enabled")
        return
    try:
        logger.info(f"RadioMon: JS8Call TCP listener connecting to {js8call_tcp_address}:{js8call_tcp_port}")
        if watched_callsigns:
            logger.info(f"RadioMon: Watching for callsigns: {', '.join(watched_callsigns)}")
        while True:  # outer reconnect loop
            try:
                # Connect to JS8Call TCP API
                # NOTE(review): sock is not closed on a connect failure
                # before the except below — minor fd leak per retry; confirm.
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.settimeout(5)  # bound the blocking connect() call
                sock.connect((js8call_tcp_address, js8call_tcp_port))
                # Switch to non-blocking for the asyncio-friendly read loop.
                sock.setblocking(False)
                logger.info("RadioMon: Connected to JS8Call API")
                buffer = ""
                while True:  # inner receive loop for this connection
                    try:
                        data = sock.recv(4096)
                        if not data:
                            # Peer closed the connection cleanly.
                            logger.warning("RadioMon: JS8Call connection closed")
                            break
                        buffer += data.decode('utf-8', errors='ignore')
                        # Process complete JSON messages (newline delimited)
                        while '\n' in buffer:
                            line, buffer = buffer.split('\n', 1)
                            if not line.strip():
                                continue
                            try:
                                msg = json.loads(line)
                                msg_type = msg.get('type', '')
                                # Handle RX.DIRECTED and RX.ACTIVITY messages
                                if msg_type in ['RX.DIRECTED', 'RX.ACTIVITY']:
                                    params = msg.get('params', {})
                                    text = params.get('TEXT', '')
                                    from_call = params.get('FROM', '')
                                    snr = params.get('SNR', 0)
                                    if text and check_callsign_match(text, watched_callsigns):
                                        msg_text = f"JS8Call from {from_call}: {text} (SNR: {snr:+d}dB)"
                                        logger.info(f"RadioMon: {msg_text}")
                                        js8callMsgQueue.append(msg_text)
                            except json.JSONDecodeError:
                                logger.debug(f"RadioMon: Invalid JSON from JS8Call: {line[:100]}")
                            except Exception as e:
                                logger.debug(f"RadioMon: Error processing JS8Call message: {e}")
                    except BlockingIOError:
                        # No data pending on the non-blocking socket.
                        await asyncio.sleep(0.1)
                    except socket.timeout:
                        # NOTE(review): after setblocking(False), recv raises
                        # BlockingIOError rather than socket.timeout — this
                        # branch is likely defensive/unreachable; confirm.
                        await asyncio.sleep(0.1)
                    except Exception as e:
                        # Unexpected receive error: drop and reconnect.
                        logger.debug(f"RadioMon: Error in JS8Call receive loop: {e}")
                        break
                sock.close()
                logger.warning("RadioMon: JS8Call connection lost, reconnecting in 5s...")
                await asyncio.sleep(5)
            except socket.timeout:
                logger.warning("RadioMon: JS8Call connection timeout, retrying in 5s...")
                await asyncio.sleep(5)
            except Exception as e:
                logger.warning(f"RadioMon: Error connecting to JS8Call: {e}")
                await asyncio.sleep(10)
    except Exception as e:
        logger.error(f"RadioMon: Error starting JS8Call monitor: {e}")
# end of file

View File

@@ -1,11 +1,15 @@
# rss feed module for meshing-around 2025
from modules.log import *
from modules.log import logger
from modules.settings import rssFeedURL, rssFeedNames, rssMaxItems, rssTruncate, urlTimeoutSeconds, ERROR_FETCHING_DATA
import urllib.request
import xml.etree.ElementTree as ET
import html
from html.parser import HTMLParser
import bs4 as bs
# Common User-Agent for all RSS requests
COMMON_USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3'
class MLStripper(HTMLParser):
def __init__(self):
super().__init__()
@@ -52,48 +56,83 @@ def get_rss_feed(msg):
if "?" in msg_lower:
return f"Fetches the latest {RSS_RETURN_COUNT} entries RSS feeds. Available feeds are: {', '.join(RSS_FEED_NAMES)}. To fetch a specific feed, include its name in your request."
# Fetch and parse the RSS feed
try:
logger.debug(f"Fetching RSS feed from {feed_url} from message '{msg}'")
agent = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3'}
agent = {'User-Agent': COMMON_USER_AGENT}
request = urllib.request.Request(feed_url, headers=agent)
with urllib.request.urlopen(request, timeout=urlTimeoutSeconds) as response:
xml_data = response.read()
root = ET.fromstring(xml_data)
# Try both namespaced and non-namespaced item tags
items = root.findall('.//item')
ns = None
if not items:
# Try to find the namespace dynamically
for elem in root.iter():
if elem.tag.endswith('item'):
ns_uri = elem.tag.split('}')[0].strip('{')
items = root.findall(f'.//{{{ns_uri}}}item')
ns = ns_uri
break
# Find all <item> (RSS) and <entry> (Atom) elements, regardless of namespace
items = []
for elem in root.iter():
if elem.tag.endswith('item') or elem.tag.endswith('entry'):
items.append(elem)
items = items[:RSS_RETURN_COUNT]
if not items:
return "No RSS feed entries found."
logger.debug(f"No RSS or Atom feed entries found in feed xml_data: {xml_data[:500]}...")
return "No RSS or Atom feed entries found."
formatted_entries = []
seen_first3 = set() # Track first 3 words (lowercased) to avoid duplicates
for item in items:
if ns:
title = item.findtext(f'{{{ns}}}title', default='No title')
link = item.findtext(f'{{{ns}}}link', default=None)
description = item.findtext(f'{{{ns}}}description', default='No description')
pub_date = item.findtext(f'{{{ns}}}pubDate', default='No date')
else:
title = item.findtext('title', default='No title')
link = item.findtext('link', default=None)
description = item.findtext('description', default='No description')
pub_date = item.findtext('pubDate', default='No date')
# Helper to try multiple tag names
def find_any(item, tags):
for tag in tags:
val = item.findtext(tag)
if val:
return val
return None
title = find_any(item, [
'title',
'{http://purl.org/rss/1.0/}title',
'{http://www.w3.org/2005/Atom}title'
])
# Atom links are often attributes, not text
link = find_any(item, [
'link',
'{http://purl.org/rss/1.0/}link',
'{http://www.w3.org/2005/Atom}link'
])
if not link:
link_elem = item.find('{http://www.w3.org/2005/Atom}link')
if link_elem is not None and 'href' in link_elem.attrib:
link = link_elem.attrib['href']
description = find_any(item, [
'description',
'{http://purl.org/rss/1.0/}description',
'{http://purl.org/rss/1.0/modules/content/}encoded',
'{http://www.w3.org/2005/Atom}summary',
'{http://www.w3.org/2005/Atom}content'
])
pub_date = find_any(item, [
'pubDate',
'{http://purl.org/dc/elements/1.1/}date',
'{http://www.w3.org/2005/Atom}updated'
])
# Unescape HTML entities and strip tags
description = html.unescape(description)
description = html.unescape(description) if description else ""
description = strip_tags(description)
if len(description) > RSS_TRIM_LENGTH:
description = description[:RSS_TRIM_LENGTH - 3] + "..."
# Duplicate check: use first 3 words of description (or title if description is empty)
text_for_dupe = description if description else (title or "")
first3 = " ".join(text_for_dupe.lower().split()[:3])
if first3 in seen_first3:
continue
seen_first3.add(first3)
formatted_entries.append(f"{title}\n{description}\n")
return "\n".join(formatted_entries)
except Exception as e:
logger.error(f"Error fetching RSS feed from {feed_url}: {e}")
return ERROR_FETCHING_DATA

View File

@@ -1,83 +1,191 @@
# modules/scheduler.py 2025 meshing-around
# Scheduler setup for Mesh Bot
# Scheduler module for mesh_bot
import asyncio
import schedule
from datetime import datetime
from modules.log import logger
from modules.system import send_message
async def setup_scheduler(
async def run_scheduler_loop(interval=1):
    """Drive the `schedule` library from asyncio.

    Calls schedule.run_pending() every `interval` seconds forever, and
    logs a summary of registered jobs once per 20-minute boundary (the
    last_logged_minute guard prevents duplicate logs within the same
    minute). Exits quietly when the task is cancelled.

    Args:
        interval: seconds to sleep between run_pending() passes (default 1).
    """
    logger.debug(f"System: Scheduler loop started Tasks: {len(schedule.jobs)}, Details:{extract_schedule_fields(schedule.get_jobs())}")
    try:
        last_logged_minute = -1
        while True:
            try:
                # Log scheduled jobs every 20 minutes
                now = datetime.now()
                if now.minute % 20 == 0 and now.minute != last_logged_minute:
                    logger.debug(f"System: Scheduled Tasks {len(schedule.jobs)}, Details:{extract_schedule_fields(schedule.get_jobs())}")
                    last_logged_minute = now.minute
                schedule.run_pending()
            except Exception as e:
                # A failing job must not kill the loop.
                logger.error(f"System: Scheduler loop exception: {e}")
            await asyncio.sleep(interval)
    except asyncio.CancelledError:
        logger.debug("System: Scheduler loop cancelled, shutting down.")
def safe_int(val, default=0, type=""):
    """Coerce a config value to int, falling back to a default.

    Args:
        val: value to convert (string, number, etc.).
        default: value returned when conversion fails (default 0).
        type: label used in the debug log to identify which config
            field failed (parameter name kept for caller compatibility).

    Returns:
        int(val), or `default` on ValueError/TypeError.
    """
    try:
        result = int(val)
    except (ValueError, TypeError):
        logger.debug(f"System: Scheduler config {type} error '{val}' to int, using default {default}")
        return default
    return result
def extract_schedule_fields(jobs):
    """Summarize schedule.get_jobs() output for logging.

    Produces 'Every ... (last run: ..., next run: ...)' strings from the
    repr of the job list using only string slicing (no regex), dropping
    the 'do <callable>()' portion and collapsing any
    '<function name at 0x...>' reprs down to the bare function name.

    Args:
        jobs: schedule job list (or anything whose str() looks like it).

    Returns:
        list[str]: one summary per job found.
    """
    text = str(jobs)
    summaries = []
    # Jobs are separated by '), '; restore the ')' the split consumes.
    for chunk in text.split('), '):
        if not chunk.endswith(')'):
            chunk += ')'
        every_at = chunk.find('Every')
        if every_at == -1:
            continue
        do_at = chunk.find('do ')
        if do_at == -1:
            continue
        summary = chunk[every_at:do_at].strip()
        # Re-attach the '(last run: ..., next run: ...)' tail, if present.
        open_paren = chunk.find('(', do_at)
        if open_paren != -1:
            summary += ' ' + chunk[open_paren:].strip()
        # Replace each '<function name at 0x...>' repr with just 'name'.
        while '<function ' in summary:
            head = summary.find('<function ')
            tail = summary.find('>', head)
            if tail == -1:
                break
            bare_name = summary[head + 10:tail].split(' ')[0]
            summary = summary[:head] + bare_name + summary[tail + 1:]
        summaries.append(summary)
    return summaries
def setup_scheduler(
schedulerMotd, MOTD, schedulerMessage, schedulerChannel, schedulerInterface,
schedulerValue, schedulerTime, schedulerInterval, logger, BroadcastScheduler):
schedulerValue, schedulerTime, schedulerInterval):
try:
# Methods imported from mesh_bot for scheduling tasks
from mesh_bot import (
tell_joke,
welcome_message,
handle_wxc,
handle_moon,
handle_sun,
handle_riverFlow,
handle_tide,
handle_satpass,
handleNews,
handle_mwx,
sysinfo,
)
from modules.rss import get_rss_feed
except ImportError as e:
logger.warning(f"Some mesh_bot schedule features are unavailable by option disable in config.ini: {e} comment out the use of these methods in your custom_scheduler.py")
# methods available for custom scheduler messages
from mesh_bot import tell_joke, welcome_message, handle_wxc, handle_moon, handle_sun, handle_riverFlow, handle_tide, handle_satpass
# Setup the scheduler based on configuration
schedulerValue = schedulerValue.lower().strip()
schedulerTime = schedulerTime.strip()
schedulerInterval = schedulerInterval.strip()
schedulerChannel = int(schedulerChannel)
schedulerInterface = int(schedulerInterface)
# Setup the scheduler based on configuration
schedulerChannel = safe_int(schedulerChannel, 0, type="channel")
schedulerInterface = safe_int(schedulerInterface, 1, type="interface")
schedulerIntervalInt = safe_int(schedulerInterval, 5, type="interval")
try:
if schedulerMotd:
scheduler_message = MOTD
else:
scheduler_message = schedulerMessage
scheduler_message = MOTD if schedulerMotd else schedulerMessage
def send_sched_msg():
send_message(scheduler_message, schedulerChannel, 0, schedulerInterface)
# Basic Scheduler Options
basicOptions = ['day', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun', 'hour', 'min']
if any(option.lower() in schedulerValue.lower() for option in basicOptions):
# Basic scheduler job to run the schedule see examples below for custom schedules
if schedulerValue.lower() == 'day':
if schedulerTime != '':
schedule.every().day.at(schedulerTime).do(lambda: send_message(scheduler_message, schedulerChannel, 0, schedulerInterface))
if any(option in schedulerValue for option in basicOptions):
if schedulerValue == 'day':
if schedulerTime:
# Specific time each day
schedule.every().day.at(schedulerTime).do(send_sched_msg)
else:
schedule.every(int(schedulerInterval)).days.do(lambda: send_message(scheduler_message, schedulerChannel, 0, schedulerInterface))
elif 'mon' in schedulerValue.lower() and schedulerTime != '':
schedule.every().monday.at(schedulerTime).do(lambda: send_message(scheduler_message, schedulerChannel, 0, schedulerInterface))
elif 'tue' in schedulerValue.lower() and schedulerTime != '':
schedule.every().tuesday.at(schedulerTime).do(lambda: send_message(scheduler_message, schedulerChannel, 0, schedulerInterface))
elif 'wed' in schedulerValue.lower() and schedulerTime != '':
schedule.every().wednesday.at(schedulerTime).do(lambda: send_message(scheduler_message, schedulerChannel, 0, schedulerInterface))
elif 'thu' in schedulerValue.lower() and schedulerTime != '':
schedule.every().thursday.at(schedulerTime).do(lambda: send_message(scheduler_message, schedulerChannel, 0, schedulerInterface))
elif 'fri' in schedulerValue.lower() and schedulerTime != '':
schedule.every().friday.at(schedulerTime).do(lambda: send_message(scheduler_message, schedulerChannel, 0, schedulerInterface))
elif 'sat' in schedulerValue.lower() and schedulerTime != '':
schedule.every().saturday.at(schedulerTime).do(lambda: send_message(scheduler_message, schedulerChannel, 0, schedulerInterface))
elif 'sun' in schedulerValue.lower() and schedulerTime != '':
schedule.every().sunday.at(schedulerTime).do(lambda: send_message(scheduler_message, schedulerChannel, 0, schedulerInterface))
elif 'hour' in schedulerValue.lower():
schedule.every(int(schedulerInterval)).hours.do(lambda: send_message(scheduler_message, schedulerChannel, 0, schedulerInterface))
elif 'min' in schedulerValue.lower():
schedule.every(int(schedulerInterval)).minutes.do(lambda: send_message(scheduler_message, schedulerChannel, 0, schedulerInterface))
logger.debug(f"System: Starting the basic scheduler to send '{scheduler_message}' on schedule '{schedulerValue}' every {schedulerInterval} interval at time '{schedulerTime}' on Device:{schedulerInterface} Channel:{schedulerChannel}")
elif 'joke' in schedulerValue.lower():
# Schedule to send a joke every specified interval
schedule.every(int(schedulerInterval)).minutes.do(lambda: send_message(tell_joke(), schedulerChannel, 0, schedulerInterface))
logger.debug(f"System: Starting the joke scheduler to send a joke every {schedulerInterval} minutes on Device:{schedulerInterface} Channel:{schedulerChannel}")
elif 'link' in schedulerValue.lower():
# Schedule to send a link message every specified interval
schedule.every(int(schedulerInterval)).hours.do(lambda: send_message(handle_satpass(schedulerInterface, 'link'), schedulerChannel, 0, schedulerInterface))
logger.debug(f"System: Starting the link scheduler to send link messages every {schedulerInterval} hours on Device:{schedulerInterface} Channel:{schedulerChannel}")
elif 'weather' in schedulerValue.lower():
# Schedule to send weather updates every specified interval
schedule.every(int(schedulerInterval)).hours.do(lambda: send_message(handle_wxc(0, schedulerInterface, 'wx'), schedulerChannel, 0, schedulerInterface))
logger.debug(f"System: Starting the weather scheduler to send weather updates every {schedulerInterval} hours on Device:{schedulerInterface} Channel:{schedulerChannel}")
elif 'custom' in schedulerValue.lower():
# Import and setup custom schedules from custom_scheduler.py
# Every N days
schedule.every(schedulerIntervalInt).days.do(send_sched_msg)
elif 'mon' in schedulerValue and schedulerTime:
schedule.every().monday.at(schedulerTime).do(send_sched_msg)
elif 'tue' in schedulerValue and schedulerTime:
schedule.every().tuesday.at(schedulerTime).do(send_sched_msg)
elif 'wed' in schedulerValue and schedulerTime:
schedule.every().wednesday.at(schedulerTime).do(send_sched_msg)
elif 'thu' in schedulerValue and schedulerTime:
schedule.every().thursday.at(schedulerTime).do(send_sched_msg)
elif 'fri' in schedulerValue and schedulerTime:
schedule.every().friday.at(schedulerTime).do(send_sched_msg)
elif 'sat' in schedulerValue and schedulerTime:
schedule.every().saturday.at(schedulerTime).do(send_sched_msg)
elif 'sun' in schedulerValue and schedulerTime:
schedule.every().sunday.at(schedulerTime).do(send_sched_msg)
elif 'hour' in schedulerValue:
schedule.every(schedulerIntervalInt).hours.do(send_sched_msg)
elif 'min' in schedulerValue:
schedule.every(schedulerIntervalInt).minutes.do(send_sched_msg)
logger.debug(f"System: Starting the basic scheduler to send '{scheduler_message}' on schedule '{schedulerValue}' every {schedulerIntervalInt} interval at time '{schedulerTime}' on Device:{schedulerInterface} Channel:{schedulerChannel}")
elif 'joke' in schedulerValue:
schedule.every(schedulerIntervalInt).minutes.do(
lambda: send_message(tell_joke(), schedulerChannel, 0, schedulerInterface)
)
logger.debug(f"System: Starting the joke scheduler to send a joke every {schedulerIntervalInt} minutes on Device:{schedulerInterface} Channel:{schedulerChannel}")
elif 'link' in schedulerValue:
schedule.every(schedulerIntervalInt).hours.do(
lambda: send_message("bbslink MeshBot looking for peers", schedulerChannel, 0, schedulerInterface)
)
logger.debug(f"System: Starting the link scheduler to send link messages every {schedulerIntervalInt} hours on Device:{schedulerInterface} Channel:{schedulerChannel}")
elif 'weather' in schedulerValue:
schedule.every().day.at(schedulerTime).do(
lambda: send_message(handle_wxc(0, schedulerInterface, 'wx', days=1), schedulerChannel, 0, schedulerInterface)
)
logger.debug(f"System: Starting the weather scheduler to send weather updates every {schedulerIntervalInt} hours on Device:{schedulerInterface} Channel:{schedulerChannel}")
elif 'news' in schedulerValue:
schedule.every(schedulerIntervalInt).hours.do(
lambda: send_message(handleNews(0, schedulerInterface, 'readnews', False), schedulerChannel, 0, schedulerInterface)
)
logger.debug(f"System: Starting the news scheduler to send news updates every {schedulerIntervalInt} hours on Device:{schedulerInterface} Channel:{schedulerChannel}")
elif 'readrss' in schedulerValue:
schedule.every(schedulerIntervalInt).hours.do(
lambda: send_message(get_rss_feed(''), schedulerChannel, 0, schedulerInterface)
)
logger.debug(f"System: Starting the RSS scheduler to send RSS feeds every {schedulerIntervalInt} hours on Device:{schedulerInterface} Channel:{schedulerChannel}")
elif 'mwx' in schedulerValue:
schedule.every().day.at(schedulerTime).do(
lambda: send_message(handle_mwx(0, schedulerInterface, 'mwx'), schedulerChannel, 0, schedulerInterface)
)
logger.debug(f"System: Starting the marine weather scheduler to send marine weather updates at {schedulerTime} on Device:{schedulerInterface} Channel:{schedulerChannel}")
elif 'sysinfo' in schedulerValue:
schedule.every(schedulerIntervalInt).hours.do(
lambda: send_message(sysinfo('', 0, schedulerInterface, False), schedulerChannel, 0, schedulerInterface)
)
logger.debug(f"System: Starting the sysinfo scheduler to send system information every {schedulerIntervalInt} hours on Device:{schedulerInterface} Channel:{schedulerChannel}")
elif 'tide' in schedulerValue:
schedule.every().day.at(schedulerTime).do(
lambda: send_message(handle_tide(0, schedulerInterface, schedulerChannel), schedulerChannel, 0, schedulerInterface)
)
logger.debug(f"System: Starting the tide scheduler to send tide information at {schedulerTime} on Device:{schedulerInterface} Channel:{schedulerChannel}")
elif 'solar' in schedulerValue:
schedule.every().day.at(schedulerTime).do(
lambda: send_message(handle_sun(0, schedulerInterface, schedulerChannel), schedulerChannel, 0, schedulerInterface)
)
logger.debug(f"System: Starting the scheduler to send solar information at {schedulerTime} on Device:{schedulerInterface} Channel:{schedulerChannel}")
elif 'custom' in schedulerValue:
try:
# This file is located in etc/custom_scheduler.py and copied to modules/custom_scheduler.py at install
from modules.custom_scheduler import setup_custom_schedules # type: ignore # pylance
from modules.custom_scheduler import setup_custom_schedules # type: ignore
setup_custom_schedules(
send_message, tell_joke, welcome_message, handle_wxc, MOTD,
schedulerChannel, schedulerInterface)
logger.debug("System: Custom scheduler file imported and custom schedules set up.")
logger.debug(f"System: Starting the custom_scheduler.py ")
schedule.every().monday.at("12:00").do(
lambda: logger.info("System: Scheduled Broadcast Enabled Reminder")
)
except Exception as e:
logger.debug(f"System: Failed to import custom scheduler. {e}")
logger.warning("Custom scheduler file not found or failed to import. cp etc/custom_scheduler.py modules/custom_scheduler.py")
# Start the Broadcast Scheduler
await BroadcastScheduler()
logger.warning("Custom scheduler file not found or failed to import. cp etc/custom_scheduler.template modules/custom_scheduler.py")
except Exception as e:
logger.error(f"System: Scheduler Error {e}")
return True

View File

@@ -32,6 +32,8 @@ cmdHistory = [] # list to hold the command history for lheard and history comman
msg_history = [] # list to hold the message history for the messages command
max_bytes = 200 # Meshtastic has ~237 byte limit, use conservative 200 bytes for message content
voxMsgQueue = [] # queue for VOX detected messages
wsjtxMsgQueue = [] # queue for WSJT-X detected messages
js8callMsgQueue = [] # queue for JS8Call detected messages
# Game trackers
surveyTracker = [] # Survey game tracker
tictactoeTracker = [] # TicTacToe game tracker
@@ -46,6 +48,13 @@ dwPlayerTracker = [] # DopeWars player tracker
jackTracker = [] # Jack game tracker
mindTracker = [] # Mastermind (mmind) game tracker
# Memory Management Constants
MAX_MSG_HISTORY = 250
MAX_CMD_HISTORY = 250
MAX_SEEN_NODES = 1000
CLEANUP_INTERVAL = 86400 # 24 hours in seconds
GAMEDELAY = 3 * CLEANUP_INTERVAL # 3 days in seconds
# Read the config file, if it does not exist, create basic config file
config = configparser.ConfigParser()
config_file = "config.ini"
@@ -118,6 +127,10 @@ if 'qrz' not in config:
config['qrz'] = {'enabled': 'False', 'qrz_db': 'data/qrz.db', 'qrz_hello_string': 'send CMD or DM me for more info.'}
config.write(open(config_file, 'w'))
if 'inventory' not in config:
config['inventory'] = {'enabled': 'False', 'inventory_db': 'data/inventory.db', 'disable_penny': 'False'}
config.write(open(config_file, 'w'))
# interface1 settings
interface1_type = config['interface'].get('type', 'serial')
port1 = config['interface'].get('port', '')
@@ -249,6 +262,10 @@ try:
llmModel = config['general'].get('ollamaModel', 'gemma3:270m') # default gemma3:270m
rawLLMQuery = config['general'].getboolean('rawLLMQuery', True) #default True
llmReplyToNonCommands = config['general'].getboolean('llmReplyToNonCommands', True) # default True
llmUseWikiContext = config['general'].getboolean('llmUseWikiContext', False) # default False
useOpenWebUI = config['general'].getboolean('useOpenWebUI', False) # default False
openWebUIURL = config['general'].get('openWebUIURL', 'http://localhost:3000') # default localhost:3000
openWebUIAPIKey = config['general'].get('openWebUIAPIKey', '') # default empty
dont_retry_disconnect = config['general'].getboolean('dont_retry_disconnect', False) # default False, retry on disconnect
favoriteNodeList = config['general'].get('favoriteNodeList', '').split(',')
enableEcho = config['general'].getboolean('enableEcho', False) # default False
@@ -298,6 +315,7 @@ try:
n2yoAPIKey = config['location'].get('n2yoAPIKey', '') # default empty
satListConfig = config['location'].get('satList', '25544').split(',') # default 25544 ISS
riverListDefault = config['location'].get('riverList', '').split(',') # default None
useTidePredict = config['location'].getboolean('useTidePredict', False) # default False use NOAA
coastalEnabled = config['location'].getboolean('coastalEnabled', False) # default False
myCoastalZone = config['location'].get('myCoastalZone', None) # default None
coastalForecastDays = config['location'].getint('coastalForecastDays', 3) # default 3 days
@@ -345,6 +363,11 @@ try:
qrz_hello_string = config['qrz'].get('qrz_hello_string', 'MeshBot says Hello! DM for more info.')
train_qrz = config['qrz'].getboolean('training', True)
# inventory and POS
inventory_enabled = config['inventory'].getboolean('enabled', False)
inventory_db = config['inventory'].get('inventory_db', 'data/inventory.db')
disable_penny = config['inventory'].getboolean('disable_penny', False)
# E-Mail Settings
sysopEmails = config['smtp'].get('sysopEmails', '').split(',')
enableSMTP = config['smtp'].getboolean('enableSMTP', False)
@@ -378,6 +401,7 @@ try:
# radio monitoring
radio_detection_enabled = config['radioMon'].getboolean('enabled', False)
dxspotter_enabled = config['radioMon'].getboolean('dxspotter_enabled', True) # default True
rigControlServerAddress = config['radioMon'].get('rigControlServerAddress', 'localhost:4532') # default localhost:4532
sigWatchBroadcastCh = config['radioMon'].get('sigWatchBroadcastCh', '2').split(',') # default Channel 2
sigWatchBroadcastInterface = config['radioMon'].getint('sigWatchBroadcastInterface', 1) # default interface 1
@@ -394,6 +418,14 @@ try:
voxOnTrapList = config['radioMon'].getboolean('voxOnTrapList', False) # default False
voxTrapList = config['radioMon'].get('voxTrapList', 'chirpy').split(',') # default chirpy
voxEnableCmd = config['radioMon'].getboolean('voxEnableCmd', True) # default True
# WSJT-X and JS8Call monitoring
wsjtx_detection_enabled = config['radioMon'].getboolean('wsjtxDetectionEnabled', False) # default WSJT-X detection disabled
wsjtx_udp_server_address = config['radioMon'].get('wsjtxUdpServerAddress', '127.0.0.1:2237') # default localhost:2237
wsjtx_watched_callsigns = config['radioMon'].get('wsjtxWatchedCallsigns', '') # default empty (all callsigns)
js8call_detection_enabled = config['radioMon'].getboolean('js8callDetectionEnabled', False) # default JS8Call detection disabled
js8call_server_address = config['radioMon'].get('js8callServerAddress', '127.0.0.1:2442') # default localhost:2442
js8call_watched_callsigns = config['radioMon'].get('js8callWatchedCallsigns', '') # default empty (all callsigns)
# file monitor
file_monitor_enabled = config['fileMon'].getboolean('filemon_enabled', False)

View File

@@ -3,7 +3,12 @@
# https://avtech.com/articles/138/list-of-email-to-sms-addresses/
# 2024 Kelly Keeton K7MHI
from modules.log import *
from modules.log import logger
from modules.settings import (
SMTP_SERVER, SMTP_PORT, SMTP_AUTH, SMTP_USERNAME, SMTP_PASSWORD,
FROM_EMAIL, EMAIL_SUBJECT, enableImap, IMAP_SERVER, IMAP_PORT,
IMAP_USERNAME, IMAP_PASSWORD, IMAP_FOLDER, sysopEmails, bbs_ban_list
)
import pickle
import time
import smtplib

View File

@@ -7,7 +7,10 @@ import xml.dom.minidom
from datetime import datetime
import ephem # pip install pyephem
from datetime import timezone
from modules.log import *
from modules.log import logger, getPrettyTime
from modules.settings import (latitudeValue, longitudeValue, zuluTime,
n2yoAPIKey, urlTimeoutSeconds, use_metric,
ERROR_FETCHING_DATA, NO_DATA_NOGPS, NO_ALERTS)
import math
trap_list_solarconditions = ("sun", "moon", "solar", "hfcond", "satpass", "howtall")

View File

@@ -13,7 +13,8 @@ import os # For file operations
import csv
from datetime import datetime
from collections import Counter
from modules.log import *
from modules.log import logger
from modules.settings import surveyRecordLocation, surveyRecordID
allowedSurveys = [] # List of allowed survey names

View File

@@ -12,7 +12,8 @@ import base64
import contextlib # for suppressing output on watchdog
import io # for suppressing output on watchdog
# homebrew 'modules'
from modules.log import *
from modules.settings import *
from modules.log import logger, getPrettyTime, CustomFormatter
# Global Variables
trap_list = ("cmd","cmd?","bannode",) # base commands
@@ -22,13 +23,6 @@ games_enabled = False
multiPingList = [{'message_from_id': 0, 'count': 0, 'type': '', 'deviceID': 0, 'channel_number': 0, 'startCount': 0}]
interface_retry_count = 3
# Memory Management Constants
MAX_MSG_HISTORY = 250
MAX_CMD_HISTORY = 250
MAX_SEEN_NODES = 1000
CLEANUP_INTERVAL = 86400 # 24 hours in seconds
GAMEDELAY = 3 * CLEANUP_INTERVAL # 3 days in seconds
# Ping Configuration
if ping_enabled:
# ping, pinging, ack, testing, test, pong
@@ -131,6 +125,10 @@ if coastalEnabled:
from modules.locationdata import * # from the spudgunman/meshing-around repo
trap_list = trap_list + ("mwx","tide",)
help_message = help_message + ", mwx, tide"
if useTidePredict:
from modules import xtide
trap_list = trap_list + ("tide",)
help_message = help_message + ", tide"
# BBS Configuration
if bbs_enabled:
@@ -147,11 +145,16 @@ if dad_jokes_enabled:
trap_list = trap_list + ("joke",)
help_message = help_message + ", joke"
if dxspotter_enabled:
from modules.dxspot import handledxcluster
trap_list = trap_list + ("dx",)
help_message = help_message + ", dx"
# Wikipedia Search Configuration
if wikipedia_enabled:
from modules.wiki import * # from the spudgunman/meshing-around repo
trap_list = trap_list + ("wiki:",)
help_message = help_message + ", wiki:"
if wikipedia_enabled or use_kiwix_server:
from modules.wiki import get_wikipedia_summary, get_kiwix_summary, get_wikipedia_summary
trap_list = trap_list + ("wiki",)
help_message = help_message + ", wiki"
# RSS Feed Configuration
if rssEnable:
@@ -261,10 +264,6 @@ if games_enabled is True:
else:
gamesCmdList = ""
# Scheduled Broadcast Configuration
if scheduler_enabled:
import schedule # pip install schedule
# Sentry Configuration
if sentry_enabled:
from math import sqrt
@@ -287,6 +286,12 @@ if checklist_enabled:
trap_list = trap_list + trap_list_checklist # items checkin, checkout, checklist, purgein, purgeout
help_message = help_message + ", checkin, checkout"
# Inventory and POS Configuration
if inventory_enabled:
from modules.inventory import * # from the spudgunman/meshing-around repo
trap_list = trap_list + trap_list_inventory # items item, itemlist, itemsell, etc.
help_message = help_message + ", item, cart"
# Radio Monitor Configuration
if radio_detection_enabled:
from modules.radio import * # from the spudgunman/meshing-around repo
@@ -1114,105 +1119,135 @@ priorVolcanoAlert = ""
priorEmergencyAlert = ""
priorWxAlert = ""
def handleAlertBroadcast(deviceID=1):
global priorVolcanoAlert, priorEmergencyAlert, priorWxAlert
alertUk = NO_ALERTS
alertDe = NO_ALERTS
alertFema = NO_ALERTS
wxAlert = NO_ALERTS
volcanoAlert = NO_ALERTS
alertWx = False
# only allow API call every 20 minutes
# the watchdog will call this function 3 times, seeing possible throttling on the API
clock = datetime.now()
if clock.minute % 20 != 0:
return False
if clock.second > 17:
return False
# check for alerts
if wxAlertBroadcastEnabled:
alertWx = alertBrodcastNOAA()
try:
global priorVolcanoAlert, priorEmergencyAlert, priorWxAlert
alertUk = NO_ALERTS
alertDe = NO_ALERTS
alertFema = NO_ALERTS
wxAlert = NO_ALERTS
volcanoAlert = NO_ALERTS
overdueAlerts = NO_ALERTS
alertWx = False
# only allow API call every 20 minutes
# the watchdog will call this function 3 times, seeing possible throttling on the API
clock = datetime.now()
if clock.minute % 20 != 0:
return False
if clock.second > 17:
return False
# check for alerts
if wxAlertBroadcastEnabled:
alertWx = alertBrodcastNOAA()
if emergencyAlertBrodcastEnabled:
if enableDEalerts:
alertDe = get_nina_alerts()
if enableGBalerts:
alertUk = get_govUK_alerts()
if emergencyAlertBrodcastEnabled:
if enableDEalerts:
alertDe = get_nina_alerts()
if enableGBalerts:
alertUk = get_govUK_alerts()
else:
# default USA alerts
alertFema = getIpawsAlert(latitudeValue,longitudeValue, shortAlerts=True)
if checklist_enabled:
overdueAlerts = format_overdue_alert()
# format alert
if alertWx:
wxAlert = f"🚨 {alertWx[1]} EAS-WX ALERT: {alertWx[0]}"
else:
# default USA alerts
alertFema = getIpawsAlert(latitudeValue,longitudeValue, shortAlerts=True)
wxAlert = False
# format alert
if alertWx:
wxAlert = f"🚨 {alertWx[1]} EAS-WX ALERT: {alertWx[0]}"
else:
wxAlert = False
femaAlert = alertFema
ukAlert = alertUk
deAlert = alertDe
femaAlert = alertFema
ukAlert = alertUk
deAlert = alertDe
if overdueAlerts != NO_ALERTS and overdueAlerts != None:
logger.debug("System: Adding overdue checkin to emergency alerts")
if femaAlert and NO_ALERTS not in femaAlert and ERROR_FETCHING_DATA not in femaAlert:
femaAlert += "\n\n" + overdueAlerts
elif ukAlert and NO_ALERTS not in ukAlert and ERROR_FETCHING_DATA not in ukAlert:
ukAlert += "\n\n" + overdueAlerts
elif deAlert and NO_ALERTS not in deAlert and ERROR_FETCHING_DATA not in deAlert:
deAlert += "\n\n" + overdueAlerts
else:
# only overdue alerts to send
if overdueAlerts != "" and overdueAlerts is not None and overdueAlerts != NO_ALERTS:
if overdueAlerts != priorEmergencyAlert:
priorEmergencyAlert = overdueAlerts
else:
return False
if isinstance(emergencyAlertBroadcastCh, list):
for channel in emergencyAlertBroadcastCh:
send_message(overdueAlerts, int(channel), 0, deviceID)
else:
send_message(overdueAlerts, emergencyAlertBroadcastCh, 0, deviceID)
return True
if emergencyAlertBrodcastEnabled:
if NO_ALERTS not in femaAlert and ERROR_FETCHING_DATA not in femaAlert:
if femaAlert != priorEmergencyAlert:
priorEmergencyAlert = femaAlert
else:
return False
if isinstance(emergencyAlertBroadcastCh, list):
for channel in emergencyAlertBroadcastCh:
send_message(femaAlert, int(channel), 0, deviceID)
else:
send_message(femaAlert, emergencyAlertBroadcastCh, 0, deviceID)
return True
if NO_ALERTS not in ukAlert:
if ukAlert != priorEmergencyAlert:
priorEmergencyAlert = ukAlert
else:
return False
if isinstance(emergencyAlertBroadcastCh, list):
for channel in emergencyAlertBroadcastCh:
send_message(ukAlert, int(channel), 0, deviceID)
else:
send_message(ukAlert, emergencyAlertBroadcastCh, 0, deviceID)
return True
if NO_ALERTS not in alertDe:
if deAlert != priorEmergencyAlert:
priorEmergencyAlert = deAlert
else:
return False
if isinstance(emergencyAlertBroadcastCh, list):
for channel in emergencyAlertBroadcastCh:
send_message(deAlert, int(channel), 0, deviceID)
else:
send_message(deAlert, emergencyAlertBroadcastCh, 0, deviceID)
return True
if wxAlertBroadcastEnabled:
if wxAlert:
if wxAlert != priorWxAlert:
priorWxAlert = wxAlert
else:
return False
if isinstance(wxAlertBroadcastChannel, list):
for channel in wxAlertBroadcastChannel:
send_message(wxAlert, int(channel), 0, deviceID)
else:
send_message(wxAlert, wxAlertBroadcastChannel, 0, deviceID)
return True
if volcanoAlertBroadcastEnabled:
volcanoAlert = get_volcano_usgs(latitudeValue, longitudeValue)
if volcanoAlert and NO_ALERTS not in volcanoAlert and ERROR_FETCHING_DATA not in volcanoAlert:
# check if the alert is different from the last one
if volcanoAlert != priorVolcanoAlert:
priorVolcanoAlert = volcanoAlert
if isinstance(volcanoAlertBroadcastChannel, list):
for channel in volcanoAlertBroadcastChannel:
send_message(volcanoAlert, int(channel), 0, deviceID)
if emergencyAlertBrodcastEnabled:
if NO_ALERTS not in femaAlert and ERROR_FETCHING_DATA not in femaAlert:
if femaAlert != priorEmergencyAlert:
priorEmergencyAlert = femaAlert
else:
send_message(volcanoAlert, volcanoAlertBroadcastChannel, 0, deviceID)
return False
if isinstance(emergencyAlertBroadcastCh, list):
for channel in emergencyAlertBroadcastCh:
send_message(femaAlert, int(channel), 0, deviceID)
else:
send_message(femaAlert, emergencyAlertBroadcastCh, 0, deviceID)
return True
if NO_ALERTS not in ukAlert:
if ukAlert != priorEmergencyAlert:
priorEmergencyAlert = ukAlert
else:
return False
if isinstance(emergencyAlertBroadcastCh, list):
for channel in emergencyAlertBroadcastCh:
send_message(ukAlert, int(channel), 0, deviceID)
else:
send_message(ukAlert, emergencyAlertBroadcastCh, 0, deviceID)
return True
if NO_ALERTS not in alertDe:
if deAlert != priorEmergencyAlert:
priorEmergencyAlert = deAlert
else:
return False
if isinstance(emergencyAlertBroadcastCh, list):
for channel in emergencyAlertBroadcastCh:
send_message(deAlert, int(channel), 0, deviceID)
else:
send_message(deAlert, emergencyAlertBroadcastCh, 0, deviceID)
return True
if wxAlertBroadcastEnabled:
if wxAlert:
if wxAlert != priorWxAlert:
priorWxAlert = wxAlert
else:
return False
if isinstance(wxAlertBroadcastChannel, list):
for channel in wxAlertBroadcastChannel:
send_message(wxAlert, int(channel), 0, deviceID)
else:
send_message(wxAlert, wxAlertBroadcastChannel, 0, deviceID)
return True
if volcanoAlertBroadcastEnabled:
volcanoAlert = get_volcano_usgs(latitudeValue, longitudeValue)
if volcanoAlert and NO_ALERTS not in volcanoAlert and ERROR_FETCHING_DATA not in volcanoAlert:
# check if the alert is different from the last one
if volcanoAlert != priorVolcanoAlert:
priorVolcanoAlert = volcanoAlert
if isinstance(volcanoAlertBroadcastChannel, list):
for channel in volcanoAlertBroadcastChannel:
send_message(volcanoAlert, int(channel), 0, deviceID)
else:
send_message(volcanoAlert, volcanoAlertBroadcastChannel, 0, deviceID)
return True
except Exception as e:
logger.error(f"System: Error in handleAlertBroadcast: {e}")
return False
def onDisconnect(interface):
# Handle disconnection of the interface
@@ -1363,6 +1398,7 @@ def initializeMeshLeaderboard():
'longestUptime': {'nodeID': None, 'value': 0, 'timestamp': 0}, # 🕰️
'fastestSpeed': {'nodeID': None, 'value': 0, 'timestamp': 0}, # 🚓
'highestAltitude': {'nodeID': None, 'value': 0, 'timestamp': 0}, # 🚀
'tallestNode': {'nodeID': None, 'value': 0, 'timestamp': 0}, # 🪜
'coldestTemp': {'nodeID': None, 'value': 999, 'timestamp': 0}, # 🥶
'hottestTemp': {'nodeID': None, 'value': -999, 'timestamp': 0}, # 🥵
'worstAirQuality': {'nodeID': None, 'value': 0, 'timestamp': 0}, # 💨
@@ -1398,16 +1434,6 @@ def consumeMetadata(packet, rxNode=0, channel=-1):
# if not a bot ID track it
if nodeID != globals().get(f'myNodeNum{rxNode}') and nodeID != 0:
# consider Meta for most messages leaderboard
node_message_count = meshLeaderboard.get('nodeMessageCounts', {})
node_message_count[nodeID] = node_message_count.get(nodeID, 0) + 1
meshLeaderboard['nodeTMessageCounts'] = node_message_count
if node_message_count[nodeID] > meshLeaderboard['mostTMessages']['value']:
meshLeaderboard['mostTMessages']['value'] = node_message_count[nodeID]
meshLeaderboard['mostTMessages']['nodeID'] = nodeID
meshLeaderboard['mostTMessages']['timestamp'] = time.time()
# consider Meta for highest and weakest DBm
if packet.get('rxSnr') is not None:
dbm = packet['rxSnr']
@@ -1415,6 +1441,20 @@ def consumeMetadata(packet, rxNode=0, channel=-1):
meshLeaderboard['highestDBm'] = {'nodeID': nodeID, 'value': dbm, 'timestamp': time.time()}
if dbm < meshLeaderboard['weakestDBm']['value']:
meshLeaderboard['weakestDBm'] = {'nodeID': nodeID, 'value': dbm, 'timestamp': time.time()}
# Meta for most Messages leaderboard
if packet_type == 'TEXT_MESSAGE':
message_count = meshLeaderboard.get('nodeMessageCounts', {})
message_count[nodeID] = message_count.get(nodeID, 0) + 1
meshLeaderboard['nodeMessageCounts'] = message_count
if message_count[nodeID] > meshLeaderboard['mostMessages']['value']:
meshLeaderboard['mostMessages'] = {'nodeID': nodeID, 'value': message_count[nodeID], 'timestamp': time.time()}
else:
tmessage_count = meshLeaderboard.get('nodeTMessageCounts', {})
tmessage_count[nodeID] = tmessage_count.get(nodeID, 0) + 1
meshLeaderboard['nodeTMessageCounts'] = tmessage_count
if tmessage_count[nodeID] > meshLeaderboard['mostTMessages']['value']:
meshLeaderboard['mostTMessages'] = {'nodeID': nodeID, 'value': tmessage_count[nodeID], 'timestamp': time.time()}
except Exception as e:
logger.debug(f"System: Metadata decode error: Device: {rxNode} Channel: {channel} {e} packet {packet}")
@@ -1424,10 +1464,11 @@ def consumeMetadata(packet, rxNode=0, channel=-1):
if debugMetadata and 'TELEMETRY_APP' not in metadataFilter:
print(f"DEBUG TELEMETRY_APP: {packet}\n\n")
telemetry_packet = packet['decoded']['telemetry']
# Track lowest battery 🪫
# Track device metrics (battery, uptime)
if telemetry_packet.get('deviceMetrics'):
deviceMetrics = telemetry_packet['deviceMetrics']
current_time = time.time()
# Track lowest battery 🪫
try:
if deviceMetrics.get('batteryLevel') is not None:
battery = float(deviceMetrics['batteryLevel'])
@@ -1517,6 +1558,15 @@ def consumeMetadata(packet, rxNode=0, channel=-1):
meshLeaderboard['highestAltitude'] = {'nodeID': nodeID, 'value': altitude, 'timestamp': time.time()}
if logMetaStats:
logger.info(f"System: 🚀 New altitude record: {altitude}m from NodeID:{nodeID} ShortName:{get_name_from_number(nodeID, 'short', rxNode)}")
# Track tallest node 🪜 (under the highfly_altitude limit by 100m)
if position_data.get('altitude') is not None:
altitude = position_data['altitude']
if altitude < (highfly_altitude - 100):
if altitude > meshLeaderboard['tallestNode']['value']:
meshLeaderboard['tallestNode'] = {'nodeID': nodeID, 'value': altitude, 'timestamp': time.time()}
if logMetaStats:
logger.info(f"System: 🪜 New tallest node record: {altitude}m from NodeID:{nodeID} ShortName:{get_name_from_number(nodeID, 'short', rxNode)}")
# if altitude is over highfly_altitude send a log and message for high-flying nodes and not in highfly_ignoreList
if position_data.get('altitude', 0) > highfly_altitude and highfly_enabled and str(nodeID) not in highfly_ignoreList and not isNodeBanned(nodeID):
logger.info(f"System: High Altitude {position_data['altitude']}m on Device: {rxNode} Channel: {channel} NodeID:{nodeID} Lat:{position_data.get('latitude', 0)} Lon:{position_data.get('longitude', 0)}")
@@ -1544,7 +1594,7 @@ def consumeMetadata(packet, rxNode=0, channel=-1):
):
plane_alt = flight_info['altitude']
node_alt = position_data.get('altitude', 0)
if abs(node_alt - plane_alt) <= 900: # within 900m
if abs(node_alt - plane_alt) <= 1000: # within 1000 meters
msg += f"\nDetected near:\n{flight_info}"
send_message(msg, highfly_channel, 0, highfly_interface)
@@ -1769,12 +1819,12 @@ def saveLeaderboard():
def loadLeaderboard():
global meshLeaderboard
try:
initializeMeshLeaderboard()
defaults = meshLeaderboard.copy()
with open('data/leaderboard.pkl', 'rb') as f:
loaded = pickle.load(f)
defaults.update(loaded) # loaded values overwrite defaults
meshLeaderboard = defaults
# Merge with current default structure to add any new keys
initializeMeshLeaderboard() # sets meshLeaderboard to default structure
for k, v in loaded.items():
meshLeaderboard[k] = v
if logMetaStats:
logger.debug("System: Mesh Leaderboard loaded from leaderboard.pkl")
except FileNotFoundError:
@@ -1825,6 +1875,16 @@ def get_mesh_leaderboard(msg, fromID, deviceID):
result += f"🚀 Altitude: {int(round(value_m, 0))}m {get_name_from_number(nodeID, 'short', 1)}\n"
else:
result += f"🚀 Altitude: {int(value_ft)}ft {get_name_from_number(nodeID, 'short', 1)}\n"
# Tallest node
if meshLeaderboard['tallestNode']['nodeID']:
nodeID = meshLeaderboard['tallestNode']['nodeID']
value_m = meshLeaderboard['tallestNode']['value']
value_ft = round(value_m * 3.28084, 0)
if use_metric:
result += f"🪜 Tallest: {int(round(value_m, 0))}m {get_name_from_number(nodeID, 'short', 1)}\n"
else:
result += f"🪜 Tallest: {int(value_ft)}ft {get_name_from_number(nodeID, 'short', 1)}\n"
# Coldest temperature
if meshLeaderboard['coldestTemp']['nodeID']:
@@ -1925,12 +1985,6 @@ def get_sysinfo(nodeID=0, deviceID=1):
sysinfo += f"📊{stats}"
return sysinfo
async def BroadcastScheduler():
# handle schedule checks for the broadcast of messages
while True:
schedule.run_pending()
await asyncio.sleep(1)
async def handleSignalWatcher():
global lastHamLibAlert
# monitor rigctld for signal strength and frequency
@@ -1993,6 +2047,62 @@ async def handleFileWatcher():
await asyncio.sleep(1)
pass
async def handleWsjtxWatcher():
# monitor WSJT-X UDP broadcasts for decode messages
from modules.radio import wsjtxMsgQueue, wsjtxMonitor
from modules.settings import sigWatchBroadcastCh, sigWatchBroadcastInterface
# Start the WSJT-X monitor task
monitor_task = asyncio.create_task(wsjtxMonitor())
while True:
if wsjtxMsgQueue:
msg = wsjtxMsgQueue.pop(0)
logger.debug(f"System: Detected message from WSJT-X: {msg}")
# Broadcast to configured channels
if type(sigWatchBroadcastCh) is list:
for ch in sigWatchBroadcastCh:
if antiSpam and int(ch) != publicChannel:
send_message(msg, int(ch), 0, sigWatchBroadcastInterface)
else:
logger.warning(f"System: antiSpam prevented Alert from WSJT-X")
else:
if antiSpam and sigWatchBroadcastCh != publicChannel:
send_message(msg, int(sigWatchBroadcastCh), 0, sigWatchBroadcastInterface)
else:
logger.warning(f"System: antiSpam prevented Alert from WSJT-X")
await asyncio.sleep(0.5)
async def handleJs8callWatcher():
# monitor JS8Call TCP API for messages
from modules.radio import js8callMsgQueue, js8callMonitor
from modules.settings import sigWatchBroadcastCh, sigWatchBroadcastInterface
# Start the JS8Call monitor task
monitor_task = asyncio.create_task(js8callMonitor())
while True:
if js8callMsgQueue:
msg = js8callMsgQueue.pop(0)
logger.debug(f"System: Detected message from JS8Call: {msg}")
# Broadcast to configured channels
if type(sigWatchBroadcastCh) is list:
for ch in sigWatchBroadcastCh:
if antiSpam and int(ch) != publicChannel:
send_message(msg, int(ch), 0, sigWatchBroadcastInterface)
else:
logger.warning(f"System: antiSpam prevented Alert from JS8Call")
else:
if antiSpam and sigWatchBroadcastCh != publicChannel:
send_message(msg, int(sigWatchBroadcastCh), 0, sigWatchBroadcastInterface)
else:
logger.warning(f"System: antiSpam prevented Alert from JS8Call")
await asyncio.sleep(0.5)
async def retry_interface(nodeID):
global retry_int1, retry_int2, retry_int3, retry_int4, retry_int5, retry_int6, retry_int7, retry_int8, retry_int9
global max_retry_count1, max_retry_count2, max_retry_count3, max_retry_count4, max_retry_count5, max_retry_count6, max_retry_count7, max_retry_count8, max_retry_count9
@@ -2117,12 +2227,16 @@ async def process_vox_queue():
async def watchdog():
global localTelemetryData, retry_int1, retry_int2, retry_int3, retry_int4, retry_int5, retry_int6, retry_int7, retry_int8, retry_int9
logger.debug("System: Watchdog started")
wd_last_logged_minute = -1
while True:
await asyncio.sleep(20)
now = datetime.now()
# perform memory cleanup every 10 minutes
if datetime.now().minute % 10 == 0:
if now.minute % 20 == 0 and now.minute != wd_last_logged_minute:
# perform memory cleanup every 10 minutes
cleanup_memory()
wd_last_logged_minute = now.minute
# check all interfaces
for i in range(1, 10):
@@ -2142,7 +2256,7 @@ async def watchdog():
handleMultiPing(0, i)
if wxAlertBroadcastEnabled or emergencyAlertBrodcastEnabled or volcanoAlertBroadcastEnabled:
if wxAlertBroadcastEnabled or emergencyAlertBrodcastEnabled or volcanoAlertBroadcastEnabled or checklist_enabled:
handleAlertBroadcast(i)
intData = displayNodeTelemetry(0, i)

459
modules/test_bot.py Normal file
View File

@@ -0,0 +1,459 @@
# test_bot.py
# Unit tests for various modules in the meshing-around project
import os
import sys
# Add the parent directory to sys.path to allow module imports
parent_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, parent_path)
import unittest
import importlib
import pkgutil
import warnings
from modules.log import logger
from modules.settings import latitudeValue, longitudeValue
# Suppress ResourceWarning warnings for asyncio unclosed event here
warnings.filterwarnings("ignore", category=ResourceWarning)
modules_path = os.path.join(parent_path, 'modules')
# Limits API calls during testing
CHECKALL = False
# Check for a file named .checkall in the parent directory
checkall_path = os.path.join(parent_path, '.checkall')
if os.path.isfile(checkall_path):
CHECKALL = True
# List of module names to exclude
exclude = ['test_bot','udp', 'system', 'log', 'gpio', 'web','test_xtide',]
available_modules = [
m.name for m in pkgutil.iter_modules([modules_path])
if m.name not in exclude]
try:
print("\nImporting Core Modules:")
from modules.log import logger, getPrettyTime
print(" ✔ Imported 'log'")
# Set location default
lat = latitudeValue
lon = longitudeValue
print(f" ✔ Location set to Latitude: {lat}, Longitude: {lon}")
from modules.system import *
print(" ✔ Imported 'system'")
print("\nImporting non-excluded modules:")
for module_name in [m.name for m in pkgutil.iter_modules([modules_path])]:
if module_name not in exclude:
importlib.import_module(module_name)
print(f" ✔ Imported '{module_name}'")
except Exception as e:
print(f"\nError importing modules: {e}")
print("Run this program from the main program directory: python3 script/test_bot.py")
exit(1)
class TestBot(unittest.TestCase):
def test_example(self):
# Example test case
self.assertEqual(1 + 1, 2)
def test_load_bbsdb(self):
from bbstools import load_bbsdb
test_load = load_bbsdb()
self.assertTrue(test_load)
def test_bbs_list_messages(self):
from bbstools import bbs_list_messages
messages = bbs_list_messages()
print("list_messages() returned:", messages)
self.assertIsInstance(messages, str)
def test_initialize_checklist_database(self):
from checklist import initialize_checklist_database, process_checklist_command
result = initialize_checklist_database()
result1 = process_checklist_command(0, 'checklist', name="none", location="none")
self.assertTrue(result)
self.assertIsInstance(result1, str)
def test_init_news_sources(self):
from filemon import initNewsSources
result = initNewsSources()
self.assertTrue(result)
def test_get_nina_alerts(self):
from globalalert import get_nina_alerts
alerts = get_nina_alerts()
self.assertIsInstance(alerts, str)
def test_llmTool_get_google(self):
from llm import llmTool_get_google
result = llmTool_get_google("What is 2+2?", 1)
self.assertIsInstance(result, list)
def test_send_ollama_query(self):
from llm import send_ollama_query
response = send_ollama_query("Hello, Ollama!")
self.assertIsInstance(response, str)
def test_extract_search_terms(self):
from llm import extract_search_terms
# Test with capitalized terms
terms = extract_search_terms("What is Python programming?")
self.assertIsInstance(terms, list)
self.assertTrue(len(terms) > 0)
# Test with multiple capitalized words
terms2 = extract_search_terms("Tell me about Albert Einstein and Marie Curie")
self.assertIsInstance(terms2, list)
self.assertTrue(len(terms2) > 0)
def test_get_wiki_context(self):
from llm import get_wiki_context
# Test with a well-known topic
context = get_wiki_context("Python programming language")
self.assertIsInstance(context, str)
# Context might be empty if wiki is disabled or fails, that's ok
def test_get_moon_phase(self):
from space import get_moon
phase = get_moon(lat, lon)
self.assertIsInstance(phase, str)
def test_get_sun_times(self):
from space import get_sun
sun_times = get_sun(lat, lon)
self.assertIsInstance(sun_times, str)
def test_hf_band_conditions(self):
from space import hf_band_conditions
conditions = hf_band_conditions()
self.assertIsInstance(conditions, str)
def test_get_wikipedia_summary(self):
from wiki import get_wikipedia_summary
summary = get_wikipedia_summary("Python", location=(lat, lon))
self.assertIsInstance(summary, str)
def test_get_kiwix_summary(self):
from wiki import get_kiwix_summary
summary = get_kiwix_summary("Python")
self.assertIsInstance(summary, str)
def get_openskynetwork(self):
from locationdata import get_openskynetwork
flights = get_openskynetwork(lat, lon)
self.assertIsInstance(flights, str)
def test_initalize_qrz_database(self):
from qrz import initalize_qrz_database
result = initalize_qrz_database()
self.assertTrue(result)
def test_get_hamlib(self):
from radio import get_hamlib
frequency = get_hamlib('f')
self.assertIsInstance(frequency, str)
def test_get_rss_feed(self):
from rss import get_rss_feed
result = get_rss_feed('')
self.assertIsInstance(result, str)
##### GAMES Tests #####
def test_jokes(self):
from modules.games.joke import tell_joke
haha = tell_joke(nodeID=0, test=True)
print("Joke response:", haha)
self.assertIsInstance(haha, str)
def test_tictactoe_initial_and_move(self):
from games.tictactoe import tictactoe
user_id = "testuser"
# Start a new game (no move yet)
initial = tictactoe.play(user_id, "")
print("Initial response:", initial)
# Make a move, e.g., '1'
second = tictactoe.play(user_id, "1")
print("After move '1':", second)
self.assertIsInstance(initial, str)
self.assertIsInstance(second, str)
def test_playVideoPoker(self):
from games.videopoker import playVideoPoker
user_id = "testuser"
# Start a new game/session
initial = playVideoPoker(user_id, 'deal')
print("Initial response:", initial)
# Place a 5-coin bet
after_bet = playVideoPoker(user_id, '5')
print("After placing 5-coin bet:", after_bet)
self.assertIsInstance(initial, str)
self.assertIsInstance(after_bet, str)
def test_play_blackjack(self):
from games.blackjack import playBlackJack
user_id = "testuser"
# Start a new game/session
initial = playBlackJack(user_id, 'deal')
print("Initial response:", initial)
# Place a 5-chip bet
after_bet = playBlackJack(user_id, '5')
print("After placing 5-chip bet:", after_bet)
self.assertIsInstance(initial, str)
self.assertIsInstance(after_bet, str)
def test_hangman_initial_and_guess(self):
from games.hangman import hangman
user_id = "testuser"
# Start a new game (no guess yet)
initial = hangman.play(user_id, "")
print("Initial response:", initial)
# Guess a letter, e.g., 'e'
second = hangman.play(user_id, "e")
print("After guessing 'e':", second)
self.assertIsInstance(initial, str)
self.assertIsInstance(second, str)
def test_play_lemonade_stand(self):
from games.lemonade import playLemonstand, lemonadeTracker
user_id = "testuser"
# Ensure user is in tracker
if not any(u['nodeID'] == user_id for u in lemonadeTracker):
lemonadeTracker.append({'nodeID': user_id, 'cups': 0, 'lemons': 0, 'sugar': 0, 'cash': 30.0, 'start': 30.0, 'cmd': 'new', 'last_played': 0})
# Start a new game
initial = playLemonstand(user_id, "", newgame=True)
print("Initial response:", initial)
# Buy 1 box of cups
after_cups = playLemonstand(user_id, "1")
print("After buying 1 box of cups:", after_cups)
self.assertIsInstance(initial, str)
self.assertIsInstance(after_cups, str)
def test_play_golfsim_one_hole(self):
from games.golfsim import playGolf
user_id = "testuser"
# Start a new game/hole
initial = playGolf(user_id, "", last_cmd="new")
print("Initial hole info:", initial)
# Take first shot with driver
after_shot = playGolf(user_id, "driver")
print("After hitting driver:", after_shot)
self.assertIsInstance(initial, str)
self.assertIsInstance(after_shot, str)
def test_play_dopewar_choose_city_and_list(self):
from games.dopewar import playDopeWars
user_id = 1234567899 # Use a unique test user ID
# Start a new game, get city selection prompt
initial = playDopeWars(user_id, "")
print("Initial city selection:", initial)
# Choose city 1
after_city = playDopeWars(user_id, "1")
print("After choosing city 1 (main game list):", after_city)
self.assertIsInstance(initial, str)
self.assertIsInstance(after_city, str)
def test_play_mastermind_one_guess(self):
from games.mmind import start_mMind
user_id = 1234567899 # Use a unique test user ID
# Start a new game (should prompt for difficulty/colors)
initial = start_mMind(user_id, "n")
print("Initial response (difficulty/colors):", initial)
# Make a guess (e.g., "RGBY" - valid for normal)
after_guess = start_mMind(user_id, "RGBY")
print("After guessing RGBY:", after_guess)
self.assertIsInstance(initial, str)
self.assertIsInstance(after_guess, str)
def test_quiz_game_answer_one_and_end(self):
from games.quiz import quizGamePlayer
quizmaster_id = "admin" # Use a valid quizmaster ID from bbs_admin_list
user_id = "testuser"
# Start the quiz as quizmaster
start_msg = quizGamePlayer.start_game(quizmaster_id)
print("Quiz start:", start_msg)
# User joins the quiz
join_msg = quizGamePlayer.join(user_id)
print("User joined:", join_msg)
# Get the first question (should be included in join_msg, but call explicitly for clarity)
question_msg = quizGamePlayer.next_question(user_id)
print("First question:", question_msg)
# Simulate answering with 'A' (adjust if your first question expects a different answer)
answer_msg = quizGamePlayer.answer(user_id, "A")
print("Answer response:", answer_msg)
# End the quiz as quizmaster
end_msg = quizGamePlayer.stop_game(quizmaster_id)
print("Quiz end:", end_msg)
self.assertIsInstance(start_msg, str)
self.assertIsInstance(join_msg, str)
self.assertIsInstance(question_msg, str)
self.assertIsInstance(answer_msg, str)
self.assertIsInstance(end_msg, str)
def test_survey_answer_one_and_end(self):
from survey import survey_module
user_id = "testuser"
survey_name = "example" # Make sure this survey exists in your data/surveys directory
# Start the survey
start_msg = survey_module.start_survey(user_id, survey_name)
print("Survey start:", start_msg)
# Answer the first question with 'A' (adjust if your survey expects a different type)
answer_msg = survey_module.answer(user_id, "A")
print("Answer response:", answer_msg)
# End the survey
end_msg = survey_module.end_survey(user_id)
print("Survey end:", end_msg)
self.assertIsInstance(start_msg, str)
self.assertIsInstance(answer_msg, str)
self.assertIsInstance(end_msg, str)
def test_hamtest_answer_one(self):
from games.hamtest import hamtest
user_id = "testuser"
# Start a new ham test game (default level: technician)
initial = hamtest.newGame(user_id)
print("Initial question:", initial)
# Answer the first question with 'A'
answer_msg = hamtest.answer(user_id, "A")
print("Answer response:", answer_msg)
self.assertIsInstance(initial, str)
self.assertIsInstance(answer_msg, str)
##### API Tests - Extended tests run only if CHECKALL is True #####
if CHECKALL:
logger.info("Running extended API tests as CHECKALL is enabled.")
def test_handledxcluster(self):
from modules.dxspot import handledxcluster
test_message = "DX band=20m mode=SSB of=K7MHI"
response = handledxcluster(test_message, nodeID=0, deviceID='testdevice')
print("DX Spotter response:", response)
self.assertIsInstance(response, str)
def test_getRepeaterBook(self):
from locationdata import getRepeaterBook
repeaters = getRepeaterBook(lat, lon)
self.assertIsInstance(repeaters, str)
def test_getArtSciRepeaters(self):
from locationdata import getArtSciRepeaters
repeaters = getArtSciRepeaters(lat, lon)
self.assertIsInstance(repeaters, str)
def test_get_NOAAtides(self):
from locationdata import get_NOAAtide
tides = get_NOAAtide(lat, lon)
self.assertIsInstance(tides, str)
def test_get_NOAAweather(self):
from locationdata import get_NOAAweather
weather = get_NOAAweather(lat, lon)
self.assertIsInstance(weather, str)
def test_where_am_i(self):
from locationdata import where_am_i
location = where_am_i(lat, lon)
self.assertIsInstance(location, str)
def test_getWeatherAlertsNOAA(self):
from locationdata import getWeatherAlertsNOAA
alerts = getWeatherAlertsNOAA(lat, lon)
if isinstance(alerts, tuple):
self.assertIsInstance(alerts[0], str)
else:
self.assertIsInstance(alerts, str)
def test_getActiveWeatherAlertsDetailNOAA(self):
from locationdata import getActiveWeatherAlertsDetailNOAA
alerts_detail = getActiveWeatherAlertsDetailNOAA(lat, lon)
self.assertIsInstance(alerts_detail, str)
def test_getIpawsAlerts(self):
from locationdata import getIpawsAlert
alerts = getIpawsAlert(lat, lon)
self.assertIsInstance(alerts, str)
def test_get_flood_noaa(self):
from locationdata import get_flood_noaa
flood_info = get_flood_noaa(lat, lon, 12484500) # Example gauge UID
self.assertIsInstance(flood_info, str)
def test_get_volcano_usgs(self):
from locationdata import get_volcano_usgs
volcano_info = get_volcano_usgs(lat, lon)
self.assertIsInstance(volcano_info, str)
def test_get_nws_marine_alerts(self):
from locationdata import get_nws_marine
marine_alerts = get_nws_marine('https://tgftp.nws.noaa.gov/data/forecasts/marine/coastal/pz/pzz135.txt',1) # Example zone
self.assertIsInstance(marine_alerts, str)
def test_checkUSGSEarthQuakes(self):
from locationdata import checkUSGSEarthQuake
earthquakes = checkUSGSEarthQuake(lat, lon)
self.assertIsInstance(earthquakes, str)
def test_getNextSatellitePass(self):
from space import getNextSatellitePass
pass_info = getNextSatellitePass('25544', lat, lon)
self.assertIsInstance(pass_info, str)
def test_get_wx_meteo(self):
from wx_meteo import get_wx_meteo
weather_report = get_wx_meteo(lat, lon)
self.assertIsInstance(weather_report, str)
def test_get_flood_openmeteo(self):
from wx_meteo import get_flood_openmeteo
flood_report = get_flood_openmeteo(lat, lon)
self.assertIsInstance(flood_report, str)
def test_check_callsign_match(self):
    """Exercise the WSJT-X/JS8Call callsign filter with a table of cases."""
    from radio import check_callsign_match
    should_match = [
        ("CQ K7MHI CN87", []),                  # empty filter matches everything
        ("CQ K7MHI CN87", ["K7MHI"]),           # exact match
        ("CQ k7mhi CN87", ["K7MHI"]),           # lowercase message
        ("CQ K7MHI CN87", ["k7mhi"]),           # lowercase filter
        ("CQ W1AW FN31", ["K7MHI", "W1AW"]),    # multi-entry filter
        ("K7MHI DE W1AW", ["K7MHI", "W1AW"]),
        ("CQ K7MHI/P CN87", ["K7MHI"]),         # portable suffix
        ("W1AW-7", ["W1AW"]),                   # mobile/SSID-style suffix
    ]
    for text, filters in should_match:
        self.assertTrue(check_callsign_match(text, filters))
    should_not_match = [
        ("CQ W1AW FN31", ["K7MHI"]),            # different callsign entirely
        ("CQ K7MHIX CN87", ["K7MHI"]),          # no partial-prefix false positive
        ("K7 TEST", ["K7MHI"]),                 # no substring false positive
    ]
    for text, filters in should_not_match:
        self.assertFalse(check_callsign_match(text, filters))
if __name__ == '__main__':
    # CHECKALL is presumably toggled by a '.checkall' marker file (defined
    # earlier in this file, outside this view — TODO confirm); without it the
    # network-dependent extended API tests are skipped.
    if not CHECKALL:
        print("\nNote: Extended API tests are skipped. To enable them, create a file named '.checkall' in the parent directory.\n")
    unittest.main()

135
modules/test_xtide.py Normal file
View File

@@ -0,0 +1,135 @@
#!/usr/bin/env python3
"""
Test script for xtide module
Tests both NOAA (disabled) and tidepredict (when available) tide predictions
"""
import os
import sys

# Make the repository root importable so `modules.*` resolves when this
# script is executed directly from inside the modules/ directory.
_repo_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, _repo_root)
def test_xtide_import():
    """Import the xtide module and report tidepredict availability."""
    print("Testing xtide module import...")
    try:
        from modules import xtide
        print(f"✓ xtide module imported successfully")
        print(f" - tidepredict available: {xtide.TIDEPREDICT_AVAILABLE}")
        return True
    except Exception as err:
        print(f"✗ Failed to import xtide: {err}")
        return False
def test_locationdata_import():
    """Import the (tide-aware) locationdata module and report the outcome."""
    print("\nTesting locationdata module import...")
    try:
        from modules import locationdata
        print(f"✓ locationdata module imported successfully")
        return True
    except Exception as err:
        print(f"✗ Failed to import locationdata: {err}")
        return False
def test_settings():
    """Check that the settings module exposes the useTidePredict option."""
    print("\nTesting settings configuration...")
    try:
        from modules import settings as my_settings
        available = hasattr(my_settings, 'useTidePredict')
        print(f"✓ settings module loaded")
        print(f" - useTidePredict setting available: {available}")
        if available:
            print(f" - useTidePredict value: {my_settings.useTidePredict}")
        return True
    except Exception as err:
        print(f"✗ Failed to load settings: {err}")
        return False
def test_noaa_fallback():
    """Exercise the default NOAA tide path; tolerant of missing network."""
    print("\nTesting NOAA API (default mode)...")
    try:
        from modules import locationdata
        from modules import settings as my_settings
        # Seattle, WA — a US location NOAA should cover
        seattle_lat, seattle_lon = 47.6062, -122.3321
        print(f" Testing with Seattle coordinates: {seattle_lat}, {seattle_lon}")
        print(f" useTidePredict = {my_settings.useTidePredict}")
        # Network access may be unavailable in CI; both outcomes count as pass.
        result = locationdata.get_NOAAtide(str(seattle_lat), str(seattle_lon))
        if result and "Error" not in result:
            print(f"✓ NOAA API returned data")
            print(f" First 100 chars: {result[:100]}")
            return True
        else:
            print(f"⚠ NOAA API returned: {result[:100]}")
            return True  # Still pass as network might not be available
    except Exception as err:
        print(f"⚠ NOAA test encountered expected issue: {err}")
        return True  # Expected in test environment
def test_parse_coords():
    """Validate parsing of station coordinate strings such as '43-36S'."""
    print("\nTesting coordinate parsing...")
    try:
        from modules.xtide import parse_station_coords
        cases = [
            (("43-36S", "172-43E"), (-43.6, 172.71666666666667)),
            (("02-45N", "072-21E"), (2.75, 72.35)),
            (("02-45S", "072-21W"), (-2.75, -72.35)),
        ]
        ok = True
        for (lat_str, lon_str), (expected_lat, expected_lon) in cases:
            result_lat, result_lon = parse_station_coords(lat_str, lon_str)
            close = (abs(result_lat - expected_lat) < 0.01
                     and abs(result_lon - expected_lon) < 0.01)
            if close:
                print(f"{lat_str}, {lon_str} -> {result_lat:.2f}, {result_lon:.2f}")
            else:
                print(f"{lat_str}, {lon_str} -> expected {expected_lat}, {expected_lon}, got {result_lat}, {result_lon}")
                ok = False
        return ok
    except Exception as err:
        print(f"✗ Coordinate parsing test failed: {err}")
        import traceback
        traceback.print_exc()
        return False
def main():
    """Run every test in order, print a summary, return overall success."""
    print("=" * 60)
    print("xtide Module Test Suite")
    print("=" * 60)
    checks = (
        ("Import xtide", test_xtide_import),
        ("Import locationdata", test_locationdata_import),
        ("Settings configuration", test_settings),
        ("Parse coordinates", test_parse_coords),
        ("NOAA fallback", test_noaa_fallback),
    )
    results = [(label, check()) for label, check in checks]
    print("\n" + "=" * 60)
    print("Test Results Summary")
    print("=" * 60)
    passed = sum(1 for _, outcome in results if outcome)
    total = len(results)
    for label, outcome in results:
        status = "✓ PASS" if outcome else "✗ FAIL"
        print(f"{status}: {label}")
    print(f"\n{passed}/{total} tests passed")
    return passed == total
if __name__ == "__main__":
success = main()
sys.exit(0 if success else 1)

View File

@@ -1,121 +1,135 @@
# meshbot wiki module
from modules.log import *
import wikipedia # pip install wikipedia
from modules.log import logger
from modules.settings import (use_kiwix_server, kiwix_url, kiwix_library_name,
urlTimeoutSeconds, wiki_return_limit, ERROR_FETCHING_DATA, wikipedia_enabled)
#import wikipedia # pip install wikipedia
import requests
import bs4 as bs
from urllib.parse import quote
# Kiwix support for local wiki
if use_kiwix_server:
import requests
import bs4 as bs
from urllib.parse import quote
def tag_visible(element):
"""Filter visible text from HTML elements for Kiwix"""
if element.parent.name in ['style', 'script', 'head', 'title', 'meta', '[document]']:
return False
if isinstance(element, bs.element.Comment):
return False
return True
# Kiwix helper functions (only loaded if use_kiwix_server is True)
if wikipedia_enabled and use_kiwix_server:
def tag_visible(element):
"""Filter visible text from HTML elements for Kiwix"""
if element.parent.name in ['style', 'script', 'head', 'title', 'meta', '[document]']:
return False
if isinstance(element, bs.element.Comment):
return False
return True
def text_from_html(body):
"""Extract main article text from HTML content"""
soup = bs.BeautifulSoup(body, 'html.parser')
# Try to find the main content div (works for both Kiwix and Wikipedia HTML)
main = soup.find('div', class_='mw-parser-output')
if not main:
# Fallback: just use the body if main content div not found
main = soup.body
if not main:
return ""
texts = main.find_all(string=True)
visible_texts = filter(tag_visible, texts)
return " ".join(t.strip() for t in visible_texts if t.strip())
def text_from_html(body):
"""Extract visible text from HTML content"""
soup = bs.BeautifulSoup(body, 'html.parser')
texts = soup.find_all(string=True)
visible_texts = filter(tag_visible, texts)
return " ".join(t.strip() for t in visible_texts if t.strip())
def get_kiwix_summary(search_term, truncate=True):
"""Query local Kiwix server for Wikipedia article using only search results."""
if search_term is None or search_term.strip() == "":
return ERROR_FETCHING_DATA
try:
search_encoded = quote(search_term)
search_url = f"{kiwix_url}/search?content={kiwix_library_name}&pattern={search_encoded}"
response = requests.get(search_url, timeout=urlTimeoutSeconds)
def get_kiwix_summary(search_term):
"""Query local Kiwix server for Wikipedia article"""
try:
search_encoded = quote(search_term)
# Try direct article access first
wiki_article = search_encoded.capitalize().replace("%20", "_")
exact_url = f"{kiwix_url}/raw/{kiwix_library_name}/content/A/{wiki_article}"
response = requests.get(exact_url, timeout=urlTimeoutSeconds)
if response.status_code == 200:
# Extract and clean text
text = text_from_html(response.text)
# Remove common Wikipedia metadata prefixes
text = text.split("Jump to navigation", 1)[-1]
text = text.split("Jump to search", 1)[-1]
# Truncate to reasonable length (first few sentences)
sentences = text.split('. ')
summary = '. '.join(sentences[:wiki_return_limit])
if summary and not summary.endswith('.'):
summary += '.'
return summary.strip()[:500] # Hard limit at 500 chars
# If direct access fails, try search
search_url = f"{kiwix_url}/search?content={kiwix_library_name}&pattern={search_encoded}"
response = requests.get(search_url, timeout=urlTimeoutSeconds)
if response.status_code == 200 and "No results were found" not in response.text:
soup = BeautifulSoup(response.text, 'html.parser')
links = [a['href'] for a in soup.find_all('a', href=True) if "start=" not in a['href']]
for link in links[:3]: # Check first 3 results
article_name = link.split("/")[-1]
if not article_name or article_name[0].islower():
continue
article_url = f"{kiwix_url}{link}"
article_response = requests.get(article_url, timeout=urlTimeoutSeconds)
if article_response.status_code == 200:
text = text_from_html(article_response.text)
text = text.split("Jump to navigation", 1)[-1]
text = text.split("Jump to search", 1)[-1]
sentences = text.split('. ')
summary = '. '.join(sentences[:wiki_return_limit])
if summary and not summary.endswith('.'):
summary += '.'
if response.status_code == 200 and "No results were found" not in response.text:
soup = bs.BeautifulSoup(response.text, 'html.parser')
results = soup.select('div.results ul li')
logger.debug(f"Kiwix: Found {len(results)} results in search results for:{search_term}")
for li in results[:3]:
a = li.find('a', href=True)
if not a:
continue
article_url = f"{kiwix_url}{a['href']}"
article_response = requests.get(article_url, timeout=urlTimeoutSeconds)
if article_response.status_code == 200:
text = text_from_html(article_response.text)
# Remove navigation and search jump text
# text = text.split("Jump to navigation", 1)[-1]
# text = text.split("Jump to search", 1)[-1]
sentences = text.split('. ')
summary = '. '.join(sentences[:wiki_return_limit])
if summary and not summary.endswith('.'):
summary += '.'
if truncate:
return summary.strip()[:500]
logger.warning(f"System: No Kiwix Results for:{search_term}")
# try to fall back to online Wikipedia if available
return get_wikipedia_summary(search_term, force=True)
except requests.RequestException as e:
logger.warning(f"System: Kiwix connection error: {e}")
return "Unable to connect to local wiki server"
except Exception as e:
logger.warning(f"System: Error with Kiwix for:{search_term} {e}")
return ERROR_FETCHING_DATA
else:
return summary.strip()
def get_wikipedia_summary(search_term, location=None, force=False):
lat, lon = location if location else (None, None)
# Use Kiwix if configured
logger.debug(f"System: No Kiwix Results for:{search_term}")
if wikipedia_enabled:
logger.debug("Kiwix: Falling back to Wikipedia API.")
return get_wikipedia_summary(search_term, force=True)
return ERROR_FETCHING_DATA
except Exception as e:
logger.warning(f"System: Error with Kiwix for:{search_term} URL:{search_url} {e}")
return ERROR_FETCHING_DATA
def get_wikipedia_summary(search_term, location=None, force=False, truncate=True):
if use_kiwix_server and not force:
return get_kiwix_summary(search_term)
if not search_term or not search_term.strip():
return ERROR_FETCHING_DATA
api_url = f"https://en.wikipedia.org/api/rest_v1/page/summary/{requests.utils.quote(search_term)}"
headers = {
"User-Agent": "MeshBot/1.0 (https://github.com/kkeeton/meshing-around; contact: youremail@example.com)"
}
try:
# Otherwise use online Wikipedia
wikipedia_search = wikipedia.search(search_term, results=3)
wikipedia_suggest = wikipedia.suggest(search_term)
#wikipedia_aroundme = wikipedia.geosearch(lat,lon, results=3)
#logger.debug(f"System: Wikipedia Nearby:{wikipedia_aroundme}")
response = requests.get(api_url, timeout=5, headers=headers)
if response.status_code == 404:
logger.warning(f"System: No Wikipedia Results for:{search_term}")
return ERROR_FETCHING_DATA
response.raise_for_status()
data = response.json()
logger.debug(f"Wikipedia API response for '{search_term}': {len(data)} keys")
if "extract" not in data or not data.get("extract"):
#logger.debug(f"System: Wikipedia API returned no extract for:{search_term} (data: {data})")
return ERROR_FETCHING_DATA
if data.get("type") == "disambiguation" or "may refer to:" in data.get("extract", ""):
#logger.warning(f"System: Disambiguation page for:{search_term} (data: {data})")
# Fetch and parse the HTML disambiguation page
html_url = f"https://en.wikipedia.org/wiki/{requests.utils.quote(search_term)}"
html_resp = requests.get(html_url, timeout=5, headers=headers)
if html_resp.status_code == 200:
soup = bs.BeautifulSoup(html_resp.text, 'html.parser')
items = soup.select('div.mw-parser-output ul li a[href^="/wiki/"]')
choices = []
for a in items:
title = a.get('title')
href = a.get('href')
# Filter out non-article links
if title and href and ':' not in href:
choices.append(f"{title} (https://en.wikipedia.org{href})")
if len(choices) >= 5:
break
if choices:
return f"'{search_term}' is ambiguous. Did you mean:\n- " + "\n- ".join(choices)
return f"'{search_term}' is ambiguous. Please be more specific. See: {html_url}"
summary = data.get("extract")
if not summary or not isinstance(summary, str) or not summary.strip():
#logger.debug(f"System: No summary found for:{search_term} (data: {data})")
return ERROR_FETCHING_DATA
sentences = [s for s in summary.split('. ') if s.strip()]
if not sentences:
return ERROR_FETCHING_DATA
summary = '. '.join(sentences[:wiki_return_limit])
if summary and not summary.endswith('.'):
summary += '.'
if truncate:
# Truncate to 500 characters
return summary.strip()[:500]
else:
return summary.strip()
except Exception as e:
logger.debug(f"System: Wikipedia search error for:{search_term} {e}")
logger.warning(f"System: Wikipedia API error for:{search_term} {e}")
return ERROR_FETCHING_DATA
if len(wikipedia_search) == 0:
logger.warning(f"System: No Wikipedia Results for:{search_term}")
return ERROR_FETCHING_DATA
try:
logger.debug(f"System: Searching Wikipedia for:{search_term}, First Result:{wikipedia_search[0]}, Suggest Word:{wikipedia_suggest}")
summary = wikipedia.summary(search_term, sentences=wiki_return_limit, auto_suggest=False, redirect=True)
except wikipedia.DisambiguationError as e:
logger.warning(f"System: Disambiguation Error for:{search_term} trying {wikipedia_search[0]}")
summary = wikipedia.summary(wikipedia_search[0], sentences=wiki_return_limit, auto_suggest=True, redirect=True)
except wikipedia.PageError as e:
logger.warning(f"System: Wikipedia Page Error for:{search_term} {e} trying {wikipedia_search[0]}")
summary = wikipedia.summary(wikipedia_search[0], sentences=wiki_return_limit, auto_suggest=True, redirect=True)
except Exception as e:
logger.warning(f"System: Error with Wikipedia for:{search_term} {e}")
return ERROR_FETCHING_DATA
return summary

View File

@@ -3,7 +3,8 @@
import requests
import json
from modules.log import *
from modules.log import logger
from modules.settings import ERROR_FETCHING_DATA
def get_weather_data(api_url, params):
response = requests.get(api_url, params=params)

129
modules/xtide.md Normal file
View File

@@ -0,0 +1,129 @@
# xtide Module - Global Tide Predictions
This module provides global tide prediction capabilities using the [tidepredict](https://github.com/windcrusader/tidepredict) library, which uses the University of Hawaii's Research Quality Dataset for worldwide tide station coverage.
## Features
- Global tide predictions (not limited to US locations like NOAA)
- Offline predictions once station data is initialized
- Automatic selection of nearest tide station
- Compatible with existing tide command interface
## Installation
1. Install tidepredict library:
This takes roughly 300–500 MB of disk space.
```bash
pip install tidepredict
```
Note: if you see a warning about externally managed system packages on a Debian-based OS, you can override it and install anyway with:
```bash
pip install tidepredict --break-system-packages
```
2. Enable in `config.ini`:
```ini
[location]
useTidePredict = True
```
## First-Time Setup
On first use, tidepredict needs to download station data from the University of Hawaii FTP server. This requires internet access and happens automatically when you:
1. Run the tide command for the first time with `useTidePredict = True`
2. Or manually initialize with:
```bash
python3 -m tidepredict -l <location> -genharm
```
The station data is cached locally in `~/.tidepredict/` for offline use afterward.
No other downloads will happen automatically; after this initial setup it works offline.
## Usage
Once enabled, the existing `tide` command will automatically use tidepredict for global locations:
```
tide
```
The module will:
1. Find the nearest tide station to your GPS coordinates
2. Load harmonic constituents for that station
3. Calculate tide predictions for today
4. Format output compatible with mesh display
## Configuration
### config.ini Options
```ini
[location]
# Enable global tide predictions using tidepredict
useTidePredict = True
# Standard location settings still apply
lat = 48.50
lon = -123.0
useMetric = False
```
## Fallback Behavior
If tidepredict is not available or encounters errors, the module will automatically fall back to the NOAA API for US locations.
## Limitations
- First-time setup requires internet access to download station database
- Station coverage depends on University of Hawaii's dataset
- Predictions may be less accurate for locations far from tide stations
## Troubleshooting
### "Station database not initialized" error
This means the station data hasn't been downloaded yet. Ensure internet access and:
```bash
# Test station download
python3 -m tidepredict -l Sydney
# Or manually run initialization
python3 -c "from tidepredict import process_station_list; process_station_list.create_station_dataframe()"
```
### "No tide station found nearby"
The module couldn't find a nearby station. This may happen if:
- You're in a location without nearby tide monitoring stations
- The station database hasn't been initialized
- Network issues prevented loading the station list
Tide Station Map
[https://uhslc.soest.hawaii.edu/network/](https://uhslc.soest.hawaii.edu/network/)
- Click on "Tide Gauges"
- Find yourself on the map
- Locate the closest Gauge and its name (typically the city name)
To manually download data for a station, first locate the needed station name:
- `python -m tidepredict -l "Port Angeles"` finds a station
- `python -m tidepredict -l "Port Angeles" -genharm` downloads that datafile
## Data Source
Tide predictions are based on harmonic analysis of historical tide data from:
- University of Hawaii Sea Level Center (UHSLC)
- Research Quality Dataset
- Global coverage with 600+ stations
## References
- [tidepredict GitHub](https://github.com/windcrusader/tidepredict)
- [UHSLC Data](https://uhslc.soest.hawaii.edu/)
- [pytides](https://github.com/sam-cox/pytides) - Underlying tide calculation library

202
modules/xtide.py Normal file
View File

@@ -0,0 +1,202 @@
# xtide.py - Global tide prediction using tidepredict library
# K7MHI Kelly Keeton 2025
import json
from datetime import datetime, timedelta
from modules.log import logger
import modules.settings as my_settings

# Optional dependency: tidepredict supplies worldwide tide predictions.
# TIDEPREDICT_AVAILABLE gates every public function in this module so callers
# can fall back to the NOAA API when the library is not installed.
try:
    from tidepredict import processdata, process_station_list, constants, timefunc
    from tidepredict.tide import Tide
    import pandas as pd
    TIDEPREDICT_AVAILABLE = True
except ImportError:
    TIDEPREDICT_AVAILABLE = False
    logger.error("xtide: tidepredict module not installed. Install with: pip install tidepredict")
def get_nearest_station(lat, lon):
    """
    Find the tide station closest to the given coordinates.

    Parameters:
    - lat: Latitude in decimal degrees
    - lon: Longitude in decimal degrees

    Returns:
    - Tuple of (station_code, station_name, country) on success, where
      station_code looks like 'h001a' (the 'h' prefix plus the lowercase
      station index), or None if no station could be determined (library
      missing, download failure, or empty database).
      NOTE: the original docstring claimed a bare station-code string was
      returned; the tuple documented here is what the code actually returns.
    """
    if not TIDEPREDICT_AVAILABLE:
        return None
    try:
        # Read the cached station list; build it on first use (needs network).
        try:
            stations = pd.read_csv(constants.STATIONFILE)
        except FileNotFoundError:
            logger.info("xtide: Creating station database from online source (requires network)")
            try:
                stations = process_station_list.create_station_dataframe()
            except Exception as net_error:
                logger.error(f"xtide: Failed to download station database: {net_error}")
                return None
        if stations.empty:
            logger.error("xtide: No stations found in database")
            return None

        # Rank stations by a flat-earth degree distance. Not geodesically
        # precise (no longitude wrap or cos(lat) scaling) but sufficient for
        # picking a nearest neighbor.
        def calc_distance(row):
            try:
                # Station coordinates are strings like "43-36S" / "172-43E"
                station_lat, station_lon = parse_station_coords(row['Lat'], row['Lon'])
                dlat = lat - station_lat
                dlon = lon - station_lon
                return (dlat**2 + dlon**2)**0.5
            except Exception:
                # Unparsable rows sort last instead of aborting the search.
                # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
                return float('inf')

        stations['distance'] = stations.apply(calc_distance, axis=1)
        nearest = stations.loc[stations['distance'].idxmin()]
        if nearest['distance'] > 10:  # More than ~10 degrees away, might be too far
            logger.warning(f"xtide: Nearest station is {nearest['distance']:.1f}° away at {nearest['loc_name']}")
        station_code = "h" + nearest['stat_idx'].lower()
        logger.debug(f"xtide: Found nearest station: {nearest['loc_name']} ({station_code}) at {nearest['distance']:.2f}° away")
        return station_code, nearest['loc_name'], nearest['country']
    except Exception as e:
        logger.error(f"xtide: Error finding nearest station: {e}")
        return None
def parse_station_coords(lat_str, lon_str):
    """
    Convert station coordinate strings such as "43-36S" / "172-43E"
    (degrees, minutes, hemisphere letter) into signed decimal degrees.

    Returns:
    - Tuple (latitude, longitude) as floats; south and west are negative.
      On any parse failure, logs at debug level and returns (0.0, 0.0).
    """
    def _to_decimal(text, negative_hemisphere):
        # "43-36S" -> degrees="43", remainder="36S"
        degrees, _, remainder = text.partition('-')
        minutes = float(remainder[:-1])
        hemisphere = remainder[-1]
        value = float(degrees) + minutes / 60.0
        return -value if hemisphere == negative_hemisphere else value

    try:
        return _to_decimal(lat_str, 'S'), _to_decimal(lon_str, 'W')
    except Exception as e:
        logger.debug(f"xtide: Error parsing coordinates {lat_str}, {lon_str}: {e}")
        return 0.0, 0.0
def get_tide_predictions(lat=0, lon=0, days=1):
    """
    Get tide predictions for the given location using the tidepredict library.

    Parameters:
    - lat: Latitude (accepts str or float; 0 together with lon=0 means "no GPS")
    - lon: Longitude (accepts str or float)
    - days: Number of days to predict (default: 1)

    Returns:
    - Formatted multi-line string with tide predictions, or a human-readable
      error message suitable for direct display on the mesh.
    """
    if not TIDEPREDICT_AVAILABLE:
        return "module not installed, see logs for more ⚓️"
    # (0, 0) is treated as "no fix" rather than a real ocean location
    if float(lat) == 0 and float(lon) == 0:
        return "No GPS data for tide prediction"
    try:
        # Find nearest station; returns (code, name, country) or None
        station_info = get_nearest_station(float(lat), float(lon))
        if not station_info:
            return "No tide station found nearby. Network may be required to download station data."
        station_code, station_name, station_country = station_info
        # Load cached station metadata/harmonics index from tidepredict
        station_dict, harmfileloc = process_station_list.read_station_info_file()
        # Harmonic constituents must have been generated for this station
        if station_code not in station_dict:
            logger.warning(f"xtide: No harmonic data. python -m tidepredict -l \"{station_name}\" -genharm")
            return f"Tide data not available for {station_name}. Station database may need initialization."
        # Reconstruct the tide model from stored harmonic constituents
        tide = processdata.reconstruct_tide_model(station_dict, station_code)
        if tide is None:
            return f"Tide model unavailable for {station_name}"
        # Predict from local midnight today through `days` days ahead
        now = datetime.now()
        start_time = now.strftime("%Y-%m-%d 00:00")
        end_time = (now + timedelta(days=days)).strftime("%Y-%m-%d 00:00")
        # Time window in the station's own timezone (falls back to UTC)
        timeobj = timefunc.Tidetime(
            st_time=start_time,
            en_time=end_time,
            station_tz=station_dict[station_code].get('tzone', 'UTC')
        )
        # 't' selects plain-text table output — assumed format is two header
        # lines followed by "date time ? height type..." columns; TODO confirm
        # against the tidepredict version in use.
        predictions = processdata.predict_plain(tide, station_dict[station_code], 't', timeobj)
        # Reformat the plain-text table into short lines for mesh display
        lines = predictions.strip().split('\n')
        if len(lines) > 2:
            result = f"Tide: {station_name}\n"
            tide_lines = lines[2:]  # Skip first 2 header lines
            for line in tide_lines[:8]:  # Limit to 8 entries to keep messages short
                parts = line.split()
                if len(parts) >= 4:
                    date_str = parts[0]
                    time_str = parts[1]
                    height = parts[3]
                    tide_type = ' '.join(parts[4:])
                    # Convert HHMM to 12-hour format unless zulu time is configured
                    if not my_settings.zuluTime:
                        try:
                            time_obj = datetime.strptime(time_str, "%H%M")
                            hour = time_obj.hour
                            minute = time_obj.minute
                            if hour >= 12:
                                time_str = f"{hour-12 if hour > 12 else 12}:{minute:02d} PM"
                            else:
                                time_str = f"{hour if hour > 0 else 12}:{minute:02d} AM"
                        except:
                            # Unexpected time format: keep the raw HHMM string
                            pass
                    result += f"{tide_type} {time_str}, {height}\n"
            return result.strip()
        else:
            # Too few lines to parse — pass the raw library output through
            return predictions
    except FileNotFoundError as e:
        logger.error(f"xtide: Station data file not found: {e}")
        return "Tide station database not initialized. Network access required for first-time setup."
    except Exception as e:
        logger.error(f"xtide: Error getting tide predictions: {e}")
        return f"Error getting tide data: {str(e)}"
def is_enabled():
    """Return truthy when tidepredict is both enabled in config and importable."""
    configured = getattr(my_settings, 'useTidePredict', False)
    return configured and TIDEPREDICT_AVAILABLE

View File

@@ -12,7 +12,8 @@ import asyncio
import time # for sleep, get some when you can :)
from datetime import datetime
import random
from modules.log import *
from modules.log import logger, CustomFormatter, msgLogger
import modules.settings as my_settings
from modules.system import *
# Global Variables
@@ -134,13 +135,13 @@ def handle_ping(message_from_id, deviceID, message, hop, snr, rssi, isDM, chann
pingCount = int(message.split(" ")[1])
if pingCount == 123 or pingCount == 1234:
pingCount = 1
elif not autoPingInChannel and not isDM:
elif not my_settings.autoPingInChannel and not isDM:
# no autoping in channels
pingCount = 1
if pingCount > 51:
pingCount = 50
except:
except ValueError:
pingCount = -1
if pingCount > 1:
@@ -151,7 +152,7 @@ def handle_ping(message_from_id, deviceID, message, hop, snr, rssi, isDM, chann
msg = f"🚦Initalizing {pingCount} auto-ping"
# if not a DM add the username to the beginning of msg
if not useDMForResponse and not isDM:
if not my_settings.useDMForResponse and not isDM:
msg = "@" + get_name_from_number(message_from_id, 'short', deviceID) + " " + msg
return msg
@@ -161,8 +162,8 @@ def handle_motd(message, message_from_id, isDM):
isAdmin = False
msg = MOTD
# check if the message_from_id is in the bbs_admin_list
if bbs_admin_list != ['']:
for admin in bbs_admin_list:
if my_settings.bbs_admin_list != ['']:
for admin in my_settings.bbs_admin_list:
if str(message_from_id) == admin:
isAdmin = True
break
@@ -191,7 +192,7 @@ def handle_echo(message, message_from_id, deviceID, isDM, channel_number):
parts = message.lower().split("echo ", 1)
if len(parts) > 1 and parts[1].strip() != "":
echo_msg = parts[1]
if channel_number != echoChannel:
if channel_number != my_settings.echoChannel:
echo_msg = "@" + get_name_from_number(message_from_id, 'short', deviceID) + " " + echo_msg
return echo_msg
else:
@@ -238,7 +239,8 @@ def onReceive(packet, interface):
if DEBUGpacket:
# Debug print the interface object
for item in interface.__dict__.items(): intDebug = f"{item}\n"
for item in interface.__dict__.items():
intDebug = f"{item}\n"
logger.debug(f"System: Packet Received on {rxType} Interface\n {intDebug} \n END of interface \n")
# Debug print the packet for debugging
logger.debug(f"Packet Received\n {packet} \n END of packet \n")
@@ -373,7 +375,7 @@ def onReceive(packet, interface):
if hop in ("MQTT", "Gateway") and hop_count > 0:
hop = f"{hop_count} Hops"
if enableHopLogs:
if my_settings.enableHopLogs:
logger.debug(f"System: Packet HopDebugger: hop_away:{hop_away} hop_limit:{hop_limit} hop_start:{hop_start} calculated_hop_count:{hop_count} final_hop_value:{hop} via_mqtt:{via_mqtt} transport_mechanism:{transport_mechanism} Hostname:{rxNodeHostName}")
# check with stringSafeChecker if the message is safe
@@ -417,7 +419,7 @@ def onReceive(packet, interface):
send_message(auto_response(message_string, snr, rssi, hop, pkiStatus, message_from_id, channel_number, rxNode, isDM), channel_number, message_from_id, rxNode)
else:
# or respond to channel message on the channel itself
if channel_number == publicChannel and antiSpam:
if channel_number == my_settings.publicChannel and my_settings.antiSpam:
# warning user spamming default channel
logger.warning(f"System: AntiSpam protection, sending DM to: {get_name_from_number(message_from_id, 'long', rxNode)}")
@@ -430,7 +432,7 @@ def onReceive(packet, interface):
else:
# message is not for bot to respond to
# ignore the message but add it to the message history list
if zuluTime:
if my_settings.zuluTime:
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
else:
timestamp = datetime.now().strftime("%Y-%m-%d %I:%M:%S%p")
@@ -448,17 +450,17 @@ def onReceive(packet, interface):
msgLogger.info(f"Device:{rxNode} Channel:{channel_number} | {get_name_from_number(message_from_id, 'long', rxNode)} | " + message_string.replace('\n', '-nl-'))
# repeat the message on the other device
if repeater_enabled and multiple_interface:
if my_settings.repeater_enabled and multiple_interface:
# wait a responseDelay to avoid message collision from lora-ack.
time.sleep(responseDelay)
time.sleep(my_settings.responseDelay)
rMsg = (f"{message_string} From:{get_name_from_number(message_from_id, 'short', rxNode)}")
# if channel found in the repeater list repeat the message
if str(channel_number) in repeater_channels:
if str(channel_number) in my_settings.repeater_channels:
for i in range(1, 10):
if globals().get(f'interface{i}_enabled', False) and i != rxNode:
logger.debug(f"Repeating message on Device{i} Channel:{channel_number}")
send_message(rMsg, channel_number, 0, i)
time.sleep(responseDelay)
time.sleep(my_settings.responseDelay)
else:
# Evaluate non TEXT_MESSAGE_APP packets
consumeMetadata(packet, rxNode, channel_number)
@@ -467,66 +469,185 @@ def onReceive(packet, interface):
logger.debug(f"System: Error Packet = {packet}")
async def start_rx():
print (CustomFormatter.bold_white + "\nMeshtastic Autoresponder Bot CTL+C to exit\n" + CustomFormatter.reset)
# Start the receive subscriber using pubsub via meshtastic library
pub.subscribe(onReceive, 'meshtastic.receive')
pub.subscribe(onDisconnect, 'meshtastic.connection.lost')
for i in range(1, 10):
if globals().get(f'interface{i}_enabled', False):
myNodeNum = globals().get(f'myNodeNum{i}', 0)
logger.info(f"System: Autoresponder Started for Device{i} {get_name_from_number(myNodeNum, 'long', i)},"
f"{get_name_from_number(myNodeNum, 'short', i)}. NodeID: {myNodeNum}, {decimal_to_hex(myNodeNum)}")
if useDMForResponse:
logger.debug(f"System: Respond by DM only")
if log_messages_to_file:
logger.debug("System: Logging Messages to disk")
if syslog_to_file:
logger.debug("System: Logging System Logs to disk")
if motd_enabled:
logger.debug(f"System: MOTD Enabled using {MOTD}")
if enableEcho:
logger.debug(f"System: Echo command Enabled")
if sentry_enabled:
logger.debug(f"System: Sentry Mode Enabled {sentry_radius}m radius reporting to channel:{secure_channel}")
if highfly_enabled:
logger.debug(f"System: HighFly Enabled using {highfly_altitude}m limit reporting to channel:{highfly_channel}")
if repeater_enabled and multiple_interface:
logger.debug(f"System: Repeater Enabled for Channels: {repeater_channels}")
if bbs_enabled:
logger.debug(f"System: BBS Enabled, {bbsdb} has {len(bbs_messages)} messages. Direct Mail Messages waiting: {(len(bbs_dm) - 1)}")
if bbs_link_enabled:
if len(bbs_link_whitelist) > 0:
logger.debug(f"System: BBS Link Enabled with {len(bbs_link_whitelist)} peers")
else:
logger.debug(f"System: BBS Link Enabled allowing all")
if scheduler_enabled:
# Examples of using the scheduler, Times here are in 24hr format
# https://schedule.readthedocs.io/en/stable/
# Reminder Scheduler is enabled every Monday at noon send a log message
schedule.every().monday.at("12:00").do(lambda: logger.info("System: Scheduled Broadcast Reminder"))
logger.debug("System: Starting the broadcast scheduler")
await BroadcastScheduler()
logger.debug("System: RX Subscriber started")
# here we go loopty loo
while True:
await asyncio.sleep(0.5)
pass
def handle_boot(mesh=True):
    """Print the startup banner and log which bot features are enabled.

    Args:
        mesh: When True, announce each enabled mesh interface device
            (devices 1-9). The pong bot passes False to skip the banner.

    Emits one logger line per enabled feature so the operator can audit
    the effective configuration at boot. Never raises: any error while
    reporting is caught and logged so startup continues.

    NOTE(review): indentation reconstructed from a dump that lost it —
    only the device banner is assumed to be mesh-gated, since every other
    line is guarded by its own feature flag; confirm against upstream.
    """
    try:
        print(CustomFormatter.bold_white + "\nMeshtastic Autoresponder Bot CTL+C to exit\n" + CustomFormatter.reset)
        if mesh:
            # Announce every enabled radio interface (devices 1-9)
            for i in range(1, 10):
                if globals().get(f'interface{i}_enabled', False):
                    myNodeNum = globals().get(f'myNodeNum{i}', 0)
                    logger.info(f"System: Autoresponder Started for Device{i} {get_name_from_number(myNodeNum, 'long', i)},"
                                f"{get_name_from_number(myNodeNum, 'short', i)}. NodeID: {myNodeNum}, {decimal_to_hex(myNodeNum)}")
        if llm_enabled:
            logger.debug(f"System: Ollama LLM Enabled, loading model {my_settings.llmModel} please wait")
            # Prime the model with a blank query so the first real request is fast
            llmLoad = llm_query(" ")
            if "trouble" not in llmLoad:
                logger.debug(f"System: LLM Model {my_settings.llmModel} loaded")
        if my_settings.bbs_enabled:
            logger.debug(f"System: BBS Enabled, {bbsdb} has {len(bbs_messages)} messages. Direct Mail Messages waiting: {(len(bbs_dm) - 1)}")
        if my_settings.bbs_link_enabled:
            if len(bbs_link_whitelist) > 0:
                logger.debug(f"System: BBS Link Enabled with {len(bbs_link_whitelist)} peers")
            else:
                logger.debug("System: BBS Link Enabled allowing all")
        if my_settings.solar_conditions_enabled:
            logger.debug("System: Celestial Telemetry Enabled")
        if my_settings.location_enabled:
            if my_settings.use_meteo_wxApi:
                logger.debug("System: Location Telemetry Enabled using Open-Meteo API")
            else:
                logger.debug("System: Location Telemetry Enabled using NOAA API")
        if my_settings.dad_jokes_enabled:
            logger.debug("System: Dad Jokes Enabled!")
        if my_settings.coastalEnabled:
            logger.debug("System: Coastal Forecast and Tide Enabled!")
        if games_enabled:
            logger.debug("System: Games Enabled!")
        if my_settings.wikipedia_enabled:
            if my_settings.use_kiwix_server:
                logger.debug(f"System: Wikipedia search Enabled using Kiwix server at {kiwix_url}")
            else:
                logger.debug("System: Wikipedia search Enabled")
        if my_settings.rssEnable:
            logger.debug(f"System: RSS Feed Reader Enabled for feeds: {rssFeedNames}")
        if my_settings.radio_detection_enabled:
            logger.debug(f"System: Radio Detection Enabled using rigctld at {my_settings.rigControlServerAddress} broadcasting to channels: {my_settings.sigWatchBroadcastCh} for {get_freq_common_name(get_hamlib('f'))}")
        if my_settings.file_monitor_enabled:
            logger.warning(f"System: File Monitor Enabled for {my_settings.file_monitor_file_path}, broadcasting to channels: {my_settings.file_monitor_broadcastCh}")
        if my_settings.enable_runShellCmd:
            logger.debug("System: Shell Command monitor enabled")
        if my_settings.allowXcmd:
            # XCMD lets the file monitor run shell commands — warn loudly
            logger.warning("System: File Monitor shell XCMD Enabled")
        if my_settings.read_news_enabled:
            logger.debug(f"System: File Monitor News Reader Enabled for {my_settings.news_file_path}")
        if my_settings.bee_enabled:
            logger.debug("System: File Monitor Bee Monitor Enabled for bee.txt")
        if my_settings.wxAlertBroadcastEnabled:
            logger.debug(f"System: Weather Alert Broadcast Enabled on channels {my_settings.wxAlertBroadcastChannel}")
        if my_settings.emergencyAlertBrodcastEnabled:
            logger.debug(f"System: Emergency Alert Broadcast Enabled on channels {my_settings.emergencyAlertBroadcastCh} for FIPS codes {my_settings.myStateFIPSList}")
            # iPAWS alerts are useless without at least one FIPS code configured
            if my_settings.myStateFIPSList == ['']:
                logger.warning("System: No FIPS codes set for iPAWS Alerts")
        if my_settings.emergency_responder_enabled:
            logger.debug(f"System: Emergency Responder Enabled on channels {my_settings.emergency_responder_alert_channel} for interface {my_settings.emergency_responder_alert_interface}")
        if my_settings.volcanoAlertBroadcastEnabled:
            logger.debug(f"System: Volcano Alert Broadcast Enabled on channels {my_settings.volcanoAlertBroadcastChannel}")
        if my_settings.qrz_hello_enabled:
            if my_settings.train_qrz:
                logger.debug("System: QRZ Welcome/Hello Enabled with training mode")
            else:
                logger.debug("System: QRZ Welcome/Hello Enabled")
        if my_settings.enableSMTP:
            if my_settings.enableImap:
                logger.debug("System: SMTP Email Alerting Enabled using IMAP")
            else:
                logger.warning("System: SMTP Email Alerting Enabled")
        # Default Options
        if my_settings.useDMForResponse:
            logger.debug("System: Respond by DM only")
        if my_settings.log_messages_to_file:
            logger.debug("System: Logging Messages to disk")
        if my_settings.syslog_to_file:
            logger.debug("System: Logging System Logs to disk")
        if my_settings.motd_enabled:
            logger.debug(f"System: MOTD Enabled using {my_settings.MOTD} scheduler:{my_settings.schedulerMotd}")
        if my_settings.sentry_enabled:
            logger.debug(f"System: Sentry Mode Enabled {my_settings.sentry_radius}m radius reporting to channel:{my_settings.secure_channel} requestLOC:{reqLocationEnabled}")
            if my_settings.sentryIgnoreList:
                logger.debug(f"System: Sentry BlockList Enabled for nodes: {my_settings.sentryIgnoreList}")
            if my_settings.sentryWatchList:
                logger.debug(f"System: Sentry WatchList Enabled for nodes: {my_settings.sentryWatchList}")
        if my_settings.highfly_enabled:
            logger.debug(f"System: HighFly Enabled using {my_settings.highfly_altitude}m limit reporting to channel:{my_settings.highfly_channel}")
        if my_settings.store_forward_enabled:
            logger.debug(f"System: S&F(messages command) Enabled using limit: {storeFlimit} and reverse queue:{my_settings.reverseSF}")
        if my_settings.enableEcho:
            logger.debug("System: Echo command Enabled")
        if my_settings.repeater_enabled and multiple_interface:
            logger.debug(f"System: Repeater Enabled for Channels: {my_settings.repeater_channels}")
        if my_settings.checklist_enabled:
            logger.debug("System: CheckList Module Enabled")
        if my_settings.ignoreChannels:
            logger.debug(f"System: Ignoring Channels: {my_settings.ignoreChannels}")
        if my_settings.noisyNodeLogging:
            logger.debug("System: Noisy Node Logging Enabled")
        if my_settings.logMetaStats:
            logger.debug("System: Logging Metadata Stats Enabled, leaderboard")
        if my_settings.scheduler_enabled:
            # (leftover raw debug print removed — this logger line is the report)
            logger.debug("System: Scheduler Enabled")
    except Exception as e:
        logger.error(f"System: Error during boot: {e}")
# Hello World
async def main():
tasks = []
try:
handle_boot(mesh=False) # pong bot
# Create core tasks
tasks.append(asyncio.create_task(start_rx(), name="pong_rx"))
tasks.append(asyncio.create_task(start_rx(), name="mesh_rx"))
tasks.append(asyncio.create_task(watchdog(), name="watchdog"))
# Add optional tasks
if file_monitor_enabled:
if my_settings.file_monitor_enabled:
tasks.append(asyncio.create_task(handleFileWatcher(), name="file_monitor"))
if my_settings.radio_detection_enabled:
tasks.append(asyncio.create_task(handleSignalWatcher(), name="hamlib"))
if my_settings.voxDetectionEnabled:
tasks.append(asyncio.create_task(voxMonitor(), name="vox_detection"))
if my_settings.scheduler_enabled:
from modules.scheduler import run_scheduler_loop, setup_scheduler
setup_scheduler(schedulerMotd, MOTD, schedulerMessage, schedulerChannel, schedulerInterface,
schedulerValue, schedulerTime, schedulerInterval)
tasks.append(asyncio.create_task(run_scheduler_loop(), name="scheduler"))
logger.debug(f"System: Starting {len(tasks)} async tasks")
# Wait for all tasks with proper exception handling

View File

@@ -7,6 +7,4 @@ maidenhead
beautifulsoup4
dadjokes
geopy
schedule
wikipedia
googlesearch-python
schedule

View File

@@ -34,8 +34,10 @@ print("---------------------------------------------------------------")
try:
# set the path to import the modules and config.ini
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from modules.log import *
from modules.system import *
from modules.log import logger, getPrettyTime
from modules.system import handleFavoriteNode
from modules.settings import LOGGING_LEVEL
from modules.system import compileFavoriteList
except Exception as e:
print(f"Error importing modules run this program from the main repo directory 'python3 script/addFav.py'")
print(f"if you forgot the rest of it.. git clone https://github.com/spudgunman/meshing-around")

View File

@@ -1,25 +1,17 @@
# How do I use this thing?
This is not a full turnkey setup for Docker yet but gets you most of the way there!
This is not a full turnkey setup for Docker yet?
## Setup New Image
`docker build -t meshing-around .`
`docker pull ghcr.io/spudgunman/meshing-around:main`
there is also [script/docker/docker-install.bat](script/docker/docker-install.bat) which will automate this.
`docker network create meshing-around-network`
## Ollama Image with compose
still a WIP
`docker compose up -d`
`docker compose run meshtasticd`
## Edit the config.ini in the docker
To edit the config.ini in the docker you can
`docker run -it --entrypoint /bin/bash meshing-around -c "nano /app/config.ini"`
`docker compose run meshing-around`
there is also [script/docker/docker-terminal.bat](script/docker/docker-terminal.bat) which will open nano to edit.
ctl+o to write out and exit editor in shell
`docker compose run ollama`
## other info
1. Ensure your serial port is properly shared.
2. Run the Docker container:
```sh
docker run --rm -it --device=/dev/ttyUSB0 meshing-around
```
`docker compose run debug-console`

View File

@@ -1,51 +0,0 @@
# OLD Docker Compose configuration for Meshing Around application with optional services.
# This setup includes the main Meshing Around service, with optional Ollama and Prometheus Node Exporter services.
# Adjust device mappings, ports, and configurations as needed for your environment.
configs:
me_config:
file: ./config.ini # Path to the configuration file for Meshing Around.
# Windows users may need to adjust the path format, e.g., C:/path/to/config.ini
services:
meshing-around:
build:
context: ../..
configs:
- source: me_config
target: /app/config.ini
devices: # Optional if using meshtasticd. Pass through radio device.
- /dev/ttyAMA10 # Replace this with your actual device!
#- /dev/ttyUSB0 # Example for USB device
user: 1000:1000 # run as non-root user for better security.
extra_hosts:
# - "host.docker.internal:host-gateway" # Enables access to host services from within the container.
container_name: meshing-around
restart: unless-stopped
#tty: true # Enable only if interactive terminal is needed.
ports:
#- "8420:8420" # web report interface
#- "443:443" # HTTPS interface meshtasticD
environment:
- OLLAMA_API_URL=http://ollama:11434
# Uncomment the following service if you want to enable Ollama for local LLM API access.
# ollama:
# image: ollama/ollama:0.5.1
# volumes:
# - ./ollama:/root/.ollama
# - ./ollama-entrypoint.sh:./entrypoint.sh
# container_name: ollama
# pull_policy: always
# tty: true
# restart: always
# entrypoint:
# - /usr/bin/bash
# - /script/docker/entrypoint.sh
# ports:
# - "11434:11434"
# healthcheck:
# test: "curl -f http://localhost:11434/api/tags | grep -q llama3.2:3b"
# interval: 30s
# timeout: 10s
# retries: 20

View File

24
script/docker/config.yaml Normal file
View File

@@ -0,0 +1,24 @@
### Template config.yaml
---
Lora:
GPIO:
GPS:
I2C:
Display:
Touchscreen:
Input:
Logging:
LogLevel: info # debug, info, warn, error
# TraceFile: /var/log/meshtasticd.json
# AsciiLogs: true # default if not specified is !isatty() on stdout
Webserver:
Port: 443 # Port for Webserver & Webservices
RootPath: /usr/share/meshtasticd/web # Root Dir of WebServer
# SSLKey: /etc/meshtasticd/ssl/private_key.pem # Path to SSL Key, generated if not present
# SSLCert: /etc/meshtasticd/ssl/certificate.pem # Path to SSL Certificate, generated if not present
General:
MaxNodes: 200
MaxMessageQueue: 100
ConfigDirectory: /etc/meshtasticd/config.d/
# MACAddress: AA:BB:CC:DD:EE:FF
MACAddressSource: eth0

View File

@@ -1,6 +0,0 @@
REM batch file to install docker on windows
REM docker compose up -d
cd ../../
docker build -t meshing-around .
REM docker-compose up -d
docker run -it --entrypoint /bin/bash meshing-around -c "nano /app/config.ini"

View File

@@ -1,2 +0,0 @@
REM launch meshing-around container with a terminal
docker run -it --entrypoint /bin/bash meshing-around

View File

@@ -1,6 +1,16 @@
#!/bin/bash
# instruction set the meshing-around docker container entrypoint
# Substitute environment variables in the config file (disabled — kept for reference)
# envsubst < /app/config.ini > /app/config.tmp && mv /app/config.tmp /app/config.ini
# Run the bot
exec python /app/mesh_bot.py
# if no config.ini exists, copy the default one
if [ ! -f /app/config.ini ]; then
cp /app/config.template /app/config.ini
ls -l /app/config.ini
# Set type = tcp in [interface]
sed -i '/^\[interface\]/,/^[^[]/ s/^type = .*/type = tcp/' /app/config.ini
# Remove any commented or uncommented hostname lines in [interface]
sed -i '/^\[interface\]/,/^[^[]/ s/^#\? *hostname = .*$//' /app/config.ini
# Add placeholder "hostname = UPDATE-DOCKER-IP" after [interface]; edit it to your meshtasticd host (e.g. meshtasticd:4403)
sed -i '/^\[interface\]/a hostname = UPDATE-DOCKER-IP' /app/config.ini
fi
# Run the bot as appuser (if you want to drop privileges)
exec python /app/mesh_bot.py

View File

@@ -1,16 +0,0 @@
#!/bin/bash
# Start Ollama in the background.
/bin/ollama serve &
# Record Process ID.
pid=$!
# Pause for Ollama to start.
sleep 5
echo "🔴 Retrieve gemma3:270m model..."
ollama pull gemma3:270m
echo "🟢 Done!"
# Wait for Ollama process to finish.
wait $pid

View File

@@ -14,8 +14,10 @@ print("---------------------------------------------------------------")
try:
# set the path to import the modules and config.ini
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from modules.log import *
from modules.bbstools import *
from modules.log import logger
from modules.bbstools import bbs_post_dm, bbs_dm, get_bbs_stats
from modules.settings import LOGGING_LEVEL, vpTracker, MOTD
logger.setLevel(LOGGING_LEVEL)
except Exception as e:
print(f"Error importing modules run this program from the main program directory 'python3 script/injectDM.py'")
exit(1)

View File

@@ -3,6 +3,7 @@
# Usage: bash update.sh or ./update.sh after making it executable with chmod +x update.sh
# Check if the mesh_bot.service or pong_bot.service
service_stopped=false
if systemctl is-active --quiet mesh_bot.service; then
echo "Stopping mesh_bot.service..."
systemctl stop mesh_bot.service
@@ -48,6 +49,8 @@ fi
if [[ ! -f modules/custom_scheduler.py ]]; then
cp -n etc/custom_scheduler.py modules/
printf "\nCustom scheduler template copied to modules/custom_scheduler.py\n"
elif ! cmp -s modules/custom_scheduler.template etc/custom_scheduler.py; then
echo "custom_scheduler.py is set. To check changes run: diff etc/custom_scheduler.py modules/custom_scheduler.py"
fi
# Backup the data/ directory
@@ -60,6 +63,24 @@ if [[ -f "modules/custom_scheduler.py" ]]; then
echo "Including custom_scheduler.py in backup..."
cp modules/custom_scheduler.py data/
fi
# Check config.ini ownership and permissions before including it in the backup.
# NB: BSD/macOS stat uses -f FORMAT while GNU/Linux stat uses -c FORMAT; the
# original used only the BSD form, which fails on Linux (where systemctl above
# implies this script actually runs). Probe once and use whichever works.
if [[ -f "config.ini" ]]; then
    if stat -c "%U" config.ini >/dev/null 2>&1; then
        # GNU coreutils stat (Linux)
        owner=$(stat -c "%U" config.ini)
        perms=$(stat -c "%a" config.ini)
    else
        # BSD stat (macOS/FreeBSD)
        owner=$(stat -f "%Su" config.ini)
        perms=$(stat -f "%Lp" config.ini)
    fi
    echo "config.ini is owned by: $owner"
    echo "config.ini permissions: $perms"
    if [[ "$owner" == "root" ]]; then
        echo "Warning: config.ini is owned by root check out the etc/set-permissions.sh script"
    fi
    # Last octal digit is the "other" class: any read (4) or write (2) bit set
    # means world-readable/writable. (Original class [7,6,2] also matched a
    # literal comma and missed digits 3, 4 and 5.)
    if [[ "$perms" =~ [2-7]$ ]]; then
        echo "Warning: config.ini is world-writable or world-readable! check out the etc/set-permissions.sh script"
    fi
    echo "Including config.ini in backup..."
    cp config.ini data/config.backup
fi
#create the tar.gz backup
tar -czf "$backup_file" "$path2backup"
if [ $? -ne 0 ]; then
echo "Error: Backup failed."
@@ -67,7 +88,6 @@ else
echo "Backup of ${path2backup} completed: ${backup_file}"
fi
# Build a config_new.ini file merging user config with new defaults
echo "Merging configuration files..."
python3 script/configMerge.py > ini_merge_log.txt 2>&1
@@ -81,14 +101,14 @@ else
echo "Configuration merge log (ini_merge_log.txt) not found. check out the script/configMerge.py tool!"
fi
# if service was stopped earlier, restart it
if [[ "$service_stopped" = true ]]; then
echo "Restarting services..."
systemctl start mesh_bot.service
systemctl start pong_bot.service
systemctl start mesh_bot_reporting.service
systemctl start mesh_bot_w3.service
echo "Services restarted."
for svc in mesh_bot.service pong_bot.service mesh_bot_reporting.service mesh_bot_w3.service; do
if systemctl list-unit-files | grep -q "^$svc"; then
systemctl start "$svc"
echo "$svc restarted."
fi
done
fi
# Print completion message