mirror of
https://github.com/SpudGunMan/meshing-around.git
synced 2026-05-15 21:55:37 +02:00
Compare commits
96 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 9ca3781353 | |||
| 47e4121c4e | |||
| 8a0eb62574 | |||
| 873509b3cc | |||
| b03c5c9c2e | |||
| 2ce976ca8a | |||
| 76a5913e08 | |||
| 3819791fcd | |||
| 9fe580a3cb | |||
| 8567c3ad84 | |||
| f68f7f10ca | |||
| a02025d4a0 | |||
| 1a2225e833 | |||
| e3728a965a | |||
| 4dc6befeab | |||
| 219eea5399 | |||
| c987c1286e | |||
| 2ebf721bc9 | |||
| bdef9a1f08 | |||
| 2da56bc31f | |||
| 1e3c3b9ea0 | |||
| d01d7ae668 | |||
| b875eed9fd | |||
| e8cd85700c | |||
| 91b02fead4 | |||
| cba6fe3ba2 | |||
| 021efc8c63 | |||
| a4b67072cb | |||
| f1e1516919 | |||
| e675134d08 | |||
| 655f2bf7e5 | |||
| 46cd2a8051 | |||
| fcc4f24ea5 | |||
| 7ddf29ca06 | |||
| 372bc0c5a7 | |||
| b3bcb62f6c | |||
| 6fb33dde10 | |||
| 744ca772f2 | |||
| b5e0653839 | |||
| f7462a498e | |||
| 30609c822d | |||
| bbfce73aaa | |||
| 4f2cd2caef | |||
| 294c09754f | |||
| 9b69ca69c4 | |||
| 290c366cee | |||
| a7f0561f09 | |||
| 4496f19605 | |||
| 6499a6e619 | |||
| fe1444b025 | |||
| 7bfbae503a | |||
| 7cfb45d2b1 | |||
| 0fb351ef4d | |||
| 2f6abade80 | |||
| 5247f8d9d3 | |||
| b36059183c | |||
| f737e401a5 | |||
| 98b5f4fb7f | |||
| 17fa03ff9d | |||
| 40aaa7202c | |||
| 5088397856 | |||
| db1c31579c | |||
| dcf1b8f3cc | |||
| 2a7000a2e6 | |||
| aa0aaed0b5 | |||
| 9db4dc8ab9 | |||
| 85e8f41dca | |||
| ddb123b759 | |||
| 10afde663e | |||
| c931d13e6e | |||
| ba6075b616 | |||
| 68c065825b | |||
| 213f121807 | |||
| 530d78482a | |||
| 6c459cd317 | |||
| 626f0dddf7 | |||
| bb57301b20 | |||
| d3adf77896 | |||
| 157176acf7 | |||
| 4fd35dc004 | |||
| 955f7350e9 | |||
| 09515b9bc0 | |||
| 9b8c9d80c8 | |||
| 8ee838f5c6 | |||
| 757d6d30b8 | |||
| 1ee785d388 | |||
| c3284f0a0f | |||
| bdcc479360 | |||
| b1444b24e4 | |||
| aef67da492 | |||
| b8b8145447 | |||
| 42a4842a5b | |||
| 201591d469 | |||
| 4ecdc7b108 | |||
| 3f78bf7a67 | |||
| 8af21b760c |
@@ -25,10 +25,10 @@ jobs:
|
||||
#
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
# Uses the `docker/login-action` action to log in to the Container registry registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here.
|
||||
- name: Log in to the Container registry
|
||||
uses: docker/login-action@28fdb31ff34708d19615a74d67103ddc2ea9725c
|
||||
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
@@ -36,7 +36,7 @@ jobs:
|
||||
# This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels.
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@8d8c7c12f7b958582a5cb82ba16d5903cb27976a
|
||||
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
# This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages.
|
||||
@@ -44,7 +44,7 @@ jobs:
|
||||
# It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step.
|
||||
- name: Build and push Docker image
|
||||
id: push
|
||||
uses: docker/build-push-action@9e436ba9f2d7bcd1d038c8e55d039d37896ddc5d
|
||||
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
@@ -53,7 +53,7 @@ jobs:
|
||||
|
||||
# This step generates an artifact attestation for the image, which is an unforgeable statement about where and how it was built. It increases supply chain security for people who consume the image. For more information, see [Using artifact attestations to establish provenance for builds](/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds).
|
||||
- name: Generate artifact attestation
|
||||
uses: actions/attest-build-provenance@v3
|
||||
uses: actions/attest-build-provenance@v4
|
||||
with:
|
||||
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}}
|
||||
subject-digest: ${{ steps.push.outputs.digest }}
|
||||
|
||||
+1
-1
@@ -28,4 +28,4 @@ modules/custom_scheduler.py
|
||||
venv/
|
||||
|
||||
# Python cache
|
||||
__pycache__/
|
||||
__pycache__/
|
||||
@@ -102,7 +102,7 @@ Advanced check-in/check-out and asset tracking for people and equipment—ideal
|
||||
- **Automatic Message Chunking**: Messages over 160 characters are automatically split to ensure reliable delivery across multiple hops.
|
||||
|
||||
## Getting Started
|
||||
This project is developed on Linux (specifically a Raspberry Pi) but should work on any platform where the [Meshtastic protobuf API](https://meshtastic.org/docs/software/python/cli/) modules are supported, and with any compatible [Meshtastic](https://meshtastic.org/docs/getting-started/) hardware, however it is **recomended to use the latest firmware code**. For pico or low-powered devices, see projects for embedding, armbian or [buildroot](https://github.com/buildroot-meshtastic/buildroot-meshtastic), also see [femtofox](https://github.com/noon92/femtofox) for running on luckfox hardware. If you need a local console consider the [firefly](https://github.com/pdxlocations/firefly) project.
|
||||
This project is developed on Linux (specifically a Raspberry Pi) but should work on any platform where the [Meshtastic protobuf API](https://meshtastic.org/docs/software/python/cli/) modules are supported, and with any compatible [Meshtastic](https://meshtastic.org/docs/getting-started/) hardware, however it is **recomended to use the latest firmware code**. For low-powered devices [mPWRD-OS](https://github.com/SpudGunMan/mPWRD-OS) for running on luckfox hardware. If you need a local console consider the [firefly](https://github.com/pdxlocations/firefly) project.
|
||||
|
||||
🥔 Please use responsibly and follow local rulings for such equipment. This project captures packets, logs them, and handles over the air communications which can include PII such as GPS locations.
|
||||
|
||||
|
||||
Executable
+29
@@ -0,0 +1,29 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
|
||||
cd "$BASE_DIR"
|
||||
|
||||
if [[ ! -d "$BASE_DIR/venv" ]]; then
|
||||
python3 -m venv "$BASE_DIR/venv"
|
||||
fi
|
||||
|
||||
source "$BASE_DIR/venv/bin/activate"
|
||||
"$BASE_DIR/venv/bin/pip" install -r "$BASE_DIR/requirements.txt"
|
||||
|
||||
mkdir -p "$BASE_DIR/data"
|
||||
cp -Rn "$BASE_DIR/etc/data/." "$BASE_DIR/data/"
|
||||
|
||||
if [[ ! -f "$BASE_DIR/config.ini" ]]; then
|
||||
cp "$BASE_DIR/config.template" "$BASE_DIR/config.ini"
|
||||
sleep 1
|
||||
replace="s|type = serial|type = tcp|g"
|
||||
sed -i.bak "$replace" "$BASE_DIR/config.ini"
|
||||
replace="s|# hostname = meshtastic.local|hostname = localhost|g"
|
||||
sed -i.bak "$replace" "$BASE_DIR/config.ini"
|
||||
rm -f "$BASE_DIR/config.ini.bak"
|
||||
else
|
||||
echo "config.ini already exists, leaving it unchanged."
|
||||
fi
|
||||
|
||||
deactivate
|
||||
+15
-3
@@ -200,6 +200,12 @@ lat = 48.50
|
||||
lon = -123.0
|
||||
fuzzConfigLocation = True
|
||||
fuzzItAll = False
|
||||
# database file for saved locations
|
||||
locations_db = data/locations.db
|
||||
# if True, only administrators can save public locations
|
||||
public_location_admin_manage = False
|
||||
# if True, only administrators can delete locations
|
||||
delete_public_locations_admins_only = False
|
||||
|
||||
# Default to metric units rather than imperial
|
||||
useMetric = False
|
||||
@@ -320,9 +326,9 @@ schedulerMotd = False
|
||||
# 'tide' (time/day), 'solar' (time/day) for automated information broadcasts, matching module needs enabled!
|
||||
# 'custom' for module/scheduler.py custom schedule examples
|
||||
value =
|
||||
# interval to use when time is not set (e.g. every 2 days)
|
||||
# interval for recurring schedules (e.g. every 2 days, or every 2 days at a set time)
|
||||
interval =
|
||||
# time of day in 24:00 hour format when value is 'day' and interval is not set
|
||||
# time of day in 24:00 hour format when value is 'day' (optional with interval)
|
||||
# Process run :00,:20,:40 try and vary the 20 minute offsets to avoid collision
|
||||
time =
|
||||
|
||||
@@ -490,4 +496,10 @@ autoBanThreshold = 5
|
||||
# Throttle value for API requests no ban_hammer
|
||||
apiThrottleValue = 20
|
||||
# Timeframe for offenses (in seconds)
|
||||
autoBanTimeframe = 3600
|
||||
autoBanTimeframe = 3600
|
||||
|
||||
[dataPersistence]
|
||||
# Enable or disable the data persistence loop service
|
||||
enabled = True
|
||||
# Interval in seconds for the persistence loop (how often to save data)
|
||||
interval = 300
|
||||
@@ -959,18 +959,6 @@
|
||||
"To relay messages between satellites"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "E2A13",
|
||||
"correct": 1,
|
||||
"refs": "",
|
||||
"question": "Which of the following techniques is used by digital satellites to relay messages?",
|
||||
"answers": [
|
||||
"Digipeating",
|
||||
"Store-and-forward",
|
||||
"Multisatellite relaying",
|
||||
"Node hopping"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "E2B01",
|
||||
"correct": 0,
|
||||
@@ -2495,18 +2483,6 @@
|
||||
"Utilizing a Class D final amplifier"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "E4D05",
|
||||
"correct": 0,
|
||||
"refs": "",
|
||||
"question": "What transmitter frequencies would create an intermodulation-product signal in a receiver tuned to 146.70 MHz when a nearby station transmits on 146.52 MHz?",
|
||||
"answers": [
|
||||
"146.34 MHz and 146.61 MHz",
|
||||
"146.88 MHz and 146.34 MHz",
|
||||
"146.10 MHz and 147.30 MHz",
|
||||
"146.30 MHz and 146.90 MHz"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "E4D06",
|
||||
"correct": 2,
|
||||
@@ -3851,18 +3827,6 @@
|
||||
"Permeability"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "E6D07",
|
||||
"correct": 3,
|
||||
"refs": "",
|
||||
"question": "What is the current that flows in the primary winding of a transformer when there is no load on the secondary winding?",
|
||||
"answers": [
|
||||
"Stabilizing current",
|
||||
"Direct current",
|
||||
"Excitation current",
|
||||
"Magnetizing current"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "E6D08",
|
||||
"correct": 1,
|
||||
|
||||
@@ -35,18 +35,6 @@
|
||||
"12 meters"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "G1A04",
|
||||
"correct": 3,
|
||||
"refs": "[97.303(h)]",
|
||||
"question": "Which of the following amateur bands is restricted to communication only on specific channels, rather than frequency ranges?",
|
||||
"answers": [
|
||||
"11 meters",
|
||||
"12 meters",
|
||||
"30 meters",
|
||||
"60 meters"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "G1A05",
|
||||
"correct": 0,
|
||||
@@ -347,18 +335,6 @@
|
||||
"Submit a rule-making proposal to the FCC describing the codes and methods of the technique"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "G1C09",
|
||||
"correct": 2,
|
||||
"refs": "[97.313(i)]",
|
||||
"question": "What is the maximum power limit on the 60-meter band?",
|
||||
"answers": [
|
||||
"1500 watts PEP",
|
||||
"10 watts RMS",
|
||||
"ERP of 100 watts PEP with respect to a dipole",
|
||||
"ERP of 100 watts PEP with respect to an isotropic antenna"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "G1C11",
|
||||
"correct": 3,
|
||||
@@ -611,18 +587,6 @@
|
||||
"1500 watts"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "G1E09",
|
||||
"correct": 0,
|
||||
"refs": "[97.115]",
|
||||
"question": "Under what circumstances are messages that are sent via digital modes exempt from Part 97 third-party rules that apply to other modes of communication?",
|
||||
"answers": [
|
||||
"Under no circumstances",
|
||||
"When messages are encrypted",
|
||||
"When messages are not encrypted",
|
||||
"When under automatic control"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "G1E10",
|
||||
"correct": 0,
|
||||
@@ -4079,18 +4043,6 @@
|
||||
"All these choices are correct"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "G8C01",
|
||||
"correct": 2,
|
||||
"refs": "",
|
||||
"question": "On what band do amateurs share channels with the unlicensed Wi-Fi service?",
|
||||
"answers": [
|
||||
"432 MHz",
|
||||
"902 MHz",
|
||||
"2.4 GHz",
|
||||
"10.7 GHz"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "G8C02",
|
||||
"correct": 0,
|
||||
|
||||
@@ -0,0 +1,173 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Install mesh_bot as a systemd service for the current user.
|
||||
# Defaults:
|
||||
# - project path: /opt/meshing-around
|
||||
# - service name: mesh_bot
|
||||
# - service user: invoking user (SUDO_USER when using sudo)
|
||||
|
||||
SERVICE_NAME="mesh_bot"
|
||||
PROJECT_PATH="/opt/meshing-around"
|
||||
SERVICE_USER="${SUDO_USER:-${USER:-}}"
|
||||
SERVICE_GROUP=""
|
||||
USE_LAUNCH_SH=1
|
||||
NEED_MESHTASTICD=1
|
||||
DRY_RUN=0
|
||||
|
||||
usage() {
|
||||
cat <<'EOF'
|
||||
Usage:
|
||||
bash etc/install_service.sh [options]
|
||||
|
||||
Options:
|
||||
--project-path PATH Project root path (default: /opt/meshing-around)
|
||||
--user USER Linux user to run the service as (default: invoking user)
|
||||
--group GROUP Linux group to run the service as (default: user's primary group)
|
||||
--direct-python Run python3 mesh_bot.py directly (skip launch.sh)
|
||||
--no-meshtasticd Do not require meshtasticd.service to be present
|
||||
--dry-run Print actions without changing the system
|
||||
-h, --help Show this help
|
||||
|
||||
Examples:
|
||||
sudo bash etc/install_service.sh
|
||||
sudo bash etc/install_service.sh --project-path /opt/meshing-around --user $USER
|
||||
EOF
|
||||
}
|
||||
|
||||
log() {
|
||||
printf '[install_service] %s\n' "$*"
|
||||
}
|
||||
|
||||
die() {
|
||||
printf '[install_service] ERROR: %s\n' "$*" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--project-path)
|
||||
[[ $# -ge 2 ]] || die "Missing value for --project-path"
|
||||
PROJECT_PATH="$2"
|
||||
shift 2
|
||||
;;
|
||||
--user)
|
||||
[[ $# -ge 2 ]] || die "Missing value for --user"
|
||||
SERVICE_USER="$2"
|
||||
shift 2
|
||||
;;
|
||||
--group)
|
||||
[[ $# -ge 2 ]] || die "Missing value for --group"
|
||||
SERVICE_GROUP="$2"
|
||||
shift 2
|
||||
;;
|
||||
--direct-python)
|
||||
USE_LAUNCH_SH=0
|
||||
shift
|
||||
;;
|
||||
--no-meshtasticd)
|
||||
NEED_MESHTASTICD=0
|
||||
shift
|
||||
;;
|
||||
--dry-run)
|
||||
DRY_RUN=1
|
||||
shift
|
||||
;;
|
||||
-h|--help)
|
||||
usage
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
die "Unknown option: $1"
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
[[ -n "$SERVICE_USER" ]] || die "Could not determine service user. Use --user USER."
|
||||
[[ "$SERVICE_USER" != "root" ]] || die "Refusing to install service as root. Use --user USER."
|
||||
|
||||
if ! id "$SERVICE_USER" >/dev/null 2>&1; then
|
||||
die "User '$SERVICE_USER' does not exist"
|
||||
fi
|
||||
|
||||
if [[ -z "$SERVICE_GROUP" ]]; then
|
||||
SERVICE_GROUP="$(id -gn "$SERVICE_USER")"
|
||||
fi
|
||||
|
||||
id -g "$SERVICE_USER" >/dev/null 2>&1 || die "Could not determine group for user '$SERVICE_USER'"
|
||||
[[ -d "$PROJECT_PATH" ]] || die "Project path not found: $PROJECT_PATH"
|
||||
[[ -f "$PROJECT_PATH/mesh_bot.py" ]] || die "mesh_bot.py not found in $PROJECT_PATH"
|
||||
|
||||
if [[ $USE_LAUNCH_SH -eq 1 ]]; then
|
||||
[[ -f "$PROJECT_PATH/launch.sh" ]] || die "launch.sh not found in $PROJECT_PATH"
|
||||
EXEC_START="/usr/bin/bash $PROJECT_PATH/launch.sh mesh"
|
||||
else
|
||||
EXEC_START="/usr/bin/python3 $PROJECT_PATH/mesh_bot.py"
|
||||
fi
|
||||
|
||||
if [[ $NEED_MESHTASTICD -eq 1 ]]; then
|
||||
if ! systemctl list-units --type=service --no-pager --all | grep meshtasticd.service; then
|
||||
die "meshtasticd.service dependency not found. to ignore this check, run with --no-meshtasticd flag."
|
||||
fi
|
||||
MESHTASTICD_DEPENDENCY_LINES=$'\nAfter=meshtasticd.service\nRequires=meshtasticd.service'
|
||||
else
|
||||
MESHTASTICD_DEPENDENCY_LINES=""
|
||||
fi
|
||||
|
||||
SERVICE_FILE_CONTENT="[Unit]
|
||||
Description=MESH-BOT
|
||||
After=network.target${MESHTASTICD_DEPENDENCY_LINES}
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=$SERVICE_USER
|
||||
Group=$SERVICE_GROUP
|
||||
WorkingDirectory=$PROJECT_PATH
|
||||
ExecStart=$EXEC_START
|
||||
KillSignal=SIGINT
|
||||
Environment=REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt
|
||||
Environment=SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
|
||||
Environment=PYTHONUNBUFFERED=1
|
||||
Restart=on-failure
|
||||
RestartSec=5
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
"
|
||||
|
||||
TARGET_SERVICE_FILE="/etc/systemd/system/$SERVICE_NAME.service"
|
||||
|
||||
log "Service user: $SERVICE_USER"
|
||||
log "Service group: $SERVICE_GROUP"
|
||||
log "Project path: $PROJECT_PATH"
|
||||
log "Service file: $TARGET_SERVICE_FILE"
|
||||
log "ExecStart: $EXEC_START"
|
||||
|
||||
if [[ $DRY_RUN -eq 1 ]]; then
|
||||
log "Dry run mode enabled. Service file content:"
|
||||
printf '\n%s\n' "$SERVICE_FILE_CONTENT"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ $EUID -ne 0 ]]; then
|
||||
die "This script needs root privileges. Re-run with: sudo bash etc/install_service.sh"
|
||||
fi
|
||||
|
||||
printf '%s' "$SERVICE_FILE_CONTENT" > "$TARGET_SERVICE_FILE"
|
||||
chmod 644 "$TARGET_SERVICE_FILE"
|
||||
|
||||
# Ensure runtime files are writable by the service account.
|
||||
mkdir -p "$PROJECT_PATH/logs" "$PROJECT_PATH/data"
|
||||
chown -R "$SERVICE_USER:$SERVICE_GROUP" "$PROJECT_PATH/logs" "$PROJECT_PATH/data"
|
||||
if [[ -f "$PROJECT_PATH/config.ini" ]]; then
|
||||
chown "$SERVICE_USER:$SERVICE_GROUP" "$PROJECT_PATH/config.ini"
|
||||
chmod 664 "$PROJECT_PATH/config.ini"
|
||||
fi
|
||||
|
||||
systemctl daemon-reload
|
||||
systemctl enable "$SERVICE_NAME.service"
|
||||
systemctl restart "$SERVICE_NAME.service"
|
||||
|
||||
log "Service installed and started."
|
||||
log "Check status with: sudo systemctl status $SERVICE_NAME.service"
|
||||
log "View logs with: sudo journalctl -u $SERVICE_NAME.service -f"
|
||||
+12
@@ -107,6 +107,18 @@ if [[ ! -w ${program_path} ]]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# check if we have git and curl installed
|
||||
if ! command -v git &> /dev/null
|
||||
then
|
||||
printf "git not found, trying 'apt-get install git'\n"
|
||||
sudo apt-get install git
|
||||
fi
|
||||
if ! command -v curl &> /dev/null
|
||||
then
|
||||
printf "curl not found, trying 'apt-get install curl'\n"
|
||||
sudo apt-get install curl
|
||||
fi
|
||||
|
||||
# check if we are in /opt/meshing-around
|
||||
if [[ "$program_path" != "/opt/meshing-around" ]]; then
|
||||
echo "----------------------------------------------"
|
||||
|
||||
@@ -19,7 +19,7 @@ fi
|
||||
|
||||
export REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt
|
||||
export SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
|
||||
|
||||
export HOME=$(pwd)
|
||||
# launch the application
|
||||
if [[ "$1" == pong* ]]; then
|
||||
python3 pong_bot.py
|
||||
|
||||
+74
-42
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/python3
|
||||
#!/usr/bin/env python3
|
||||
# Meshtastic Autoresponder MESH Bot
|
||||
# K7MHI Kelly Keeton 2025
|
||||
try:
|
||||
@@ -591,6 +591,11 @@ llmRunCounter = 0
|
||||
llmTotalRuntime = []
|
||||
llmLocationTable = [{'nodeID': 1234567890, 'location': 'No Location'},]
|
||||
|
||||
# Runtime safety caps to avoid unbounded growth on long-lived systems.
|
||||
MAX_SEEN_NODES = 5000
|
||||
MAX_LLM_LOCATION_ENTRIES = 50
|
||||
MAX_LLM_RUNTIME_SAMPLES = 50
|
||||
|
||||
def handle_satpass(message_from_id, deviceID, message='', vox=False):
|
||||
if vox:
|
||||
location = (my_settings.latitudeValue, my_settings.longitudeValue)
|
||||
@@ -656,6 +661,8 @@ def handle_llm(message_from_id, channel_number, deviceID, message, publicChannel
|
||||
# likely a DM
|
||||
user_input = message
|
||||
# consider this a command use for the cmdHistory list
|
||||
if len(cmdHistory) > 50:
|
||||
cmdHistory.pop(0)
|
||||
cmdHistory.append({'nodeID': message_from_id, 'cmd': 'llm-use', 'time': time.time()})
|
||||
|
||||
# check for a welcome message (is this redundant?)
|
||||
@@ -679,6 +686,8 @@ def handle_llm(message_from_id, channel_number, deviceID, message, publicChannel
|
||||
# if not in table add the location
|
||||
if not any(d['nodeID'] == message_from_id for d in llmLocationTable):
|
||||
llmLocationTable.append({'nodeID': message_from_id, 'location': location_name})
|
||||
if len(llmLocationTable) > MAX_LLM_LOCATION_ENTRIES:
|
||||
llmLocationTable = llmLocationTable[-MAX_LLM_LOCATION_ENTRIES:]
|
||||
|
||||
user_input = user_input.strip()
|
||||
|
||||
@@ -709,12 +718,15 @@ def handle_llm(message_from_id, channel_number, deviceID, message, publicChannel
|
||||
end = time.time()
|
||||
llmRunCounter += 1
|
||||
llmTotalRuntime.append(end - start)
|
||||
if len(llmTotalRuntime) > MAX_LLM_RUNTIME_SAMPLES:
|
||||
llmTotalRuntime = llmTotalRuntime[-MAX_LLM_RUNTIME_SAMPLES:]
|
||||
|
||||
return response
|
||||
|
||||
def handleDopeWars(message, nodeID, rxNode):
|
||||
global dwPlayerTracker
|
||||
global dwHighScore
|
||||
msg = ""
|
||||
|
||||
# Find player in tracker
|
||||
player = next((p for p in dwPlayerTracker if p.get('userID') == nodeID), None)
|
||||
@@ -725,7 +737,6 @@ def handleDopeWars(message, nodeID, rxNode):
|
||||
'userID': nodeID,
|
||||
'last_played': time.time(),
|
||||
'cmd': 'new',
|
||||
# ... add other fields as needed ...
|
||||
}
|
||||
dwPlayerTracker.append(player)
|
||||
msg = 'Welcome to 💊Dope Wars💉 You have ' + str(total_days) + ' days to make as much 💰 as possible! '
|
||||
@@ -738,11 +749,6 @@ def handleDopeWars(message, nodeID, rxNode):
|
||||
if p.get('userID') == nodeID:
|
||||
p['last_played'] = time.time()
|
||||
msg = playDopeWars(nodeID, message)
|
||||
|
||||
# if message starts wth 'e'xit remove player from tracker
|
||||
if message.lower().startswith('e'):
|
||||
dwPlayerTracker[:] = [p for p in dwPlayerTracker if p.get('userID') != nodeID]
|
||||
msg = 'You have exited Dope Wars.'
|
||||
return msg
|
||||
|
||||
def handle_gTnW(chess = False):
|
||||
@@ -1859,14 +1865,23 @@ def onReceive(packet, interface):
|
||||
# Priocess the incoming packet, handles the responses to the packet with auto_response()
|
||||
# Sends the packet to the correct handler for processing
|
||||
|
||||
if not isinstance(packet, dict):
|
||||
logger.warning(f"System: Ignoring malformed packet type: {type(packet).__name__}")
|
||||
return
|
||||
|
||||
decoded = packet.get('decoded')
|
||||
if not isinstance(decoded, dict):
|
||||
decoded = {}
|
||||
|
||||
# extract interface details from inbound packet
|
||||
rxType = type(interface).__name__
|
||||
|
||||
# Values assinged to the packet
|
||||
packet_id = None
|
||||
rxNode = message_from_id = snr = rssi = hop = hop_away = channel_number = hop_start = hop_count = hop_limit = 0
|
||||
pkiStatus = (False, 'ABC')
|
||||
rxNodeHostName = None
|
||||
replyIDset = False
|
||||
replyIDset = None
|
||||
emojiSeen = False
|
||||
simulator_flag = False
|
||||
isDM = False
|
||||
@@ -1904,8 +1919,8 @@ def onReceive(packet, interface):
|
||||
|
||||
if rxNode is None:
|
||||
# default to interface 1 ## FIXME needs better like a default interface setting or hash lookup
|
||||
if 'decoded' in packet and packet['decoded']['portnum'] in ['ADMIN_APP', 'SIMULATOR_APP']:
|
||||
session_passkey = packet.get('decoded', {}).get('admin', {}).get('sessionPasskey', None)
|
||||
if decoded.get('portnum') in ['ADMIN_APP', 'SIMULATOR_APP']:
|
||||
session_passkey = decoded.get('admin', {}).get('sessionPasskey', None)
|
||||
rxNode = 1
|
||||
|
||||
# check if the packet has a channel flag use it ## FIXME needs to be channel hash lookup
|
||||
@@ -1945,17 +1960,22 @@ def onReceive(packet, interface):
|
||||
# logger.debug(f"System: Received Packet on Channel:{channel_number} Name:{channel_name} on Interface:{rxNode}")
|
||||
|
||||
# check if the packet has a simulator flag
|
||||
simulator_flag = packet.get('decoded', {}).get('simulator', False)
|
||||
simulator_flag = decoded.get('simulator', False)
|
||||
if isinstance(simulator_flag, dict):
|
||||
# assume Software Simulator
|
||||
simulator_flag = True
|
||||
|
||||
# set the message_from_id
|
||||
message_from_id = packet['from']
|
||||
message_from_id = packet.get('from')
|
||||
if message_from_id is None:
|
||||
logger.warning(f"System: Ignoring packet missing 'from' field on Device:{rxNode}")
|
||||
return
|
||||
|
||||
# if message_from_id is not in the seenNodes list add it
|
||||
if not any(node.get('nodeID') == message_from_id for node in seenNodes):
|
||||
seenNodes.append({'nodeID': message_from_id, 'rxInterface': rxNode, 'channel': channel_number, 'welcome': False, 'first_seen': time.time(), 'lastSeen': time.time()})
|
||||
if len(seenNodes) > MAX_SEEN_NODES:
|
||||
seenNodes = seenNodes[-MAX_SEEN_NODES:]
|
||||
else:
|
||||
# update lastSeen time
|
||||
for node in seenNodes:
|
||||
@@ -1963,7 +1983,7 @@ def onReceive(packet, interface):
|
||||
node['lastSeen'] = time.time()
|
||||
break
|
||||
# BBS DM MAIL CHECKER
|
||||
if bbs_enabled and 'decoded' in packet:
|
||||
if bbs_enabled and decoded:
|
||||
msg = bbs_check_dm(message_from_id)
|
||||
if msg:
|
||||
logger.info(f"System: BBS DM Delivery: {msg[1]} For: {get_name_from_number(message_from_id, 'long', rxNode)}")
|
||||
@@ -1978,18 +1998,25 @@ def onReceive(packet, interface):
|
||||
|
||||
# handle TEXT_MESSAGE_APP
|
||||
try:
|
||||
if 'decoded' in packet and packet['decoded']['portnum'] == 'TEXT_MESSAGE_APP':
|
||||
message_bytes = packet['decoded']['payload']
|
||||
message_string = message_bytes.decode('utf-8')
|
||||
via_mqtt = packet['decoded'].get('viaMqtt', False)
|
||||
if decoded.get('portnum') == 'TEXT_MESSAGE_APP':
|
||||
message_bytes = decoded.get('payload', b'')
|
||||
if isinstance(message_bytes, bytes):
|
||||
message_string = message_bytes.decode('utf-8', errors='replace')
|
||||
elif isinstance(message_bytes, str):
|
||||
message_string = message_bytes
|
||||
else:
|
||||
logger.warning(f"System: Ignoring TEXT_MESSAGE_APP with invalid payload type: {type(message_bytes).__name__}")
|
||||
return
|
||||
message_log_string = message_string.replace('\r', ' ').replace('\n', ' ')
|
||||
via_mqtt = decoded.get('viaMqtt', False)
|
||||
transport_mechanism = (
|
||||
packet.get('transport_mechanism')
|
||||
or packet.get('transportMechanism')
|
||||
or (packet.get('decoded', {}).get('transport_mechanism'))
|
||||
or (packet.get('decoded', {}).get('transportMechanism'))
|
||||
or decoded.get('transport_mechanism')
|
||||
or decoded.get('transportMechanism')
|
||||
or 'unknown'
|
||||
)
|
||||
rx_time = packet['decoded'].get('rxTime', time.time())
|
||||
rx_time = decoded.get('rxTime', time.time())
|
||||
|
||||
# check if the packet is from us
|
||||
if message_from_id in [myNodeNum1, myNodeNum2, myNodeNum3, myNodeNum4, myNodeNum5, myNodeNum6, myNodeNum7, myNodeNum8, myNodeNum9]:
|
||||
@@ -2004,9 +2031,11 @@ def onReceive(packet, interface):
|
||||
if packet.get('publicKey'):
|
||||
pkiStatus = packet.get('pkiEncrypted', False), packet.get('publicKey', 'ABC')
|
||||
|
||||
# check if the packet has replyId flag // currently unused in the code
|
||||
if packet.get('replyId'):
|
||||
replyIDset = packet.get('replyId', False)
|
||||
# Use packet id for threaded replies;
|
||||
packet_id = packet.get('id', None)
|
||||
|
||||
# existing reply - unused for tracking
|
||||
replyIDSet = packet.get('replyIDSet', None)
|
||||
|
||||
# check if the packet has emoji flag set it // currently unused in the code
|
||||
if packet.get('emoji'):
|
||||
@@ -2071,13 +2100,13 @@ def onReceive(packet, interface):
|
||||
return
|
||||
|
||||
# If the packet is a DM (Direct Message) respond to it, otherwise validate its a message for us on the channel
|
||||
if packet['to'] in [myNodeNum1, myNodeNum2, myNodeNum3, myNodeNum4, myNodeNum5, myNodeNum6, myNodeNum7, myNodeNum8, myNodeNum9]:
|
||||
if packet.get('to') in [myNodeNum1, myNodeNum2, myNodeNum3, myNodeNum4, myNodeNum5, myNodeNum6, myNodeNum7, myNodeNum8, myNodeNum9]:
|
||||
# message is DM to us
|
||||
isDM = True
|
||||
# check if the message contains a trap word, DMs are always responded to
|
||||
if (messageTrap(message_string) and not llm_enabled) or messageTrap(message_string.split()[0]):
|
||||
# log the message to stdout
|
||||
logger.info(f"Device:{rxNode} Channel: {channel_number} " + CustomFormatter.green + f"Received DM: " + CustomFormatter.white + f"{message_string} " + CustomFormatter.purple +\
|
||||
logger.info(f"Device:{rxNode} Channel: {channel_number} " + CustomFormatter.green + f"Received DM: " + CustomFormatter.white + f"{message_log_string} " + CustomFormatter.purple +\
|
||||
"From: " + CustomFormatter.white + f"{get_name_from_number(message_from_id, 'long', rxNode)}")
|
||||
# respond with DM
|
||||
send_message(auto_response(message_string, snr, rssi, hop, pkiStatus, message_from_id, channel_number, rxNode, isDM), channel_number, message_from_id, rxNode)
|
||||
@@ -2087,7 +2116,7 @@ def onReceive(packet, interface):
|
||||
playingGame = checkPlayingGame(message_from_id, message_string, rxNode, channel_number)
|
||||
elif hop_count >= my_settings.game_hop_limit:
|
||||
if games_enabled:
|
||||
logger.warning(f"Device:{rxNode} Ignoring Request to Play Game: {message_string} From: {get_name_from_number(message_from_id, 'long', rxNode)} with hop count: {hop}")
|
||||
logger.warning(f"Device:{rxNode} Ignoring Request to Play Game: {message_log_string} From: {get_name_from_number(message_from_id, 'long', rxNode)} with hop count: {hop}")
|
||||
send_message(f"Your hop count exceeds safe playable distance at {hop_count} hops", channel_number, message_from_id, rxNode)
|
||||
else:
|
||||
playingGame = False
|
||||
@@ -2101,7 +2130,7 @@ def onReceive(packet, interface):
|
||||
send_message(llm, channel_number, message_from_id, rxNode)
|
||||
else:
|
||||
# respond with welcome message on DM
|
||||
logger.warning(f"Device:{rxNode} Ignoring DM: {message_string} From: {get_name_from_number(message_from_id, 'long', rxNode)}")
|
||||
logger.warning(f"Device:{rxNode} Ignoring DM: {message_log_string} From: {get_name_from_number(message_from_id, 'long', rxNode)}")
|
||||
|
||||
# if seenNodes list is not marked as welcomed send welcome message
|
||||
if not any(node['nodeID'] == message_from_id and node['welcome'] == True for node in seenNodes):
|
||||
@@ -2127,26 +2156,26 @@ def onReceive(packet, interface):
|
||||
|
||||
# log the message to the message log
|
||||
if log_messages_to_file:
|
||||
msgLogger.info(f"Device:{rxNode} Channel:{channel_number} | {get_name_from_number(message_from_id, 'long', rxNode)} | DM | " + message_string.replace('\n', '-nl-'))
|
||||
msgLogger.info(f"Device:{rxNode} Channel:{channel_number} | {get_name_from_number(message_from_id, 'long', rxNode)} | DM | " + message_log_string)
|
||||
else:
|
||||
# message is on a channel
|
||||
if messageTrap(message_string):
|
||||
# message is for us to respond to, or is it...
|
||||
if my_settings.ignoreDefaultChannel and channel_number == my_settings.publicChannel:
|
||||
logger.debug(f"System: Ignoring CMD:{message_string} From: {get_name_from_number(message_from_id, 'short', rxNode)} Default Channel:{channel_number}")
|
||||
logger.debug(f"System: Ignoring CMD:{message_log_string} From: {get_name_from_number(message_from_id, 'short', rxNode)} Default Channel:{channel_number}")
|
||||
elif str(message_from_id) in my_settings.bbs_ban_list:
|
||||
logger.debug(f"System: Ignoring CMD:{message_string} From: {get_name_from_number(message_from_id, 'short', rxNode)} Cantankerous Node")
|
||||
logger.debug(f"System: Ignoring CMD:{message_log_string} From: {get_name_from_number(message_from_id, 'short', rxNode)} Cantankerous Node")
|
||||
elif str(channel_number) in my_settings.ignoreChannels:
|
||||
logger.debug(f"System: Ignoring CMD:{message_string} From: {get_name_from_number(message_from_id, 'short', rxNode)} Ignored Channel:{channel_number}")
|
||||
logger.debug(f"System: Ignoring CMD:{message_log_string} From: {get_name_from_number(message_from_id, 'short', rxNode)} Ignored Channel:{channel_number}")
|
||||
elif my_settings.cmdBang and not message_string.startswith("!"):
|
||||
logger.debug(f"System: Ignoring CMD:{message_string} From: {get_name_from_number(message_from_id, 'short', rxNode)} Didnt sound like they meant it")
|
||||
logger.debug(f"System: Ignoring CMD:{message_log_string} From: {get_name_from_number(message_from_id, 'short', rxNode)} Didnt sound like they meant it")
|
||||
else:
|
||||
# message is for bot to respond to, seriously this time..
|
||||
logger.info(f"Device:{rxNode} Channel:{channel_number} " + CustomFormatter.green + "ReceivedChannel: " + CustomFormatter.white + f"{message_string} " + CustomFormatter.purple +\
|
||||
logger.info(f"Device:{rxNode} Channel:{channel_number} " + CustomFormatter.green + "ReceivedChannel: " + CustomFormatter.white + f"{message_log_string} " + CustomFormatter.purple +\
|
||||
"From: " + CustomFormatter.white + f"{get_name_from_number(message_from_id, 'long', rxNode)}")
|
||||
if my_settings.useDMForResponse:
|
||||
# respond to channel message via direct message
|
||||
send_message(auto_response(message_string, snr, rssi, hop, pkiStatus, message_from_id, channel_number, rxNode, isDM), channel_number, message_from_id, rxNode)
|
||||
send_message(auto_response(message_string, snr, rssi, hop, pkiStatus, message_from_id, channel_number, rxNode, isDM), channel_number, message_from_id, rxNode, reply_id=packet_id)
|
||||
else:
|
||||
# or respond to channel message on the channel itself
|
||||
if channel_number == my_settings.publicChannel and my_settings.antiSpam:
|
||||
@@ -2154,10 +2183,10 @@ def onReceive(packet, interface):
|
||||
logger.warning(f"System: AntiSpam protection, sending DM to: {get_name_from_number(message_from_id, 'long', rxNode)}")
|
||||
|
||||
# respond to channel message via direct message
|
||||
send_message(auto_response(message_string, snr, rssi, hop, pkiStatus, message_from_id, channel_number, rxNode, isDM), channel_number, message_from_id, rxNode)
|
||||
send_message(auto_response(message_string, snr, rssi, hop, pkiStatus, message_from_id, channel_number, rxNode, isDM), channel_number, message_from_id, rxNode, reply_id=packet_id)
|
||||
else:
|
||||
# respond to channel message on the channel itself
|
||||
send_message(auto_response(message_string, snr, rssi, hop, pkiStatus, message_from_id, channel_number, rxNode, isDM), channel_number, 0, rxNode)
|
||||
send_message(auto_response(message_string, snr, rssi, hop, pkiStatus, message_from_id, channel_number, rxNode, isDM), channel_number, 0, rxNode, reply_id=packet_id)
|
||||
|
||||
else:
|
||||
# message is not for us to respond to
|
||||
@@ -2177,9 +2206,9 @@ def onReceive(packet, interface):
|
||||
|
||||
# print the message to the log and sdout
|
||||
logger.info(f"Device:{rxNode} Channel:{channel_number} " + CustomFormatter.green + "Ignoring Message:" + CustomFormatter.white +\
|
||||
f" {message_string} " + CustomFormatter.purple + "From:" + CustomFormatter.white + f" {get_name_from_number(message_from_id)}")
|
||||
f" {message_log_string} " + CustomFormatter.purple + "From:" + CustomFormatter.white + f" {get_name_from_number(message_from_id)}")
|
||||
if my_settings.log_messages_to_file:
|
||||
msgLogger.info(f"Device:{rxNode} Channel:{channel_number} | {get_name_from_number(message_from_id, 'long', rxNode)} | " + message_string.replace('\n', '-nl-'))
|
||||
msgLogger.info(f"Device:{rxNode} Channel:{channel_number} | {get_name_from_number(message_from_id, 'long', rxNode)} | " + message_log_string)
|
||||
|
||||
# repeat the message on the other device
|
||||
if my_settings.repeater_enabled and my_settings.multiple_interface:
|
||||
@@ -2209,7 +2238,7 @@ def onReceive(packet, interface):
|
||||
hello(message_from_id, name)
|
||||
# send a hello message as a DM
|
||||
if not my_settings.train_qrz:
|
||||
send_message(f"Hello {name} {qrz_hello_string}", channel_number, message_from_id, rxNode)
|
||||
send_message(f"Hello {name} {qrz_hello_string}", channel_number, message_from_id, rxNode, reply_id=packet_id)
|
||||
|
||||
# handle mini games
|
||||
if my_settings.wordOfTheDay:
|
||||
@@ -2237,8 +2266,8 @@ def onReceive(packet, interface):
|
||||
else:
|
||||
# Evaluate non TEXT_MESSAGE_APP packets
|
||||
consumeMetadata(packet, rxNode, channel_number)
|
||||
except KeyError as e:
|
||||
logger.critical(f"System: Error processing packet: {e} Device:{rxNode}")
|
||||
except Exception as e:
|
||||
logger.exception(f"System: Error processing packet: {e} Device:{rxNode}")
|
||||
logger.debug(f"System: Error Packet = {packet}")
|
||||
|
||||
async def start_rx():
|
||||
@@ -2277,8 +2306,11 @@ async def main():
|
||||
# Create core tasks
|
||||
tasks.append(asyncio.create_task(start_rx(), name="mesh_rx"))
|
||||
tasks.append(asyncio.create_task(watchdog(), name="watchdog"))
|
||||
|
||||
|
||||
# Add optional tasks
|
||||
if my_settings.dataPersistence_enabled:
|
||||
tasks.append(asyncio.create_task(dataPersistenceLoop(), name="data_persistence"))
|
||||
|
||||
if my_settings.file_monitor_enabled:
|
||||
tasks.append(asyncio.create_task(handleFileWatcher(), name="file_monitor"))
|
||||
|
||||
|
||||
+104
-11
@@ -353,16 +353,15 @@ The system uses SQLite with four tables:
|
||||
| `howfar` | Distance traveled since last check |
|
||||
| `howtall` | Calculate height using sun angle |
|
||||
| `whereami` | Show current location/address |
|
||||
| `map` | Log/view location data to map.csv |
|
||||
| `map` | Save/retrieve locations, get headings, manage location database |
|
||||
Configure in `[location]` section of `config.ini`.
|
||||
|
||||
Certainly! Here’s a README help section for your `mapHandler` command, suitable for users of your meshbot:
|
||||
|
||||
---
|
||||
|
||||
## 📍 Map Command
|
||||
|
||||
The `map` command allows you to log your current GPS location with a custom description. This is useful for mapping mesh nodes, events, or points of interest.
|
||||
The `map` command provides a comprehensive location management system that allows you to save, retrieve, and manage locations in a SQLite database. You can save private locations (visible only to you) or public locations (visible to all nodes), get headings and distances to saved locations, and manage your location data.
|
||||
|
||||
### Usage
|
||||
|
||||
@@ -370,23 +369,117 @@ The `map` command allows you to log your current GPS location with a custom desc
|
||||
```
|
||||
map help
|
||||
```
|
||||
Displays usage instructions for the map command.
|
||||
Displays usage instructions for all map commands.
|
||||
|
||||
- **Log a Location**
|
||||
- **Save a Private Location**
|
||||
```
|
||||
map <description>
|
||||
map save <name> [description]
|
||||
```
|
||||
Saves your current location as a private location (only visible to your node).
|
||||
|
||||
Examples:
|
||||
```
|
||||
map save BaseCamp
|
||||
map save BaseCamp Main base camp location
|
||||
```
|
||||
|
||||
- **Save a Public Location**
|
||||
```
|
||||
map save public <name> [description]
|
||||
```
|
||||
Saves your current location as a public location (visible to all nodes).
|
||||
|
||||
Examples:
|
||||
```
|
||||
map save public TrailHead
|
||||
map save public TrailHead Starting point for hiking trail
|
||||
```
|
||||
|
||||
**Note:** If `public_location_admin_manage = True` in config, only administrators can save public locations.
|
||||
|
||||
- **Get Heading to a Location**
|
||||
```
|
||||
map <name>
|
||||
```
|
||||
Retrieves a saved location and provides heading (bearing) and distance from your current position.
|
||||
|
||||
The system prioritizes your private location if both private and public locations exist with the same name.
|
||||
|
||||
Example:
|
||||
```
|
||||
map Found a new mesh node near the park
|
||||
map BaseCamp
|
||||
```
|
||||
Response includes:
|
||||
- Location coordinates
|
||||
- Compass heading (bearing)
|
||||
- Distance
|
||||
- Description (if provided)
|
||||
|
||||
- **Get Heading to a Public Location**
|
||||
```
|
||||
map public <name>
|
||||
```
|
||||
Specifically retrieves a public location, even if you have a private location with the same name.
|
||||
|
||||
Example:
|
||||
```
|
||||
map public BaseCamp
|
||||
```
|
||||
|
||||
- **List All Saved Locations**
|
||||
```
|
||||
map list
|
||||
```
|
||||
Lists all locations you can access:
|
||||
- Your private locations (🔒Private)
|
||||
- All public locations (🌐Public)
|
||||
|
||||
Locations are sorted with private locations first, then public locations, both alphabetically by name.
|
||||
|
||||
- **Delete a Location**
|
||||
```
|
||||
map delete <name>
|
||||
```
|
||||
Deletes a location from the database.
|
||||
|
||||
**Permission Rules:**
|
||||
- If `delete_public_locations_admins_only = False` (default):
|
||||
- Users can delete their own private locations
|
||||
- Users can delete public locations they created
|
||||
- Anyone can delete any public location
|
||||
- If `delete_public_locations_admins_only = True`:
|
||||
- Only administrators can delete public locations
|
||||
|
||||
The system prioritizes deleting your private location if both private and public locations exist with the same name.
|
||||
|
||||
- **Legacy CSV Logging**
|
||||
```
|
||||
map log <description>
|
||||
```
|
||||
Logs your current location to the legacy CSV file (`data/map_data.csv`) with a description. This is the original map functionality preserved for backward compatibility.
|
||||
|
||||
Example:
|
||||
```
|
||||
map log Found a new mesh node near the park
|
||||
```
|
||||
This will log your current location with the description "Found a new mesh node near the park".
|
||||
|
||||
### How It Works
|
||||
|
||||
- The bot records your user ID, latitude, longitude, and your description in a CSV file (`data/map_data.csv`).
|
||||
- If your location data is missing or invalid, you’ll receive an error message.
|
||||
- You can view or process the CSV file later for mapping or analysis.
|
||||
- **Database Storage:** All locations are stored in a SQLite database (`data/locations.db` by default, configurable via `locations_db` in config.ini).
|
||||
- **Location Types:**
|
||||
- **Private Locations:** Only visible to the node that created them
|
||||
- **Public Locations:** Visible to all nodes
|
||||
- **Conflict Resolution:** If you try to save a private location with the same name as an existing public location, you'll be prompted that there is a the public record with that name.
|
||||
- **Distance Calculation:** Uses the Haversine formula for accurate distance calculations. Distances less than 0.25 miles are displayed in feet; otherwise in miles (or kilometers if metric is enabled).
|
||||
- **Heading Calculation:** Provides compass bearing (0-360 degrees) from your current location to the target location.
|
||||
|
||||
### Configuration
|
||||
|
||||
Configure in `[location]` section of `config.ini`:
|
||||
|
||||
- `locations_db` - Path to the SQLite database file (default: `data/locations.db`)
|
||||
- `public_location_admin_manage` - If `True`, only administrators can save public locations (default: `False`)
|
||||
- `delete_public_locations_admins_only` - If `True`, only administrators can delete locations (default: `False`)
|
||||
|
||||
**Tip:** Use `map help` at any time to see these instructions in the bot.
|
||||
|
||||
|
||||
+14
-2
@@ -61,7 +61,7 @@ def save_bbsdb():
|
||||
|
||||
def bbs_help():
|
||||
# help message
|
||||
return "BBS Commands:\n'bbslist'\n'bbspost $subject #message'\n'bbsread #'\n'bbsdelete #'\n'cmd'"
|
||||
return "BBS Commands:\n'bbslist'\n'bbspost $subject #message'\n'bbspost @node #message' (DM)\n'bbsread #'\n'bbsdelete #'\n'cmd'"
|
||||
|
||||
def bbs_list_messages():
|
||||
#print (f"System: raw bbs_messages: {bbs_messages}")
|
||||
@@ -255,7 +255,19 @@ def bbs_sync_posts(input, peerNode, RxNode):
|
||||
#store the message
|
||||
subject = input.split("$")[1].split("#")[0]
|
||||
body = input.split("#")[1]
|
||||
fromNodeHex = input.split("@")[1]
|
||||
fromNodeHex = body.split("@")[1]
|
||||
#validate the fromNodeHex is a valid hex number
|
||||
try:
|
||||
int(fromNodeHex, 16)
|
||||
except ValueError:
|
||||
logger.error(f"System: Invalid fromNodeHex in bbslink from node {peerNode}: {input}")
|
||||
fromNodeHex = hex(peerNode)
|
||||
#validate the subject and body are not empty
|
||||
if subject.strip() == "" or body.strip() == "":
|
||||
logger.error(f"System: Empty subject or body in bbslink from node {peerNode}: {input}")
|
||||
return "System: Invalid bbslink format."
|
||||
|
||||
#store the message in the bbsdb
|
||||
try:
|
||||
bbs_post_message(subject, body, int(fromNodeHex, 16))
|
||||
except:
|
||||
|
||||
+1
-1
@@ -46,7 +46,7 @@ def handledxcluster(message, nodeID, deviceID):
|
||||
freq_hz = spot.get('freq', spot.get('frequency', None))
|
||||
frequency = f"{float(freq_hz)/1e6:.3f} MHz" if freq_hz else "N/A"
|
||||
mode_val = spot.get('mode', 'N/A')
|
||||
comment = spot.get('comment', '')
|
||||
comment = spot.get('comment') or ''
|
||||
if len(comment) > 111: # Truncate comment to 111 chars
|
||||
comment = comment[:111] + '...'
|
||||
sig = spot.get('sig', '')
|
||||
|
||||
@@ -6,6 +6,7 @@ import random
|
||||
import copy
|
||||
import uuid
|
||||
import time
|
||||
from modules.settings import battleshipTracker
|
||||
|
||||
OCEAN = "~"
|
||||
FIRE = "x"
|
||||
|
||||
@@ -5,6 +5,7 @@ import random
|
||||
import time
|
||||
import pickle
|
||||
from modules.log import logger
|
||||
from modules.settings import dwPlayerTracker
|
||||
|
||||
# Global variables
|
||||
total_days = 7 # number of days or rotations the player has to play
|
||||
@@ -382,15 +383,19 @@ def endGameDw(nodeID):
|
||||
with open('data/dopewar_hs.pkl', 'wb') as file:
|
||||
pickle.dump(dwHighScore, file)
|
||||
msg = "You finished with $" + "{:,}".format(cash) + " and beat the high score!🎉💰"
|
||||
return msg
|
||||
if cash > starting_cash:
|
||||
elif cash > starting_cash:
|
||||
msg = 'You made money! 💵 Up ' + str((cash/starting_cash).__round__()) + 'x! Well done.'
|
||||
return msg
|
||||
if cash == starting_cash:
|
||||
elif cash == starting_cash:
|
||||
msg = 'You broke even... hope you at least had fun 💉💊'
|
||||
return msg
|
||||
if cash < starting_cash:
|
||||
else:
|
||||
msg = "You lost money, better go get a real job.💸"
|
||||
|
||||
# remove player from all trackers and databases
|
||||
dwPlayerTracker[:] = [p for p in dwPlayerTracker if p.get('userID') != nodeID]
|
||||
dwCashDb[:] = [p for p in dwCashDb if p.get('userID') != nodeID]
|
||||
dwInventoryDb[:] = [p for p in dwInventoryDb if p.get('userID') != nodeID]
|
||||
dwLocationDb[:] = [p for p in dwLocationDb if p.get('userID') != nodeID]
|
||||
dwGameDayDb[:] = [p for p in dwGameDayDb if p.get('userID') != nodeID]
|
||||
|
||||
return msg
|
||||
|
||||
@@ -495,6 +500,11 @@ def playDopeWars(nodeID, cmd):
|
||||
if dwGameDayDb[i].get('userID') == nodeID:
|
||||
inGame = True
|
||||
|
||||
# Allow ending the game from any state while a session is active.
|
||||
cmd_normalized = str(cmd).strip().lower()
|
||||
if inGame and cmd_normalized in ['e', 'end', 'quit', 'exit']:
|
||||
return endGameDw(nodeID)
|
||||
|
||||
if not inGame:
|
||||
# initalize player in the database
|
||||
loc = generatelocations()
|
||||
@@ -605,9 +615,6 @@ def playDopeWars(nodeID, cmd):
|
||||
# render_game_screen
|
||||
msg = render_game_screen(nodeID, game_day, total_days, loc_choice, -1, price_list, 0, 'nothing')
|
||||
return msg
|
||||
elif 'e' in menu_choice:
|
||||
msg = endGameDw(nodeID)
|
||||
return msg
|
||||
else:
|
||||
msg = f'example buy:\nb,drug#,qty# or Sell: s,1,10 qty can be (m)ax\n f,p or end'
|
||||
return msg
|
||||
|
||||
@@ -5,6 +5,7 @@ import random
|
||||
import time
|
||||
import pickle
|
||||
from modules.log import logger
|
||||
from modules.settings import golfTracker
|
||||
|
||||
# Clubs setup
|
||||
driver_distances = list(range(230, 280, 5))
|
||||
|
||||
@@ -10,6 +10,7 @@ import json
|
||||
import random
|
||||
import os
|
||||
from modules.log import logger
|
||||
from modules.settings import hamtestTracker
|
||||
|
||||
class HamTest:
|
||||
def __init__(self):
|
||||
@@ -135,8 +136,16 @@ class HamTest:
|
||||
|
||||
# remove the game[id] from the list
|
||||
del self.game[id]
|
||||
# hamtestTracker stores dicts like {"nodeID": nodeID, ...}
|
||||
for i in range(len(hamtestTracker)):
|
||||
try:
|
||||
if hamtestTracker[i].get('nodeID') == id:
|
||||
hamtestTracker.pop(i)
|
||||
break
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
return msg
|
||||
|
||||
hamtestTracker = []
|
||||
hamtest = HamTest()
|
||||
|
||||
@@ -3,6 +3,7 @@ from modules.log import logger, getPrettyTime
|
||||
import os
|
||||
import json
|
||||
import random
|
||||
from modules.settings import hangmanTracker
|
||||
|
||||
class Hangman:
|
||||
WORDS = [
|
||||
|
||||
@@ -145,10 +145,9 @@ def tableOfContents():
|
||||
'file': '📁', 'folder': '📂', 'sports': '🏅', 'athlete': '🏃', 'competition': '🏆', 'race': '🏁', 'tournament': '🏆', 'champion': '🏆', 'medal': '🏅', 'victory': '🏆', 'win': '🏆', 'lose': '😞',
|
||||
'draw': '🤝', 'team': '👥', 'player': '👤', 'coach': '👨🏫', 'referee': '🧑⚖️', 'stadium': '🏟️', 'arena': '🏟️', 'field': '🏟️', 'court': '🏟️', 'track': '🏟️', 'gym': '🏋️', 'fitness': '🏋️', 'exercise': '🏋️',
|
||||
'workout': '🏋️', 'training': '🏋️', 'practice': '🏋️', 'game': '🎮', 'match': '🎮', 'score': '🏅', 'goal': '🥅', 'point': '🏅', 'basket': '🏀', 'home run': '⚾️', 'strike': '🎳', 'spare': '🎳', 'frame': '🎳',
|
||||
'inning': '⚾️', 'quarter': '🏈', 'half': '🏈', 'overtime': '🏈', 'penalty': '⚽️', 'foul': '⚽️', 'timeout': '⏱️', 'substitute': '🔄', 'bench': '🪑', 'sideline': '🏟️', 'dugout': '⚾️', 'locker room': '🚪', 'shower': '🚿',
|
||||
'uniform': '👕', 'jersey': '👕', 'cleats': '👟', 'helmet': '⛑️', 'pads': '🛡️', 'gloves': '🧤', 'bat': '⚾️', 'ball': '⚽️', 'puck': '🏒', 'stick': '🏒', 'net': '🥅', 'hoop': '🏀', 'goalpost': '🥅', 'whistle': '🔔',
|
||||
'scoreboard': '📊', 'fans': '👥', 'crowd': '👥', 'cheer': '📣', 'boo': '😠', 'applause': '👏', 'celebration': '🎉', 'parade': '🎉', 'trophy': '🏆', 'medal': '🏅', 'ribbon': '🎀', 'cup': '🏆', 'championship': '🏆',
|
||||
'league': '🏆', 'season': '🏆', 'playoffs': '🏆', 'finals': '🏆', 'runner-up': '🥈', 'third place': '🥉', 'snowman': '☃️', 'snowmen': '⛄️'
|
||||
'inning': '⚾️', 'shower': '🚿', 'uniform': '👕', 'jersey': '👕', 'cleats': '👟', 'helmet': '⛑️', 'pads': '🛡️', 'gloves': '🧤', 'bat': '⚾️', 'ball': '⚽️', 'puck': '🏒', 'stick': '🏒', 'net': '🥅', 'goalpost': '🥅',
|
||||
'scoreboard': '📊', 'fans': '👥', 'crowd': '👥', 'cheer': '📣', 'boo': '😠', 'applause': '👏', 'celebration': '🎉', 'parade': '🎉', 'trophy': '🏆', 'medal': '🏅', 'ribbon': '🎀',
|
||||
'third place': '🥉', 'snowman': '☃️', 'snowmen': '⛄️'
|
||||
}
|
||||
|
||||
return wordToEmojiMap
|
||||
|
||||
@@ -211,7 +211,7 @@ def compareCodeMMind(secret_code, user_guess, nodeID):
|
||||
def playGameMMind(diff, secret_code, turn_count, nodeID, message):
|
||||
msg = ''
|
||||
won = False
|
||||
if turn_count <= 10:
|
||||
if turn_count < 11:
|
||||
user_guess = getGuessMMind(diff, message, nodeID)
|
||||
if user_guess == "XXXX":
|
||||
msg += f"⛔️Invalid guess. Please enter 4 valid colors letters.\n🔴🟢🔵🔴 is RGBR"
|
||||
@@ -240,7 +240,7 @@ def playGameMMind(diff, secret_code, turn_count, nodeID, message):
|
||||
# reset turn count in tracker
|
||||
for i in range(len(mindTracker)):
|
||||
if mindTracker[i]['nodeID'] == nodeID:
|
||||
mindTracker[i]['turns'] = 0
|
||||
mindTracker[i]['turns'] = 1
|
||||
mindTracker[i]['secret_code'] = ''
|
||||
mindTracker[i]['cmd'] = 'new'
|
||||
|
||||
@@ -277,6 +277,7 @@ def start_mMind(nodeID, message):
|
||||
if mindTracker[i]['nodeID'] == nodeID:
|
||||
mindTracker[i]['cmd'] = 'makeCode'
|
||||
mindTracker[i]['diff'] = diff
|
||||
mindTracker[i]['turns'] = 1
|
||||
# Return color message to player
|
||||
msg += chooseDifficultyMMind(message.lower()[0])
|
||||
return msg
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
import random
|
||||
import time
|
||||
import modules.settings as my_settings
|
||||
from modules.settings import tictactoeTracker
|
||||
|
||||
useSynchCompression = True
|
||||
if useSynchCompression:
|
||||
@@ -16,9 +17,14 @@ class TicTacToe:
|
||||
if getattr(my_settings, "disable_emojis_in_games", False):
|
||||
self.X = "X"
|
||||
self.O = "O"
|
||||
self.digit_emojis = None
|
||||
else:
|
||||
self.X = "❌"
|
||||
self.O = "⭕️"
|
||||
# Unicode emoji digits 1️⃣-9️⃣
|
||||
self.digit_emojis = [
|
||||
"1️⃣", "2️⃣", "3️⃣", "4️⃣", "5️⃣", "6️⃣", "7️⃣", "8️⃣", "9️⃣"
|
||||
]
|
||||
self.display_module = display_module
|
||||
self.game = {}
|
||||
self.win_lines_3d = self.generate_3d_win_lines()
|
||||
@@ -73,7 +79,13 @@ class TicTacToe:
|
||||
row = []
|
||||
for j in range(3):
|
||||
cell = b[i*3+j]
|
||||
row.append(cell if cell != " " else str(i*3+j+1))
|
||||
if cell != " ":
|
||||
row.append(cell)
|
||||
else:
|
||||
if self.digit_emojis:
|
||||
row.append(self.digit_emojis[i*3+j])
|
||||
else:
|
||||
row.append(str(i*3+j+1))
|
||||
s += " | ".join(row) + "\n"
|
||||
return s
|
||||
return ""
|
||||
@@ -147,10 +159,24 @@ class TicTacToe:
|
||||
msg = self.new_game(nodeID, new_mode, g["channel"], g["deviceID"])
|
||||
return msg
|
||||
|
||||
try:
|
||||
pos = int(input_msg)
|
||||
except Exception:
|
||||
return f"Enter a number between 1 and {max_pos}."
|
||||
# Accept emoji digits as input
|
||||
pos = None
|
||||
# Try to match emoji digits if enabled
|
||||
if self.digit_emojis:
|
||||
try:
|
||||
# Remove variation selectors for matching
|
||||
normalized_input = input_msg.replace("\ufe0f", "")
|
||||
for idx, emoji in enumerate(self.digit_emojis[:max_pos]):
|
||||
if normalized_input == emoji.replace("\ufe0f", ""):
|
||||
pos = idx + 1
|
||||
break
|
||||
except Exception:
|
||||
pass
|
||||
if pos is None:
|
||||
try:
|
||||
pos = int(input_msg)
|
||||
except Exception:
|
||||
return f"Enter a number or emoji between 1 and {max_pos}."
|
||||
|
||||
if not self.make_move(nodeID, pos):
|
||||
return f"Invalid move! Pick 1-{max_pos}:"
|
||||
|
||||
@@ -4,6 +4,7 @@ import random
|
||||
import time
|
||||
import pickle
|
||||
from modules.log import logger, getPrettyTime
|
||||
from modules.settings import vpTracker
|
||||
|
||||
vpStartingCash = 20
|
||||
# Define the Card class
|
||||
@@ -260,6 +261,7 @@ class PlayerVP:
|
||||
|
||||
|
||||
def getLastCmdVp(nodeID):
|
||||
global vpTracker
|
||||
last_cmd = ""
|
||||
for i in range(len(vpTracker)):
|
||||
if vpTracker[i]['nodeID'] == nodeID:
|
||||
@@ -267,6 +269,7 @@ def getLastCmdVp(nodeID):
|
||||
return last_cmd
|
||||
|
||||
def setLastCmdVp(nodeID, cmd):
|
||||
global vpTracker
|
||||
for i in range(len(vpTracker)):
|
||||
if vpTracker[i]['nodeID'] == nodeID:
|
||||
vpTracker[i]['cmd'] = cmd
|
||||
|
||||
+782
-31
@@ -14,6 +14,7 @@ import modules.settings as my_settings
|
||||
import math
|
||||
import csv
|
||||
import os
|
||||
import sqlite3
|
||||
|
||||
trap_list_location = ("whereami", "wx", "wxa", "wxalert", "rlist", "ea", "ealert", "riverflow", "valert", "earthquake", "howfar", "map",)
|
||||
|
||||
@@ -83,8 +84,10 @@ def getRepeaterBook(lat=0, lon=0):
|
||||
elsewhereapi = "https://www.repeaterbook.com/row_repeaters/prox2_result.php?"
|
||||
if grid[:2] in ['CN', 'DN', 'EN', 'FN', 'CM', 'DM', 'EM', 'FM', 'DL', 'EL', 'FL']:
|
||||
repeater_url = usapi
|
||||
logger.debug("Location: Fetching repeater data from RepeaterBook US API for grid " + grid)
|
||||
else:
|
||||
repeater_url = elsewhereapi
|
||||
logger.debug("Location: Fetching repeater data from RepeaterBook International API for grid " + grid)
|
||||
|
||||
repeater_url += f"city={grid}&lat=&long=&distance=50&Dunit=m&band%5B%5D=4&band%5B%5D=16&freq=&call=&mode%5B%5D=1&mode%5B%5D=2&mode%5B%5D=4&mode%5B%5D=64&status_id=1&use=%25&use=OPEN&order=distance_calc%2C+state_id+ASC"
|
||||
|
||||
@@ -92,10 +95,14 @@ def getRepeaterBook(lat=0, lon=0):
|
||||
msg = ''
|
||||
user_agent = {'User-agent': 'Mozilla/5.0'}
|
||||
response = requests.get(repeater_url, headers=user_agent, timeout=my_settings.urlTimeoutSeconds)
|
||||
if response.status_code!=200:
|
||||
# Fail early on bad HTTP status
|
||||
if response.status_code != 200:
|
||||
logger.error(f"Location:Error fetching repeater data from {repeater_url} with status code {response.status_code}")
|
||||
return my_settings.ERROR_FETCHING_DATA
|
||||
|
||||
soup = bs.BeautifulSoup(response.text, 'html.parser')
|
||||
table = soup.find('table', attrs={'class': 'table table-striped table-hover align-middle sortable'})
|
||||
# match the repeater table by presence of the "sortable" class (class order/extra classes may vary)
|
||||
table = soup.select_one('table.sortable')
|
||||
if table is not None:
|
||||
cells = table.find_all('td')
|
||||
data = []
|
||||
@@ -115,6 +122,8 @@ def getRepeaterBook(lat=0, lon=0):
|
||||
}
|
||||
data.append(repeater)
|
||||
else:
|
||||
# No table found — could be legitimately no data or markup change.
|
||||
logger.debug("Location: No repeater table found on RepeaterBook page, scraping failed or no data for region.")
|
||||
msg = "No Data for your Region"
|
||||
except Exception as e:
|
||||
msg = "No repeaters found 😔"
|
||||
@@ -419,7 +428,11 @@ def getWeatherAlertsNOAA(lat=0, lon=0, useDefaultLatLon=False):
|
||||
alertxml = xml.dom.minidom.parseString(alert_data.text)
|
||||
for i in alertxml.getElementsByTagName("entry"):
|
||||
title = i.getElementsByTagName("title")[0].childNodes[0].nodeValue
|
||||
area_desc = i.getElementsByTagName("cap:areaDesc")[0].childNodes[0].nodeValue
|
||||
area_desc_nodes = i.getElementsByTagName("cap:areaDesc")
|
||||
if area_desc_nodes and area_desc_nodes[0].childNodes:
|
||||
area_desc = area_desc_nodes[0].childNodes[0].nodeValue
|
||||
else:
|
||||
area_desc = ""
|
||||
|
||||
# Extract NWSheadline from cap:parameter if present
|
||||
nws_headline = ""
|
||||
@@ -1159,6 +1172,507 @@ def get_openskynetwork(lat=0, lon=0, altitude=0, node_altitude=0, altitude_windo
|
||||
logger.debug(f"SYSTEM: Location HighFly: Error processing OpenSky Network data: {e}")
|
||||
return False
|
||||
|
||||
def get_public_location_admin_manage():
|
||||
"""Get the public_location_admin_manage setting directly from config file
|
||||
This ensures the setting is reloaded fresh on first load of the program
|
||||
"""
|
||||
import configparser
|
||||
config = configparser.ConfigParser()
|
||||
try:
|
||||
config.read("config.ini", encoding='utf-8')
|
||||
return config['location'].getboolean('public_location_admin_manage', False)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def get_delete_public_locations_admins_only():
|
||||
"""Get the delete_public_locations_admins_only setting directly from config file
|
||||
This ensures the setting is reloaded fresh on first load of the program
|
||||
"""
|
||||
import configparser
|
||||
config = configparser.ConfigParser()
|
||||
try:
|
||||
config.read("config.ini", encoding='utf-8')
|
||||
return config['location'].getboolean('delete_public_locations_admins_only', False)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def get_node_altitude(nodeID, deviceID=1):
|
||||
"""Get altitude for a node from position data or positionMetadata
|
||||
|
||||
Returns altitude in meters, or None if not available
|
||||
"""
|
||||
try:
|
||||
import modules.system as system_module
|
||||
|
||||
# Try to get altitude from node position dict first
|
||||
# Access interface dynamically from system module
|
||||
interface = getattr(system_module, f'interface{deviceID}', None)
|
||||
if interface and hasattr(interface, 'nodes') and interface.nodes:
|
||||
for node in interface.nodes.values():
|
||||
if nodeID == node['num']:
|
||||
pos = node.get('position')
|
||||
if pos and isinstance(pos, dict) and pos.get('altitude') is not None:
|
||||
try:
|
||||
altitude = float(pos['altitude'])
|
||||
if altitude > 0: # Valid altitude
|
||||
return altitude
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
||||
# Fall back to positionMetadata (from POSITION_APP packets)
|
||||
positionMetadata = getattr(system_module, 'positionMetadata', None)
|
||||
if positionMetadata and nodeID in positionMetadata:
|
||||
metadata = positionMetadata[nodeID]
|
||||
if 'altitude' in metadata:
|
||||
altitude = metadata.get('altitude', 0)
|
||||
if altitude and altitude > 0:
|
||||
return float(altitude)
|
||||
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.debug(f"Location: Error getting altitude for node {nodeID}: {e}")
|
||||
return None
|
||||
|
||||
def initialize_locations_database():
    """Initialize the SQLite database for storing saved locations.

    Creates the 'locations' table if needed and performs in-place schema
    migrations on older databases:
      - removes the legacy UNIQUE constraint on location_name (recreating
        the table while preserving every column the old table actually had)
      - adds the is_public and altitude columns when missing

    Returns True on success, False on any failure.
    """
    conn = None
    try:
        # Ensure data directory exists
        db_dir = os.path.dirname(my_settings.locations_db)
        if db_dir:
            os.makedirs(db_dir, exist_ok=True)

        conn = sqlite3.connect(my_settings.locations_db)
        c = conn.cursor()
        logger.debug("Location: Initializing locations database...")

        # Check whether the table already exists
        c.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='locations'")
        table_exists = c.fetchone() is not None

        if table_exists:
            c.execute("PRAGMA table_info(locations)")
            column_names = [col[1] for col in c.fetchall()]

            # Detect the legacy UNIQUE constraint on location_name from the schema SQL
            c.execute("SELECT sql FROM sqlite_master WHERE type='table' AND name='locations'")
            table_sql = c.fetchone()
            has_unique_constraint = bool(
                table_sql and table_sql[0]
                and 'UNIQUE' in table_sql[0].upper()
                and 'location_name' in table_sql[0]
            )

            # If the UNIQUE constraint exists, recreate the table without it
            if has_unique_constraint:
                logger.debug("Location: Removing UNIQUE constraint from locations table")
                c.execute('''CREATE TABLE locations_new
                             (location_id INTEGER PRIMARY KEY AUTOINCREMENT,
                              location_name TEXT NOT NULL,
                              latitude REAL NOT NULL,
                              longitude REAL NOT NULL,
                              altitude REAL,
                              description TEXT,
                              userID TEXT,
                              is_public INTEGER DEFAULT 0,
                              created_date TEXT,
                              created_time TEXT)''')

                # Copy only the columns the old table actually has.  The previous
                # migration hard-coded the column list, which dropped altitude
                # values and raised OperationalError when is_public was absent.
                base_cols = ['location_id', 'location_name', 'latitude', 'longitude',
                             'altitude', 'description', 'userID', 'is_public',
                             'created_date', 'created_time']
                copy_cols = [col for col in base_cols if col in column_names]
                select_exprs = ["COALESCE(is_public, 0)" if col == 'is_public' else col
                                for col in copy_cols]
                c.execute(f'''INSERT INTO locations_new ({", ".join(copy_cols)})
                              SELECT {", ".join(select_exprs)} FROM locations''')

                c.execute("DROP TABLE locations")
                c.execute("ALTER TABLE locations_new RENAME TO locations")
                logger.debug("Location: Successfully removed UNIQUE constraint")

                # Refresh column list after table recreation
                c.execute("PRAGMA table_info(locations)")
                column_names = [col[1] for col in c.fetchall()]

            # Add is_public column if it doesn't exist (migration)
            if 'is_public' not in column_names:
                try:
                    c.execute('''ALTER TABLE locations ADD COLUMN is_public INTEGER DEFAULT 0''')
                    logger.debug("Location: Added is_public column to locations table")
                except sqlite3.OperationalError:
                    pass  # column already exists, ignore

            # Add altitude column if it doesn't exist (migration)
            if 'altitude' not in column_names:
                try:
                    c.execute('''ALTER TABLE locations ADD COLUMN altitude REAL''')
                    logger.debug("Location: Added altitude column to locations table")
                except sqlite3.OperationalError:
                    pass  # column already exists, ignore
        else:
            # Fresh install: create the table without the UNIQUE constraint
            c.execute('''CREATE TABLE locations
                         (location_id INTEGER PRIMARY KEY AUTOINCREMENT,
                          location_name TEXT NOT NULL,
                          latitude REAL NOT NULL,
                          longitude REAL NOT NULL,
                          altitude REAL,
                          description TEXT,
                          userID TEXT,
                          is_public INTEGER DEFAULT 0,
                          created_date TEXT,
                          created_time TEXT)''')

        # Non-unique index for faster name lookups
        c.execute('''CREATE INDEX IF NOT EXISTS idx_location_name_user
                     ON locations(location_name, userID, is_public)''')

        conn.commit()
        return True
    except Exception as e:
        logger.warning(f"Location: Failed to initialize locations database: {e}")
        return False
    finally:
        # The original leaked the connection on any exception path
        if conn is not None:
            conn.close()
|
||||
|
||||
def save_location_to_db(location_name, lat, lon, description="", userID="", is_public=False, altitude=None):
    """Save a location to the SQLite database

    Args:
        location_name: name to save the location under (must be non-empty)
        lat, lon: coordinates to store
        description: optional free-text description
        userID: owner of the location (stored as given)
        is_public: True to make the location visible to everyone
        altitude: optional altitude in meters

    Returns:
        (success, message, conflict_info)
        conflict_info is None if no conflict, or dict with conflict details if conflict exists
    """
    conn = None
    try:
        if not location_name or not location_name.strip():
            return False, "Location name cannot be empty", None

        # Check if public locations are admin-only and user is not admin
        if is_public and get_public_location_admin_manage():
            from modules.system import isNodeAdmin
            if not isNodeAdmin(userID):
                return False, "Only admins can save public locations.", None

        conn = sqlite3.connect(my_settings.locations_db)
        c = conn.cursor()

        location_name_clean = location_name.strip()

        # 1. Reject if this user already has a location with this name (private or public)
        c.execute('''SELECT location_id, is_public FROM locations
                     WHERE location_name = ? AND userID = ?''',
                  (location_name_clean, userID))
        if c.fetchone():
            return False, f"Location '{location_name}' already exists for your node", None

        # 2. Reject if a public location with this name already exists.
        #    Public locations may overlap *other* users' private names, but
        #    only one public location per name is allowed globally.
        c.execute('''SELECT location_id, userID, description FROM locations
                     WHERE location_name = ? AND is_public = 1''',
                  (location_name_clean,))
        if c.fetchone():
            if not is_public:
                # A public location shadows the requested private name
                return False, f"Public location '{location_name}' already exists. Use 'map public {location_name}' to access it.", None
            return False, f"Public location '{location_name}' already exists", None

        # 3. Insert the new location (other users' private names are deliberately
        #    not checked, so public names may overlap private ones)
        now = datetime.now()
        c.execute('''INSERT INTO locations
                     (location_name, latitude, longitude, altitude, description, userID, is_public, created_date, created_time)
                     VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)''',
                  (location_name_clean, lat, lon, altitude, description, userID, 1 if is_public else 0,
                   now.strftime("%Y-%m-%d"), now.strftime("%H:%M:%S")))
        conn.commit()

        visibility = "public" if is_public else "private"
        logger.debug(f"Location: Saved {visibility} location '{location_name}' to database")
        return True, f"Location '{location_name}' saved as {visibility}", None
    except Exception as e:
        logger.error(f"Location: Failed to save location: {e}")
        return False, f"Error saving location: {e}", None
    finally:
        # The original left the connection open when an execute raised
        if conn is not None:
            conn.close()
|
||||
|
||||
def get_location_from_db(location_name, userID=None):
    """Retrieve a location from the database by name

    Lookup order:
    - the user's own private location with that name (when userID is given)
    - otherwise the public location with that name
    - None if neither exists

    Returns a dict with keys name/lat/lon/altitude/description/userID/
    is_public/created_date/created_time, or None.
    """
    _select = ('''SELECT location_name, latitude, longitude, altitude, description, userID, is_public, created_date, created_time
                  FROM locations ''')

    def _as_dict(row):
        # Map a result row onto the dict shape callers expect
        if row is None:
            return None
        return {
            'name': row[0],
            'lat': row[1],
            'lon': row[2],
            'altitude': row[3],
            'description': row[4],
            'userID': row[5],
            'is_public': bool(row[6]),
            'created_date': row[7],
            'created_time': row[8],
        }

    conn = None
    try:
        conn = sqlite3.connect(my_settings.locations_db)
        c = conn.cursor()
        location_name_clean = location_name.strip()

        # First, try the user's own private location
        if userID:
            c.execute(_select + '''WHERE location_name = ? AND userID = ? AND is_public = 0''',
                      (location_name_clean, userID))
            found = _as_dict(c.fetchone())
            if found:
                return found

        # Then any public location with that name
        c.execute(_select + '''WHERE location_name = ? AND is_public = 1''',
                  (location_name_clean,))
        return _as_dict(c.fetchone())
    except Exception as e:
        logger.error(f"Location: Failed to retrieve location: {e}")
        return None
    finally:
        # The original leaked the connection if an execute raised
        if conn is not None:
            conn.close()
|
||||
|
||||
def get_public_location_from_db(location_name):
    """Retrieve only a public location from the database by name (ignores private locations)

    Returns:
        dict with the location fields if a public location exists, else None
    """
    conn = None
    try:
        conn = sqlite3.connect(my_settings.locations_db)
        c = conn.cursor()

        # Only rows flagged public are considered; private rows with the
        # same name are deliberately ignored.
        c.execute('''SELECT location_name, latitude, longitude, altitude, description, userID, is_public, created_date, created_time
                     FROM locations
                     WHERE location_name = ? AND is_public = 1''',
                  (location_name.strip(),))
        row = c.fetchone()
        if row is None:
            return None
        return {
            'name': row[0],
            'lat': row[1],
            'lon': row[2],
            'altitude': row[3],
            'description': row[4],
            'userID': row[5],
            'is_public': bool(row[6]),
            'created_date': row[7],
            'created_time': row[8],
        }
    except Exception as e:
        logger.error(f"Location: Failed to retrieve public location: {e}")
        return None
    finally:
        # The original leaked the connection if an execute raised
        if conn is not None:
            conn.close()
|
||||
|
||||
def list_locations_from_db(userID=None):
    """List saved locations

    Shows the caller's private locations (when userID is given) plus all
    public locations; without a userID only public locations are listed.

    Returns a human-readable multi-line string, or an error message.
    """
    conn = None
    try:
        conn = sqlite3.connect(my_settings.locations_db)
        c = conn.cursor()

        if userID:
            # User's private locations and every public location
            c.execute('''SELECT location_name, latitude, longitude, altitude, description, is_public, created_date
                         FROM locations
                         WHERE (userID = ? AND is_public = 0) OR is_public = 1
                         ORDER BY is_public ASC, location_name''', (userID,))
        else:
            # Public locations only
            c.execute('''SELECT location_name, latitude, longitude, altitude, description, is_public, created_date
                         FROM locations
                         WHERE is_public = 1
                         ORDER BY location_name''')

        results = c.fetchall()  # all rows, intentionally unlimited

        if not results:
            return "No saved locations found"

        # Build with a list + join instead of repeated string concatenation
        lines = [f"Saved Locations ({len(results)} total):"]
        for name, row_lat, row_lon, row_alt, row_desc, row_public, _created in results:
            visibility = "🌐Public" if bool(row_public) else "🔒Private"
            entry = f" • {name} ({row_lat:.5f}, {row_lon:.5f})"
            if row_alt is not None:
                entry += f" @ {row_alt:.1f}m"
            entry += f" [{visibility}]"
            if row_desc:
                entry += f" - {row_desc}"
            lines.append(entry)
        return "\n".join(lines)
    except Exception as e:
        logger.error(f"Location: Failed to list locations: {e}")
        return f"Error listing locations: {e}"
    finally:
        # The original leaked the connection if an execute raised
        if conn is not None:
            conn.close()
|
||||
|
||||
def delete_location_from_db(location_name, userID=""):
    """Delete a location from the database

    Permission rules:
      - owners may always delete their own private locations
      - public locations may be deleted by anyone, unless the
        delete_public_locations_admins_only config flag restricts it to admins

    Returns:
        (success, message)
    """
    conn = None
    try:
        if not location_name or not location_name.strip():
            return False, "Location name cannot be empty"

        conn = sqlite3.connect(my_settings.locations_db)
        c = conn.cursor()
        location_name_clean = location_name.strip()

        # Resolve the target row: prefer the caller's own private location,
        # then a public one, finally any row at all (for admin delete).
        c.execute('''SELECT location_id, userID, is_public FROM locations
                     WHERE location_name = ? AND userID = ? AND is_public = 0''',
                  (location_name_clean, userID))
        location = c.fetchone()

        if not location:
            c.execute('''SELECT location_id, userID, is_public FROM locations
                         WHERE location_name = ? AND is_public = 1''',
                      (location_name_clean,))
            location = c.fetchone()

        if not location:
            c.execute('''SELECT location_id, userID, is_public FROM locations
                         WHERE location_name = ? LIMIT 1''',
                      (location_name_clean,))
            location = c.fetchone()

        if not location:
            return False, f"Location '{location_name}' not found"

        location_id, location_userID, is_public = location

        # Read the admin-only setting once; the original re-read config.ini
        # up to three times per call through this helper.
        admins_only = get_delete_public_locations_admins_only()
        is_admin = False
        if admins_only:
            from modules.system import isNodeAdmin
            is_admin = isNodeAdmin(userID)

        is_owner = (str(location_userID) == str(userID))

        # Determine whether this caller may delete the resolved row
        if is_public:
            # Public rows: anyone may delete unless restricted to admins
            can_delete = is_admin if admins_only else True
        else:
            # Private rows: only the owner may delete
            can_delete = is_owner

        if not can_delete:
            if is_public and admins_only:
                return False, "Only admins can delete public locations."
            return False, "You can only delete your own locations. This location belongs to another user."

        c.execute('''DELETE FROM locations WHERE location_id = ?''', (location_id,))
        conn.commit()

        visibility = "public" if is_public else "private"
        logger.debug(f"Location: Deleted {visibility} location '{location_name}' from database")
        return True, f"Location '{location_name}' deleted"
    except Exception as e:
        logger.error(f"Location: Failed to delete location: {e}")
        return False, f"Error deleting location: {e}"
    finally:
        # The original leaked the connection if an execute raised
        if conn is not None:
            conn.close()
|
||||
|
||||
def calculate_heading_and_distance(lat1, lon1, lat2, lon2):
    """Calculate heading (bearing) and distance between two points.

    Returns (bearing_degrees, distance_km, error) where error is None on
    success.  A point at exactly (0, 0) is treated as "no fix available".
    """
    if lat1 == 0 and lon1 == 0:
        return None, None, "Current location not available"
    if lat2 == 0 and lon2 == 0:
        return None, None, "Target location not available"

    earth_radius_km = 6371
    phi1, lam1, phi2, lam2 = (math.radians(v) for v in (lat1, lon1, lat2, lon2))
    delta_lam = lam2 - lam1
    delta_phi = phi2 - phi1

    # Haversine great-circle distance
    half_chord = (math.sin(delta_phi / 2) ** 2
                  + math.cos(phi1) * math.cos(phi2) * math.sin(delta_lam / 2) ** 2)
    distance_km = 2 * math.asin(math.sqrt(half_chord)) * earth_radius_km

    # Initial bearing from point 1 toward point 2, normalized to [0, 360)
    east = math.sin(delta_lam) * math.cos(phi2)
    north = math.cos(phi1) * math.sin(phi2) - (math.sin(phi1) * math.cos(phi2) * math.cos(delta_lam))
    compass_bearing = (math.degrees(math.atan2(east, north)) + 360) % 360

    return compass_bearing, distance_km, None
|
||||
|
||||
def log_locationData_toMap(userID, location, message):
|
||||
"""
|
||||
Logs location data to a CSV file for meshing purposes.
|
||||
@@ -1207,40 +1721,277 @@ def mapHandler(userID, deviceID, channel_number, message, snr, rssi, hop):
|
||||
"""
|
||||
Handles 'map' commands from meshbot.
|
||||
Usage:
|
||||
map <description text> - Log current location with description
|
||||
map save <name> [description] - Save current location with a name
|
||||
map save public <name> [desc] - Save public location (all can see)
|
||||
map <name> - Get heading and distance to a saved location
|
||||
map public <name> - Get heading to public location (ignores private)
|
||||
map delete <name> - Delete a location
|
||||
map list - List all saved locations
|
||||
map log <description> - Log current location with description (CSV, legacy)
|
||||
"""
|
||||
command = str(command) # Ensure command is always a string
|
||||
|
||||
if command.strip().lower() == "?":
|
||||
if command.strip().lower() == "help":
|
||||
return (
|
||||
"Usage:\n"
|
||||
" 🗺️map <description text> - Log your current location with a description\n"
|
||||
"Example:\n"
|
||||
" 🗺️map Found a new mesh node near the park"
|
||||
f"'map save <name> [description]' - Save private\n"
|
||||
f"'map save public <name> [desc]' - Save public\n"
|
||||
f"'map <name>' - heading to saved\n"
|
||||
f"'map public <name>' - heading to public\n"
|
||||
f"'map delete <name>' \n"
|
||||
f"'map list' - List\n"
|
||||
f"'map log <description>' - Log CSV\n"
|
||||
)
|
||||
|
||||
description = command.strip()
|
||||
# if no description provided, set to default
|
||||
if not description:
|
||||
description = "Logged:"
|
||||
# Sanitize description for CSV injection
|
||||
if description and description[0] in ('=', '+', '-', '@'):
|
||||
description = "'" + description
|
||||
|
||||
# if there is SNR and RSSI info, append to description
|
||||
if snr is not None and rssi is not None:
|
||||
description += f" SNR:{snr}dB RSSI:{rssi}dBm"
|
||||
# Handle "save" command
|
||||
if command.lower().startswith("save "):
|
||||
save_cmd = command[5:].strip()
|
||||
is_public = False
|
||||
|
||||
# Check for "public" keyword
|
||||
if save_cmd.lower().startswith("public "):
|
||||
is_public = True
|
||||
save_cmd = save_cmd[7:].strip() # Remove "public " prefix
|
||||
|
||||
parts = save_cmd.split(" ", 1)
|
||||
if len(parts) < 1 or not parts[0]:
|
||||
if is_public:
|
||||
return "🚫Usage: map save public <name> [description]"
|
||||
else:
|
||||
return "🚫Usage: map save <name> [description]"
|
||||
|
||||
location_name = parts[0]
|
||||
description = parts[1] if len(parts) > 1 else ""
|
||||
|
||||
# Add SNR/RSSI info to description if available
|
||||
if snr is not None and rssi is not None:
|
||||
if description:
|
||||
description += f" SNR:{snr}dB RSSI:{rssi}dBm"
|
||||
else:
|
||||
description = f"SNR:{snr}dB RSSI:{rssi}dBm"
|
||||
|
||||
if hop is not None:
|
||||
if description:
|
||||
description += f" Meta:{hop}"
|
||||
else:
|
||||
description = f"Meta:{hop}"
|
||||
|
||||
if not location or len(location) != 2 or lat == 0 or lon == 0:
|
||||
return "🚫Location data is missing or invalid."
|
||||
|
||||
# Get altitude for the node
|
||||
altitude = get_node_altitude(userID, deviceID)
|
||||
|
||||
success, msg, _ = save_location_to_db(location_name, lat, lon, description, str(userID), is_public, altitude)
|
||||
|
||||
if success:
|
||||
return f"📍{msg}"
|
||||
else:
|
||||
return f"🚫{msg}"
|
||||
|
||||
# if there is hop info, append to description
|
||||
if hop is not None:
|
||||
description += f" Meta:{hop}"
|
||||
# Handle "list" command
|
||||
if command.strip().lower() == "list":
|
||||
return list_locations_from_db(str(userID))
|
||||
|
||||
# Handle "delete" command
|
||||
if command.lower().startswith("delete "):
|
||||
location_name = command[7:].strip() # Remove "delete " prefix
|
||||
if not location_name:
|
||||
return "🚫Usage: map delete <name>"
|
||||
|
||||
success, msg = delete_location_from_db(location_name, str(userID))
|
||||
if success:
|
||||
return f"🗑️{msg}"
|
||||
else:
|
||||
return f"🚫{msg}"
|
||||
|
||||
# Handle "public" command to retrieve public locations (even if user has private with same name)
|
||||
if command.lower().startswith("public "):
|
||||
location_name = command[7:].strip() # Remove "public " prefix
|
||||
if not location_name:
|
||||
return "🚫Usage: map public <name>"
|
||||
|
||||
saved_location = get_public_location_from_db(location_name)
|
||||
|
||||
if saved_location:
|
||||
# Calculate heading and distance from current location
|
||||
if not location or len(location) != 2 or lat == 0 or lon == 0:
|
||||
result = f"📍{saved_location['name']} (Public): {saved_location['lat']:.5f}, {saved_location['lon']:.5f}"
|
||||
if saved_location.get('altitude') is not None:
|
||||
result += f" @ {saved_location['altitude']:.1f}m"
|
||||
result += "\n🚫Current location not available for heading"
|
||||
return result
|
||||
|
||||
bearing, distance_km, error = calculate_heading_and_distance(
|
||||
lat, lon, saved_location['lat'], saved_location['lon']
|
||||
)
|
||||
|
||||
if error:
|
||||
return f"📍{saved_location['name']} (Public): {error}"
|
||||
|
||||
# Format distance
|
||||
if my_settings.use_metric:
|
||||
distance_str = f"{distance_km:.2f} km"
|
||||
else:
|
||||
distance_miles = distance_km * 0.621371
|
||||
if distance_miles < 0.25:
|
||||
# Convert to feet for short distances
|
||||
distance_feet = distance_miles * 5280
|
||||
distance_str = f"{distance_feet:.0f} ft"
|
||||
else:
|
||||
distance_str = f"{distance_miles:.2f} miles"
|
||||
|
||||
# Format bearing with cardinal direction
|
||||
bearing_rounded = round(bearing)
|
||||
cardinal = ""
|
||||
if bearing_rounded == 0 or bearing_rounded == 360:
|
||||
cardinal = "N"
|
||||
elif bearing_rounded == 90:
|
||||
cardinal = "E"
|
||||
elif bearing_rounded == 180:
|
||||
cardinal = "S"
|
||||
elif bearing_rounded == 270:
|
||||
cardinal = "W"
|
||||
elif 0 < bearing_rounded < 90:
|
||||
cardinal = "NE"
|
||||
elif 90 < bearing_rounded < 180:
|
||||
cardinal = "SE"
|
||||
elif 180 < bearing_rounded < 270:
|
||||
cardinal = "SW"
|
||||
elif 270 < bearing_rounded < 360:
|
||||
cardinal = "NW"
|
||||
|
||||
result = f"📍{saved_location['name']} (Public)\n"
|
||||
result += f"🧭Heading: {bearing_rounded}° {cardinal}\n"
|
||||
result += f"📏Distance: {distance_str}"
|
||||
|
||||
# Calculate altitude difference if both are available
|
||||
current_altitude = get_node_altitude(userID, deviceID)
|
||||
saved_altitude = saved_location.get('altitude')
|
||||
if current_altitude is not None and saved_altitude is not None:
|
||||
altitude_diff_m = saved_altitude - current_altitude # message altitude - DB altitude
|
||||
altitude_diff_ft = altitude_diff_m * 3.28084 # Convert meters to feet
|
||||
altitude_diff_ft_rounded = round(altitude_diff_ft) # Round to nearest foot
|
||||
if altitude_diff_ft_rounded > 0:
|
||||
result += f"\n⛰️Altitude: +{altitude_diff_ft_rounded}ft" # Message is higher
|
||||
elif altitude_diff_ft_rounded < 0:
|
||||
result += f"\n⛰️Altitude: {altitude_diff_ft_rounded}ft" # Message is lower (negative already has -)
|
||||
else:
|
||||
result += f"\n⛰️Altitude: ±0ft"
|
||||
|
||||
if saved_location['description']:
|
||||
result += f"\n📝{saved_location['description']}"
|
||||
return result
|
||||
else:
|
||||
return f"🚫Public location '{location_name}' not found."
|
||||
|
||||
# Handle "log" command for CSV logging
|
||||
if command.lower().startswith("log "):
|
||||
description = command[4:].strip() # Remove "log " prefix
|
||||
# if no description provided, set to default
|
||||
if not description:
|
||||
description = "Logged:"
|
||||
# Sanitize description for CSV injection
|
||||
if description and description[0] in ('=', '+', '-', '@'):
|
||||
description = "'" + description
|
||||
|
||||
# location should be a tuple: (lat, lon)
|
||||
if not location or len(location) != 2:
|
||||
return "🚫Location data is missing or invalid."
|
||||
# if there is SNR and RSSI info, append to description
|
||||
if snr is not None and rssi is not None:
|
||||
description += f" SNR:{snr}dB RSSI:{rssi}dBm"
|
||||
|
||||
# if there is hop info, append to description
|
||||
if hop is not None:
|
||||
description += f" Meta:{hop}"
|
||||
|
||||
success = log_locationData_toMap(userID, location, description)
|
||||
if success:
|
||||
return f"📍Location logged "
|
||||
else:
|
||||
return "🚫Failed to log location. Please try again."
|
||||
# location should be a tuple: (lat, lon)
|
||||
if not location or len(location) != 2:
|
||||
return "🚫Location data is missing or invalid."
|
||||
|
||||
success = log_locationData_toMap(userID, location, description)
|
||||
if success:
|
||||
return f"📍Location logged (CSV)"
|
||||
else:
|
||||
return "🚫Failed to log location. Please try again."
|
||||
|
||||
# Handle location name lookup (get heading)
|
||||
if command.strip():
|
||||
location_name = command.strip()
|
||||
saved_location = get_location_from_db(location_name, str(userID))
|
||||
|
||||
if saved_location:
|
||||
# Calculate heading and distance from current location
|
||||
if not location or len(location) != 2 or lat == 0 or lon == 0:
|
||||
result = f"📍{saved_location['name']}: {saved_location['lat']:.5f}, {saved_location['lon']:.5f}"
|
||||
if saved_location.get('altitude') is not None:
|
||||
result += f" @ {saved_location['altitude']:.1f}m"
|
||||
result += "\n🚫Current location not available for heading"
|
||||
return result
|
||||
|
||||
bearing, distance_km, error = calculate_heading_and_distance(
|
||||
lat, lon, saved_location['lat'], saved_location['lon']
|
||||
)
|
||||
|
||||
if error:
|
||||
return f"📍{saved_location['name']}: {error}"
|
||||
|
||||
# Format distance
|
||||
if my_settings.use_metric:
|
||||
distance_str = f"{distance_km:.2f} km"
|
||||
else:
|
||||
distance_miles = distance_km * 0.621371
|
||||
if distance_miles < 0.25:
|
||||
# Convert to feet for short distances
|
||||
distance_feet = distance_miles * 5280
|
||||
distance_str = f"{distance_feet:.0f} ft"
|
||||
else:
|
||||
distance_str = f"{distance_miles:.2f} miles"
|
||||
|
||||
# Format bearing with cardinal direction
|
||||
bearing_rounded = round(bearing)
|
||||
cardinal = ""
|
||||
if bearing_rounded == 0 or bearing_rounded == 360:
|
||||
cardinal = "N"
|
||||
elif bearing_rounded == 90:
|
||||
cardinal = "E"
|
||||
elif bearing_rounded == 180:
|
||||
cardinal = "S"
|
||||
elif bearing_rounded == 270:
|
||||
cardinal = "W"
|
||||
elif 0 < bearing_rounded < 90:
|
||||
cardinal = "NE"
|
||||
elif 90 < bearing_rounded < 180:
|
||||
cardinal = "SE"
|
||||
elif 180 < bearing_rounded < 270:
|
||||
cardinal = "SW"
|
||||
elif 270 < bearing_rounded < 360:
|
||||
cardinal = "NW"
|
||||
|
||||
result = f"📍{saved_location['name']}\n"
|
||||
result += f"🧭Heading: {bearing_rounded}° {cardinal}\n"
|
||||
result += f"📏Distance: {distance_str}"
|
||||
|
||||
# Calculate altitude difference if both are available
|
||||
current_altitude = get_node_altitude(userID, deviceID)
|
||||
saved_altitude = saved_location.get('altitude')
|
||||
if current_altitude is not None and saved_altitude is not None:
|
||||
altitude_diff_m = saved_altitude - current_altitude # message altitude - DB altitude
|
||||
altitude_diff_ft = altitude_diff_m * 3.28084 # Convert meters to feet
|
||||
altitude_diff_ft_rounded = round(altitude_diff_ft) # Round to nearest foot
|
||||
if altitude_diff_ft_rounded > 0:
|
||||
result += f"\n⛰️Altitude: +{altitude_diff_ft_rounded}ft" # Message is higher
|
||||
elif altitude_diff_ft_rounded < 0:
|
||||
result += f"\n⛰️Altitude: {altitude_diff_ft_rounded}ft" # Message is lower (negative already has -)
|
||||
else:
|
||||
result += f"\n⛰️Altitude: ±0ft"
|
||||
|
||||
if saved_location['description']:
|
||||
result += f"\n📝{saved_location['description']}"
|
||||
return result
|
||||
else:
|
||||
# Location not found
|
||||
return f"🚫Location '{location_name}' not found. Use 'map list' to see available locations."
|
||||
|
||||
# Empty command - show help
|
||||
return "🗺️Use 'map help' for help"
|
||||
|
||||
# Initialize the locations database when module is imported
|
||||
initialize_locations_database()
|
||||
|
||||
+17
-6
@@ -5,6 +5,7 @@ import schedule
|
||||
from datetime import datetime
|
||||
from modules.log import logger
|
||||
from modules.system import send_message
|
||||
from modules.settings import MOTD, schedulerMotd, schedulerMessage, schedulerChannel, schedulerInterface, schedulerValue, schedulerTime, schedulerInterval
|
||||
|
||||
async def run_scheduler_loop(interval=1):
|
||||
logger.debug(f"System: Scheduler loop started Tasks: {len(schedule.jobs)}, Details:{extract_schedule_fields(schedule.get_jobs())}")
|
||||
@@ -24,11 +25,12 @@ async def run_scheduler_loop(interval=1):
|
||||
except asyncio.CancelledError:
|
||||
logger.debug("System: Scheduler loop cancelled, shutting down.")
|
||||
|
||||
def safe_int(val, default=0, type=""):
|
||||
def safe_int(val, default=0, type=''):
|
||||
try:
|
||||
return int(val)
|
||||
except (ValueError, TypeError):
|
||||
logger.debug(f"System: Scheduler config {type} error '{val}' to int, using default {default}")
|
||||
if val != '':
|
||||
logger.debug(f"System: Scheduler config {type} error '{val}' to int, using default {default}")
|
||||
return default
|
||||
|
||||
def extract_schedule_fields(jobs):
|
||||
@@ -102,14 +104,23 @@ def setup_scheduler(
|
||||
|
||||
# Basic Scheduler Options
|
||||
basicOptions = ['day', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun', 'hour', 'min']
|
||||
effective_interval = schedulerIntervalInt
|
||||
if any(option in schedulerValue for option in basicOptions):
|
||||
if schedulerValue == 'day':
|
||||
day_interval = safe_int(schedulerInterval, 1, type="interval")
|
||||
if day_interval < 1:
|
||||
logger.debug(f"System: Scheduler config interval '{schedulerInterval}' invalid for day schedule, using default 1")
|
||||
day_interval = 1
|
||||
effective_interval = day_interval
|
||||
if schedulerTime:
|
||||
# Specific time each day
|
||||
schedule.every().day.at(schedulerTime).do(send_sched_msg)
|
||||
# Specific time at a daily or multi-day interval
|
||||
if day_interval == 1:
|
||||
schedule.every().day.at(schedulerTime).do(send_sched_msg)
|
||||
else:
|
||||
schedule.every(day_interval).days.at(schedulerTime).do(send_sched_msg)
|
||||
else:
|
||||
# Every N days
|
||||
schedule.every(schedulerIntervalInt).days.do(send_sched_msg)
|
||||
schedule.every(day_interval).days.do(send_sched_msg)
|
||||
elif 'mon' in schedulerValue and schedulerTime:
|
||||
schedule.every().monday.at(schedulerTime).do(send_sched_msg)
|
||||
elif 'tue' in schedulerValue and schedulerTime:
|
||||
@@ -128,7 +139,7 @@ def setup_scheduler(
|
||||
schedule.every(schedulerIntervalInt).hours.do(send_sched_msg)
|
||||
elif 'min' in schedulerValue:
|
||||
schedule.every(schedulerIntervalInt).minutes.do(send_sched_msg)
|
||||
logger.debug(f"System: Starting the basic scheduler to send '{scheduler_message}' on schedule '{schedulerValue}' every {schedulerIntervalInt} interval at time '{schedulerTime}' on Device:{schedulerInterface} Channel:{schedulerChannel}")
|
||||
logger.debug(f"System: Starting the basic scheduler to send '{scheduler_message}' on schedule '{schedulerValue}' every {effective_interval} interval at time '{schedulerTime}' on Device:{schedulerInterface} Channel:{schedulerChannel}")
|
||||
elif 'joke' in schedulerValue:
|
||||
schedule.every(schedulerIntervalInt).minutes.do(
|
||||
lambda: send_message(tell_joke(), schedulerChannel, 0, schedulerInterface)
|
||||
|
||||
@@ -135,6 +135,10 @@ if 'inventory' not in config:
|
||||
config['inventory'] = {'enabled': 'False', 'inventory_db': 'data/inventory.db', 'disable_penny': 'False'}
|
||||
config.write(open(config_file, 'w'))
|
||||
|
||||
if 'location' not in config:
|
||||
config['location'] = {'locations_db': 'data/locations.db', 'public_location_admin_manage': 'False', 'delete_public_locations_admins_only': 'False'}
|
||||
config.write(open(config_file, 'w'))
|
||||
|
||||
# interface1 settings
|
||||
interface1_type = config['interface'].get('type', 'serial')
|
||||
port1 = config['interface'].get('port', '')
|
||||
@@ -323,6 +327,9 @@ try:
|
||||
coastalForecastDays = config['location'].getint('coastalForecastDays', 3) # default 3 days
|
||||
|
||||
# location alerts
|
||||
alert_duration = config['location'].getint('alertDuration', 20) # default 20 minutes
|
||||
if alert_duration < 10: # the API calls need throttle time
|
||||
alert_duration = 10
|
||||
eAlertBroadcastEnabled = config['location'].getboolean('eAlertBroadcastEnabled', False) # old deprecated name
|
||||
ipawsAlertEnabled = config['location'].getboolean('ipawsAlertEnabled', False) # default False new ^
|
||||
# Keep both in sync for backward compatibility
|
||||
@@ -390,6 +397,11 @@ try:
|
||||
inventory_db = config['inventory'].get('inventory_db', 'data/inventory.db')
|
||||
disable_penny = config['inventory'].getboolean('disable_penny', False)
|
||||
|
||||
# location mapping
|
||||
locations_db = config['location'].get('locations_db', 'data/locations.db')
|
||||
public_location_admin_manage = config['location'].getboolean('public_location_admin_manage', False)
|
||||
delete_public_locations_admins_only = config['location'].getboolean('delete_public_locations_admins_only', False)
|
||||
|
||||
# E-Mail Settings
|
||||
sysopEmails = config['smtp'].get('sysopEmails', '').split(',')
|
||||
enableSMTP = config['smtp'].getboolean('enableSMTP', False)
|
||||
@@ -504,6 +516,10 @@ try:
|
||||
autoBanThreshold = config['messagingSettings'].getint('autoBanThreshold', 5) # default 5 offenses
|
||||
autoBanTimeframe = config['messagingSettings'].getint('autoBanTimeframe', 3600) # default 1 hour in seconds
|
||||
apiThrottleValue = config['messagingSettings'].getint('apiThrottleValue', 20) # default 20 requests
|
||||
|
||||
# data persistence settings
|
||||
dataPersistence_enabled = config.getboolean('dataPersistence', 'enabled', fallback=True) # default True
|
||||
dataPersistence_interval = config.getint('dataPersistence', 'interval', fallback=300) # default 300 seconds (5 minutes)
|
||||
except Exception as e:
|
||||
print(f"System: Error reading config file: {e}")
|
||||
print("System: Check the config.ini against config.template file for missing sections or values.")
|
||||
|
||||
+7
-1
@@ -37,6 +37,12 @@ def hf_band_conditions():
|
||||
def solar_conditions():
|
||||
# radio related solar conditions from hamsql.com
|
||||
solar_cond = ""
|
||||
solar_a_index = ""
|
||||
solar_k_index = ""
|
||||
solar_xray = ""
|
||||
solar_flux = ""
|
||||
sunspots = ""
|
||||
signalnoise = ""
|
||||
try:
|
||||
solar_cond = requests.get("https://www.hamqsl.com/solarxml.php", timeout=urlTimeoutSeconds)
|
||||
if solar_cond.ok:
|
||||
@@ -52,7 +58,7 @@ def solar_conditions():
|
||||
solar_flux = i.getElementsByTagName("solarflux")[0].childNodes[0].data
|
||||
sunspots = i.getElementsByTagName("sunspots")[0].childNodes[0].data
|
||||
signalnoise = i.getElementsByTagName("signalnoise")[0].childNodes[0].data
|
||||
solar_cond = "A-Index: " + solar_a_index + "\nK-Index: " + solar_k_index + "\nSunspots: " + sunspots + "\nX-Ray Flux: " + solar_xray + "\nSolar Flux: " + solar_flux + "\nSignal Noise: " + signalnoise
|
||||
solar_cond = "A: " + solar_a_index + "\nK: " + solar_k_index + "\nSunspots: " + sunspots + "\nX-Ray Flux: " + solar_xray + "\nSolar Flux: " + solar_flux + "\nNoise: " + signalnoise
|
||||
else:
|
||||
logger.error("Solar: Error fetching solar conditions")
|
||||
solar_cond = ERROR_FETCHING_DATA
|
||||
|
||||
+121
-26
@@ -852,7 +852,7 @@ def messageChunker(message):
|
||||
except Exception as e:
|
||||
logger.warning(f"System: Exception during message chunking: {e} (message length: {len(message)})")
|
||||
|
||||
def send_message(message, ch, nodeid=0, nodeInt=1, bypassChuncking=False):
|
||||
def send_message(message, ch, nodeid=0, nodeInt=1, bypassChuncking=False, reply_id=None):
|
||||
# Send a message to a channel or DM
|
||||
interface = globals()[f'interface{nodeInt}']
|
||||
# Check if the message is empty
|
||||
@@ -860,6 +860,28 @@ def send_message(message, ch, nodeid=0, nodeInt=1, bypassChuncking=False):
|
||||
return False
|
||||
|
||||
try:
|
||||
def _send_with_reply(**kwargs):
|
||||
# For threaded replies, send as DATA payload to match Meshtastic inline-reply behavior. no API call today.
|
||||
if reply_id is not None:
|
||||
text_payload = kwargs.pop('text', '')
|
||||
if isinstance(text_payload, str):
|
||||
raw_payload = text_payload.encode('utf-8')
|
||||
else:
|
||||
raw_payload = text_payload
|
||||
|
||||
data_kwargs = {
|
||||
# 1 == TEXT_MESSAGE_APP, required so clients render payload as chat text.
|
||||
'portNum': 1,
|
||||
'channelIndex': kwargs.get('channelIndex', ch),
|
||||
'wantAck': kwargs.get('wantAck', wantAck),
|
||||
}
|
||||
if kwargs.get('destinationId'):
|
||||
data_kwargs['destinationId'] = kwargs.get('destinationId')
|
||||
# send the data payload with the replyId for threading
|
||||
return interface.sendData(raw_payload, replyId=reply_id, **data_kwargs)
|
||||
# Otherwise, send as normal text message
|
||||
return interface.sendText(**kwargs)
|
||||
|
||||
# Force chunking and log if message exceeds maxBuffer
|
||||
if len(message.encode('utf-8')) > maxBuffer:
|
||||
logger.debug(f"System: Message length {len(message.encode('utf-8'))} exceeds maxBuffer{maxBuffer}, forcing chunking.")
|
||||
@@ -880,20 +902,20 @@ def send_message(message, ch, nodeid=0, nodeInt=1, bypassChuncking=False):
|
||||
# Send to channel
|
||||
if wantAck:
|
||||
logger.info(f"Device:{nodeInt} Channel:{ch} " + CustomFormatter.red + f"req.ACK " + f"Chunker{chunkOf} SendingChannel: " + CustomFormatter.white + m.replace('\n', ' '))
|
||||
interface.sendText(text=m, channelIndex=ch, wantAck=True)
|
||||
_send_with_reply(text=m, channelIndex=ch, wantAck=True)
|
||||
else:
|
||||
logger.info(f"Device:{nodeInt} Channel:{ch} " + CustomFormatter.red + f"Chunker{chunkOf} SendingChannel: " + CustomFormatter.white + m.replace('\n', ' '))
|
||||
interface.sendText(text=m, channelIndex=ch)
|
||||
_send_with_reply(text=m, channelIndex=ch)
|
||||
else:
|
||||
# Send to DM
|
||||
if wantAck:
|
||||
logger.info(f"Device:{nodeInt} " + CustomFormatter.red + f"req.ACK " + f"Chunker{chunkOf} Sending DM: " + CustomFormatter.white + m.replace('\n', ' ') + CustomFormatter.purple +\
|
||||
" To: " + CustomFormatter.white + f"{get_name_from_number(nodeid, 'long', nodeInt)}")
|
||||
interface.sendText(text=m, channelIndex=ch, destinationId=nodeid, wantAck=True)
|
||||
_send_with_reply(text=m, channelIndex=ch, destinationId=nodeid, wantAck=True)
|
||||
else:
|
||||
logger.info(f"Device:{nodeInt} " + CustomFormatter.red + f"Chunker{chunkOf} Sending DM: " + CustomFormatter.white + m.replace('\n', ' ') + CustomFormatter.purple +\
|
||||
" To: " + CustomFormatter.white + f"{get_name_from_number(nodeid, 'long', nodeInt)}")
|
||||
interface.sendText(text=m, channelIndex=ch, destinationId=nodeid)
|
||||
_send_with_reply(text=m, channelIndex=ch, destinationId=nodeid)
|
||||
|
||||
# Throttle the message sending to prevent spamming the device
|
||||
if (message_list.index(m)+1) % 4 == 0:
|
||||
@@ -908,20 +930,20 @@ def send_message(message, ch, nodeid=0, nodeInt=1, bypassChuncking=False):
|
||||
# Send to channel
|
||||
if wantAck:
|
||||
logger.info(f"Device:{nodeInt} Channel:{ch} " + CustomFormatter.red + "req.ACK " + "SendingChannel: " + CustomFormatter.white + message.replace('\n', ' '))
|
||||
interface.sendText(text=message, channelIndex=ch, wantAck=True)
|
||||
_send_with_reply(text=message, channelIndex=ch, wantAck=True)
|
||||
else:
|
||||
logger.info(f"Device:{nodeInt} Channel:{ch} " + CustomFormatter.red + "SendingChannel: " + CustomFormatter.white + message.replace('\n', ' '))
|
||||
interface.sendText(text=message, channelIndex=ch)
|
||||
_send_with_reply(text=message, channelIndex=ch)
|
||||
else:
|
||||
# Send to DM
|
||||
if wantAck:
|
||||
logger.info(f"Device:{nodeInt} " + CustomFormatter.red + "req.ACK " + "Sending DM: " + CustomFormatter.white + message.replace('\n', ' ') + CustomFormatter.purple +\
|
||||
" To: " + CustomFormatter.white + f"{get_name_from_number(nodeid, 'long', nodeInt)}")
|
||||
interface.sendText(text=message, channelIndex=ch, destinationId=nodeid, wantAck=True)
|
||||
_send_with_reply(text=message, channelIndex=ch, destinationId=nodeid, wantAck=True)
|
||||
else:
|
||||
logger.info(f"Device:{nodeInt} " + CustomFormatter.red + "Sending DM: " + CustomFormatter.white + message.replace('\n', ' ') + CustomFormatter.purple +\
|
||||
" To: " + CustomFormatter.white + f"{get_name_from_number(nodeid, 'long', nodeInt)}")
|
||||
interface.sendText(text=message, channelIndex=ch, destinationId=nodeid)
|
||||
_send_with_reply(text=message, channelIndex=ch, destinationId=nodeid)
|
||||
# Throttle the message sending to prevent spamming the device
|
||||
time.sleep(responseDelay)
|
||||
return True
|
||||
@@ -929,17 +951,24 @@ def send_message(message, ch, nodeid=0, nodeInt=1, bypassChuncking=False):
|
||||
logger.error(f"System: Exception during send_message: {e} (message length: {len(message)})")
|
||||
return False
|
||||
|
||||
def send_raw_bytes(nodeid, raw_bytes, nodeInt=1, channel=0, portnum=256, want_ack=True):
|
||||
def send_raw_bytes(nodeid, raw_bytes, nodeInt=1, channel=0, portnum=256, want_ack=True, reply_id=None):
|
||||
# Send raw bytes to a node using the Meshtastic interface.
|
||||
interface = globals()[f'interface{nodeInt}']
|
||||
try:
|
||||
interface.sendData(
|
||||
raw_bytes,
|
||||
destinationId=nodeid,
|
||||
portNum=portnum,
|
||||
channelIndex=channel,
|
||||
wantAck=want_ack
|
||||
)
|
||||
send_kwargs = {
|
||||
'destinationId': nodeid,
|
||||
'portNum': portnum,
|
||||
'channelIndex': channel,
|
||||
'wantAck': want_ack,
|
||||
}
|
||||
if reply_id is not None:
|
||||
try:
|
||||
interface.sendData(raw_bytes, replyId=reply_id, **send_kwargs)
|
||||
except TypeError:
|
||||
logger.debug("System: replyId/replyID unsupported for sendData; sending without threaded reply")
|
||||
interface.sendData(raw_bytes, **send_kwargs)
|
||||
else:
|
||||
interface.sendData(raw_bytes, **send_kwargs)
|
||||
# Throttle the message sending to prevent spamming the device
|
||||
logger.debug(f"System: Sent raw bytes to {nodeid} on portnum {portnum} via Device{nodeInt}")
|
||||
time.sleep(responseDelay)
|
||||
@@ -1304,8 +1333,8 @@ def handleAlertBroadcast(deviceID=1):
|
||||
if should_send_alert("overdue", overdueAlerts, min_interval=300): # 5 minutes interval for overdue alerts
|
||||
send_message(overdueAlerts, emergency_responder_alert_channel, 0, emergency_responder_alert_interface)
|
||||
|
||||
# Only allow API call every 20 minutes
|
||||
if not (clock.minute % 20 == 0 and clock.second <= 17):
|
||||
# Only allow API call every alert_duration minutes at xx:00, xx:20, xx:40
|
||||
if not (clock.minute % alert_duration == 0 and clock.second <= 17):
|
||||
return False
|
||||
|
||||
# Collect alerts
|
||||
@@ -1518,6 +1547,14 @@ def initializeMeshLeaderboard():
|
||||
}
|
||||
|
||||
initializeMeshLeaderboard()
|
||||
|
||||
# Known Meshtastic firmware PKI routing errors and practical operator guidance.
|
||||
PKI_ROUTING_ERROR_HINTS = {
|
||||
'PKI_SEND_FAIL_PUBLIC_KEY': 'bot does not have destination public key. or key is missing from the device. Add the destination nodeID to the favorite nodes list, then retry.',
|
||||
'PKI_UNKNOWN_PUBKEY': 'Receiver could not decrypt PKI packet due to missing sender public key. Trigger a NodeInfo exchange both directions, then retry.',
|
||||
'PKI_FAILED': 'PKI was explicitly requested but send prerequisites were not met. Verify PKI-capable firmware/config, key material, and direct-send destination.',
|
||||
}
|
||||
|
||||
def consumeMetadata(packet, rxNode=0, channel=-1):
|
||||
global positionMetadata, localTelemetryData, meshLeaderboard
|
||||
uptime = battery = temp = iaq = nodeID = 0
|
||||
@@ -1830,6 +1867,39 @@ def consumeMetadata(packet, rxNode=0, channel=-1):
|
||||
except Exception as e:
|
||||
logger.debug(f"System: ADMIN_APP decode error: Device: {rxNode} Channel: {channel} {e} packet {packet}")
|
||||
|
||||
# ROUTING_APP - meta for logs
|
||||
if packet_type == 'ROUTING_APP':
|
||||
try:
|
||||
if debugMetadata and 'ROUTING_APP' not in metadataFilter:
|
||||
print(f"DEBUG ROUTING_APP: {packet}\n\n")
|
||||
routing_data = packet['decoded']['routing']
|
||||
|
||||
# Meshtastic Python/client can surface this field as errorReason or error_reason.
|
||||
error_reason = routing_data.get('errorReason', routing_data.get('error_reason', ''))
|
||||
if error_reason:
|
||||
requester_node = packet.get('from', nodeID)
|
||||
requester_id = packet.get('fromId', '')
|
||||
target_node = packet.get('to', 0)
|
||||
request_id = packet.get('decoded', {}).get('requestId', packet.get('decoded', {}).get('request_id', 0))
|
||||
pki_hint = PKI_ROUTING_ERROR_HINTS.get(error_reason, 'No playbook entry yet. Check node public keys/NodeInfo sync and firmware versions on both peers.')
|
||||
|
||||
# Standardized PKI routing failure log with source/target context for triage.
|
||||
if str(error_reason).startswith('PKI_'):
|
||||
logger.warning(
|
||||
f"System: PKI Routing Error Device:{rxNode} Channel:{channel} Reason:{error_reason} "
|
||||
f"RequesterNode:{requester_node} RequesterID:{requester_id} "
|
||||
f"RequesterShort:{get_name_from_number(requester_node, 'short', rxNode)} "
|
||||
f"TargetNode:{target_node} RequestId:{request_id} Guidance:{pki_hint}"
|
||||
)
|
||||
elif logMetaStats:
|
||||
logger.info(
|
||||
f"System: ROUTING_APP Error Device:{rxNode} Channel:{channel} Reason:{error_reason} "
|
||||
f"RequesterNode:{requester_node} TargetNode:{target_node} RequestId:{request_id}"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.debug(f"System: ROUTING_APP decode error: Device: {rxNode} Channel: {channel} {e} packet {packet}")
|
||||
|
||||
|
||||
# IP_TUNNEL_APP - Track tunneling packets 🚨
|
||||
if packet_type == 'IP_TUNNEL_APP':
|
||||
try:
|
||||
@@ -2425,8 +2495,36 @@ async def watchdog():
|
||||
load_bbsdm()
|
||||
load_bbsdb()
|
||||
|
||||
def saveAllData():
|
||||
try:
|
||||
# Save BBS data if enabled
|
||||
if bbs_enabled:
|
||||
save_bbsdb()
|
||||
save_bbsdm()
|
||||
logger.debug("Persistence: BBS data saved")
|
||||
|
||||
# Save leaderboard data if enabled
|
||||
if logMetaStats:
|
||||
saveLeaderboard()
|
||||
logger.debug("Persistence: Leaderboard data saved")
|
||||
|
||||
# Save ban list
|
||||
save_bbsBanList()
|
||||
logger.debug("Persistence: Ban list saved")
|
||||
|
||||
#logger.info("Persistence: Save completed")
|
||||
except Exception as e:
|
||||
logger.error(f"Persistence: Save error: {e}")
|
||||
|
||||
async def dataPersistenceLoop():
|
||||
"""Data persistence service loop for periodic data saving"""
|
||||
logger.debug("Persistence: Loop started")
|
||||
while True:
|
||||
await asyncio.sleep(dataPersistence_interval)
|
||||
saveAllData()
|
||||
|
||||
def exit_handler():
|
||||
# Close the interface and save the BBS messages
|
||||
# Close the interface and save all data
|
||||
logger.debug(f"System: Closing Autoresponder")
|
||||
try:
|
||||
logger.debug(f"System: Closing Interface1")
|
||||
@@ -2438,12 +2536,9 @@ def exit_handler():
|
||||
globals()[f'interface{i}'].close()
|
||||
except Exception as e:
|
||||
logger.error(f"System: closing: {e}")
|
||||
if bbs_enabled:
|
||||
save_bbsdb()
|
||||
save_bbsdm()
|
||||
logger.debug(f"System: BBS Messages Saved")
|
||||
if logMetaStats:
|
||||
saveLeaderboard()
|
||||
|
||||
saveAllData()
|
||||
|
||||
logger.debug(f"System: Exiting")
|
||||
asyncLoop.stop()
|
||||
asyncLoop.close()
|
||||
|
||||
+4
-1
@@ -671,8 +671,11 @@ async def main():
|
||||
# Create core tasks
|
||||
tasks.append(asyncio.create_task(start_rx(), name="mesh_rx"))
|
||||
tasks.append(asyncio.create_task(watchdog(), name="watchdog"))
|
||||
|
||||
|
||||
# Add optional tasks
|
||||
if my_settings.dataPersistence_enabled:
|
||||
tasks.append(asyncio.create_task(dataPersistenceLoop(), name="data_persistence"))
|
||||
|
||||
if my_settings.file_monitor_enabled:
|
||||
tasks.append(asyncio.create_task(handleFileWatcher(), name="file_monitor"))
|
||||
|
||||
|
||||
+2
-3
@@ -1,7 +1,6 @@
|
||||
meshtastic
|
||||
pubsub
|
||||
datetime
|
||||
pyephem
|
||||
PyPubSub
|
||||
ephem
|
||||
requests
|
||||
maidenhead
|
||||
beautifulsoup4
|
||||
|
||||
@@ -18,7 +18,7 @@ try:
|
||||
from pubsub import pub
|
||||
from meshtastic.protobuf import mesh_pb2, portnums_pb2
|
||||
except ImportError:
|
||||
print("meshtastic API not found. pip install -U meshtastic")
|
||||
print("meshtastic API not found. pip install -U meshtastic")
|
||||
exit(1)
|
||||
|
||||
try:
|
||||
@@ -26,6 +26,8 @@ try:
|
||||
from mudp.encryption import generate_hash
|
||||
except ImportError:
|
||||
print("mUDP module not found. pip install -U mudp")
|
||||
print("If launching, venv run source venv/bin/activate and then pip install -U mudp pygame-ce")
|
||||
print("use deactivate to exit venv when done")
|
||||
exit(1)
|
||||
try:
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
|
||||
|
||||
Reference in New Issue
Block a user