Merge pull request #265 from SpudGunMan/lab

Lab
This commit is contained in:
Kelly
2025-11-11 23:56:01 -08:00
committed by GitHub
4 changed files with 108 additions and 5 deletions

View File

@@ -109,7 +109,7 @@ def auto_response(message, snr, rssi, hop, pkiStatus, message_from_id, channel_n
"setsms": lambda: handle_sms( message_from_id, message),
"sitrep": lambda: handle_lheard(message, message_from_id, deviceID, isDM),
"sms:": lambda: handle_sms(message_from_id, message),
"solar": lambda: drap_xray_conditions() + "\n" + solar_conditions(),
"solar": lambda: drap_xray_conditions() + "\n" + solar_conditions() + "\n" + get_noaa_scales_summary(),
"sun": lambda: handle_sun(message_from_id, deviceID, channel_number),
"survey": lambda: surveyHandler(message, message_from_id, deviceID),
"s:": lambda: surveyHandler(message, message_from_id, deviceID),
@@ -1917,15 +1917,31 @@ def onReceive(packet, interface):
channel_name, _ = res
except Exception:
channel_name = "unknown"
else:
# Search all interfaces for this channel
cache = build_channel_cache()
found_on_other = None
for device in cache:
for chan_name, info in device.get("channels", {}).items():
if str(info.get('number')) == str(channel_number) or str(info.get('hash')) == str(channel_number):
found_on_other = device.get("interface_id")
found_chan_name = chan_name
break
if found_on_other:
break
if found_on_other and found_on_other != rxNode:
logger.debug(
f"System: Received Packet on Channel:{channel_number} ({found_chan_name}) on Interface:{rxNode}, but this channel is configured on Interface:{found_on_other}"
)
except Exception as e:
logger.debug(f"System: channel resolution error: {e}")
#debug channel info
# if "unknown" in str(channel_name):
# logger.debug(f"System: Received Packet on Channel:{channel_number} on Interface:{rxNode}")
# else:
# logger.debug(f"System: Received Packet on Channel:{channel_number} Name:{channel_name} on Interface:{rxNode}")
# check if the packet has a simulator flag
simulator_flag = packet.get('decoded', {}).get('simulator', False)
if isinstance(simulator_flag, dict):

View File

@@ -68,6 +68,77 @@ def drap_xray_conditions():
xray_flux = ERROR_FETCHING_DATA
return xray_flux
def get_noaa_scales_summary():
    """
    Summarize NOAA SWPC space-weather scales.

    Fetches the noaa-scales.json product and reports today's peak
    geomagnetic (G) level and the strongest predicted (future-dated)
    level, with radiation-storm (S) and radio-blackout (R) lines
    appended only when they are non-"none".

    Returns a newline-joined summary string, NO_ALERTS when the HTTP
    response is not OK, or ERROR_FETCHING_DATA on any other failure.
    """
    try:
        resp = requests.get("https://services.swpc.noaa.gov/products/noaa-scales.json", timeout=urlTimeoutSeconds)
        if not resp.ok:
            return NO_ALERTS
        payload = resp.json()
        utc_today = datetime.utcnow().date()
        # Trackers: most recent record today, today's strongest G level,
        # and the strongest G level among forecast (future-dated) records.
        newest_entry, newest_dt = None, None
        todays_peak, todays_peak_scale = None, -1
        forecast_peak, forecast_peak_scale = None, -1
        for record in payload.values():
            stamp_date = record.get("DateStamp")
            stamp_time = record.get("TimeStamp")
            if not (stamp_date and stamp_time):
                continue
            try:
                when = datetime.strptime(f"{stamp_date} {stamp_time}", "%Y-%m-%d %H:%M:%S")
                geo = record.get("G", {})
                raw_scale = geo.get("Scale")
                # Empty/None/0 scale strings fall back to -1 (no data).
                geo_scale = int(raw_scale) if raw_scale else -1
                if when.date() == utc_today:
                    # Most recent observation today.
                    if newest_dt is None or when > newest_dt:
                        newest_dt, newest_entry = when, record
                    # Strongest G level seen today.
                    if geo_scale > todays_peak_scale:
                        todays_peak_scale, todays_peak = geo_scale, record
                elif when.date() > utc_today:
                    # Forecast record: keep the strongest predicted G level.
                    if geo_scale > forecast_peak_scale:
                        forecast_peak_scale, forecast_peak = geo_scale, record
            except Exception:
                # Skip malformed records rather than abort the summary.
                continue

        def format_entry(label, entry):
            # Render one record as "<label> <G text> (G:n)" plus optional
            # S/R lines when those scales indicate real activity.
            if not entry:
                return f"{label}: No data"
            g = entry.get("G", {})
            s = entry.get("S", {})
            r = entry.get("R", {})
            parts = [f"{label} {g.get('Text', 'N/A')} (G:{g.get('Scale', 'N/A')})"]
            # Only show storm if it's happening
            if s.get("Text") and s.get("Text") != "none":
                parts.append(f"Currently:{s.get('Text')} (S:{s.get('Scale', 'N/A')})")
            # Only show blackout if it's not "none" or scale is not 0
            if r.get("Text") and r.get("Text") != "none" and r.get("Scale") not in [None, "0", 0]:
                parts.append(f"RF Blackout:{r.get('Text')} (R:{r.get('Scale', 'N/A')})")
            return "\n".join(parts)

        # newest_entry is tracked but intentionally not reported:
        #output.append(format_entry("Latest Observed", latest_entry))
        summary_lines = [
            format_entry("24hrMax:", todays_peak),
            format_entry("Predicted:", forecast_peak),
        ]
        return "\n".join(summary_lines)
    except Exception as e:
        logger.warning(f"Error fetching services.swpc.noaa.gov: {e}")
        return ERROR_FETCHING_DATA
def get_sun(lat=0, lon=0):
# get sunrise and sunset times using callers location or default
obs = ephem.Observer()

View File

@@ -4,7 +4,6 @@
import meshtastic.serial_interface #pip install meshtastic or use launch.sh for venv
import meshtastic.tcp_interface
import meshtastic.ble_interface
from meshtastic.util import generate_channel_hash
import time
import asyncio
import random
@@ -407,7 +406,7 @@ def build_channel_cache(force_refresh: bool = False):
ch_hash_table_raw = node.get_channels_with_hash()
#print(f"System: Device{i} Channel Hash Table: {ch_hash_table_raw}")
except Exception:
logger.warning(f"System: update meshtastic API 2.7.4 +")
logger.warning(f"System: API version error update API `pip3 install --upgrade meshtastic[cli]`")
ch_hash_table_raw = []
channel_dict = {}
@@ -441,6 +440,7 @@ def refresh_channel_cache():
return build_channel_cache(force_refresh=True)
channel_list = build_channel_cache()
#print(f"System: Channel Cache Built: {channel_list}")
#### FUN-ctions ####
def resolve_channel_name(channel_number, rxNode=1, interface_obj=None):

View File

@@ -287,6 +287,22 @@ def onReceive(packet, interface):
channel_name, _ = res
except Exception:
channel_name = "unknown"
else:
# Search all interfaces for this channel
cache = build_channel_cache()
found_on_other = None
for device in cache:
for chan_name, info in device.get("channels", {}).items():
if str(info.get('number')) == str(channel_number) or str(info.get('hash')) == str(channel_number):
found_on_other = device.get("interface_id")
found_chan_name = chan_name
break
if found_on_other:
break
if found_on_other and found_on_other != rxNode:
logger.debug(
f"System: Received Packet on Channel:{channel_number} ({found_chan_name}) on Interface:{rxNode}, but this channel is configured on Interface:{found_on_other}"
)
except Exception as e:
logger.debug(f"System: channel resolution error: {e}")