# -*- coding: utf-8 -*-
# HTML5
import os
import re
import sys
import glob
import json
import pickle
import platform
import requests
import subprocess
import configparser
from string import Template
from datetime import datetime
from importlib.metadata import version
from collections import Counter, defaultdict
# global variables (hard-coded fallbacks; overridden by web_reporter.cfg below)
LOG_PATH = '/opt/meshing-around/logs' # override path to log files (defaults to ../log)
W3_PATH = '/var/www/html/' # override path to web server root (defaults to ../www)
multiLogReader = False # set to True to read all meshbot logs in ../log
shameWordList = ['password', 'combo', 'key', 'hidden', 'secret', 'pass', 'token', 'login', 'username', 'admin', 'root']
# system variables
script_dir = os.path.dirname(os.path.realpath(__file__))
www_dir = os.path.join(script_dir, 'www')
config_file = os.path.join(script_dir, 'web_reporter.cfg')
# set up report.cfg as ini file
config = configparser.ConfigParser()
try:
    config.read(config_file)
except Exception as e:
    print(f"Error reading web_reporter.cfg: {str(e)} generating default config")
# A missing file, an empty file, or a file without a [reporting] section all
# fall back to a freshly generated default config (the original only checked
# for no sections at all, then crashed on config['reporting'] below).
if 'reporting' not in config:
    print(f"web_reporter.cfg is empty or does not exist, generating default config")
    config['reporting'] = {'log_path': script_dir, 'w3_path': www_dir, 'multi_log_reader': 'True', 'shame_word_list': 'password, combo, key, hidden, secret, pass, token, login, username, admin, root'}
    with open(config_file, 'w') as configfile:
        config.write(configfile)
# read config file, keeping the hard-coded defaults above as fallbacks
LOG_PATH = config['reporting'].get('log_path', LOG_PATH)
W3_PATH = config['reporting'].get('w3_path', W3_PATH)
multiLogReader = config['reporting'].getboolean('multi_log_reader', multiLogReader)
# shame_word_list is a comma-separated string; strip whitespace around each
# entry so 'a,b' and 'a, b' both parse, and drop empties (an empty string
# in the list would otherwise match every log line in parse_log_file)
shameWordList = config['reporting'].get('shame_word_list', '')
if isinstance(shameWordList, str):
    shameWordList = [word.strip() for word in shameWordList.split(',') if word.strip()]
def parse_log_file(file_path):
    """Parse meshbot log file(s) into the module-level log_data dict.

    Reads a single log at file_path, or every meshbot*.log under ../logs
    when the module-level multiLogReader flag is True, and aggregates
    per-line statistics: command counts, incoming/outgoing message traffic,
    unique users, warnings/errors, BBS stats, GPS fixes, telemetry and
    node identity.

    Args:
        file_path: path to a single meshbot log file (ignored when
            multiLogReader is True).

    Returns:
        The populated log_data dict (also published as a module global).

    Exits the process (sys.exit(1)) if the single log file is missing.
    """
    global log_data
    lines = ['']
    # see if many logs are present
    if multiLogReader:
        # read every meshbot*.log in the default project ../logs directory
        log_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'logs')
        log_files = glob.glob(os.path.join(log_dir, 'meshbot*.log'))
        print(f"Checking log files: {log_files}")
        if log_files:
            log_files.sort()
            for logFile in log_files:
                with open(os.path.join(log_dir, logFile), 'r') as file:
                    lines += file.readlines()
    else:
        try:
            print(f"Checking log file: {file_path}")
            with open(file_path, 'r') as file:
                lines = file.readlines()
        except FileNotFoundError:
            print(f"Error: File not found at {file_path}")
            sys.exit(1)
    if multiLogReader:
        print(f"Consumed {len(lines)} lines from {len(log_files)} log files")
    else:
        print(f"Consumed {len(lines)} lines from {file_path}")
    log_data = {
        'command_counts': Counter(),
        'message_types': Counter(),
        'llm_queries': Counter(),
        'unique_users': set(),
        'warnings': [],
        'errors': [],
        'hourly_activity': defaultdict(int),
        'bbs_messages': 0,
        'messages_waiting': 0,
        'total_messages': 0,
        'gps_coordinates': defaultdict(list),
        'command_timestamps': [],
        'message_timestamps': [],
        'firmware1_version': "N/A",
        'firmware2_version': "N/A",
        'node1_uptime': "N/A",
        'node2_uptime': "N/A",
        'node1_name': "N/A",
        'node2_name': "N/A",
        'node1_ID': "N/A",
        'node2_ID': "N/A",
        'shameList': []
    }
    # Most recent timestamp seen; lines without their own timestamp reuse it.
    # Starting at None (and guarding the timestamp-dependent sections below)
    # fixes the NameError the original raised when a matching line appeared
    # before any timestamped line.
    timestamp = None
    for line in lines:
        timestamp_match = re.match(r'(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}),\d+', line)
        if timestamp_match:
            timestamp = datetime.strptime(timestamp_match.group(1), '%Y-%m-%d %H:%M:%S')
            log_data['hourly_activity'][timestamp.strftime('%Y-%m-%d %H:00:00')] += 1
        # -- commands / games / LLM queries -------------------------------
        if timestamp and ('Bot detected Commands' in line or 'LLM Query:' in line or 'PlayingGame' in line):
            # get the command and user from the line
            command = re.search(r"'cmd': '(\w+)'", line)
            user = re.search(r"From: (.+)$", line)
            if 'LLM Query:' in line:
                log_data['command_counts']['LLM Query'] += 1
                log_data['command_timestamps'].append((timestamp.isoformat(), 'LLM Query'))
            if 'PlayingGame' in line:
                # log line looks like this:
                # 2024-10-04 20:24:53,381 | DEBUG | System: 862418040 PlayingGame BlackJack last_cmd: new
                game = re.search(r'PlayingGame (\w+)', line)
                user = re.search(r'System: (\d+)', line)
                if game:
                    game_name = game.group(1)
                    log_data['command_counts'][game_name] += 1
                    # record the matched game name; the original appended the
                    # re.Match object itself instead of the text
                    log_data['command_timestamps'].append((timestamp.isoformat(), game_name))
            if user:
                user = user.group(1)
            if command:
                cmd = command.group(1)
                log_data['command_counts'][cmd] += 1
                # include the user who sent the command
                log_data['command_timestamps'].append((timestamp.isoformat(), cmd + f' from {user}'))
        # -- outgoing messages --------------------------------------------
        if timestamp and ('Sending DM:' in line or 'Sending Multi-Chunk DM:' in line or 'SendingChannel:' in line or 'Sending Multi-Chunk Message:' in line):
            log_data['message_types']['Outgoing DM'] += 1
            log_data['total_messages'] += 1
            log_data['message_timestamps'].append((timestamp.isoformat(), 'Outgoing DM'))
        # -- incoming messages --------------------------------------------
        if timestamp and ('Received DM:' in line or 'Ignoring DM:' in line or 'Ignoring Message:' in line or 'ReceivedChannel:' in line or 'LLM Query:' in line):
            log_data['message_types']['Incoming DM'] += 1
            log_data['total_messages'] += 1
            # include a little of the message
            if 'Ignoring Message:' in line:
                log_data['message_timestamps'].append((timestamp.isoformat(), f'Incoming: {line.split("Ignoring Message:")[1][:90]}'))
            elif 'Ignoring DM:' in line:
                log_data['message_timestamps'].append((timestamp.isoformat(), f'Incoming: {line.split("Ignoring DM:")[1][:90]}'))
            elif 'LLM Query:' in line:
                log_data['message_timestamps'].append((timestamp.isoformat(), f'Incoming: {line.split("LLM Query:")[1][:90]}'))
            else:
                log_data['message_timestamps'].append((timestamp.isoformat(), 'Incoming:'))
            # check for shame words in the message
            for word in shameWordList:
                if word in line.lower():
                    # strip log noise first so the duplicate check compares
                    # against what is actually stored (the original checked
                    # the raw line against a list of cleaned lines, so it
                    # never deduplicated)
                    cleaned = line.replace('Ignoring Message:', '')
                    cleaned = cleaned.replace('|', '')
                    cleaned = cleaned.replace('INFO', '')
                    cleaned = cleaned.replace('DEBUG', '')
                    if cleaned not in log_data['shameList']:
                        log_data['shameList'].insert(0, cleaned)
                    break  # one entry per line, however many words match
            # get the user who sent the message
            if 'To: ' in line:
                user_match = re.search(r"From: '([^']+)'(?: To:|$)", line)
            else:
                user_match = re.search(r"From: (.+)$", line)
            if user_match:
                log_data['unique_users'].add(user_match.group(1))
        # -- warning / error logs -----------------------------------------
        if 'WARNING |' in line:
            # remove some junk from the line
            # NOTE(review): original read replace(' ', ' ') — a no-op;
            # reconstructed as double-space collapse, confirm intent
            line = line.replace('|', '')
            line = line.replace('  ', ' ')
            log_data['warnings'].insert(0, line)
        if 'ERROR |' in line or 'CRITICAL |' in line:
            # remove some junk from the line
            line = line.replace('System:', '')
            line = line.replace('|', '')
            line = line.replace('  ', ' ')
            log_data['errors'].insert(0, line)
        # -- bbs messages -------------------------------------------------
        bbs_match = re.search(r'📡BBSdb has (\d+) messages.*?Messages waiting: (\d+)', line)
        if bbs_match:
            log_data['bbs_messages'] = int(bbs_match.group(1))
            log_data['messages_waiting'] = int(bbs_match.group(2))
        # -- gps fixes ----------------------------------------------------
        gps_match = re.search(r'location data for (\d+) is ([-\d.]+),([-\d.]+)', line)
        if gps_match:
            node_id, lat, lon = gps_match.groups()
            log_data['gps_coordinates'][node_id].append((float(lat), float(lon)))
        # -- telemetry ----------------------------------------------------
        # example line = | Telemetry:1 numPacketsTx:-1 numPacketsRx:-1 numPacketsTxErr:-1 numPacketsRxErr:-1 ChUtil%:0.0 AirTx%:0.0 Rx#:-1 Tx#:-1 Nodes:2 Uptime:11d Volt:4.3 Firmware:2.5.2.771cb52
        # NOTE(review): this pattern does not match the example above (field
        # names/order differ and negative counts are not allowed) — confirm
        # against current meshbot log output
        telemetry_match = re.search(r'Telemetry:(\d+) numPacketsRx:(\d+) numPacketsRxErr:(\d+) numPacketsTx:(\d+) numPacketsTxErr:(\d+) ChUtil%:(\d+\.\d+) AirTx%:(\d+\.\d+) totalNodes:(\d+) Online:(\d+) Uptime:(\d+h) Volt:(\d+\.\d+) Firmware:(\d+\.\d+\.\d+\.\w+)', line)
        if telemetry_match:
            interface_number, numPacketsRx, numPacketsRxErr, numPacketsTx, numPacketsTxErr, ChUtil, AirTx, nodes, online, uptime, volt, firmware_version = telemetry_match.groups()
            data = f"Tx: {numPacketsTx} Rx: {numPacketsRx} Uptime: {uptime} Volt: {volt} numPacketsRxErr: {numPacketsRxErr} numPacketsTxErr: {numPacketsTxErr} ChUtil: {ChUtil} AirTx: {AirTx} Nodes: {nodes} Online: {online}"
            if interface_number == '1':
                log_data['firmware1_version'] = firmware_version
                log_data['node1_uptime'] = data
        # -- device name and nodeID ---------------------------------------
        if 'Autoresponder Started for Device' in line:
            device_match = re.search(r'Autoresponder Started for Device(\d+)\s+([^\s,]+).*?NodeID: (\d+)', line)
            if device_match:
                device_id = device_match.group(1)
                device_name = device_match.group(2)
                node_id = device_match.group(3)
                if device_id == '1':
                    log_data['node1_name'] = device_name
                    log_data['node1_ID'] = node_id
                elif device_id == '2':
                    log_data['node2_name'] = device_name
                    log_data['node2_ID'] = node_id
    # sets are not JSON/template friendly; expose users as a list
    log_data['unique_users'] = list(log_data['unique_users'])
    return log_data
def get_system_info():
    """Gather node and host system stats for the report.

    Combines node data already parsed into log_data with host OS metrics
    (uptime, memory, disk — Linux and macOS only) and the Meshtastic CLI
    version as published on PyPI and as installed locally. Any value that
    cannot be determined is the string "N/A".

    Returns:
        dict of display-ready strings keyed by template placeholder name.
    """
    def get_command_output(command):
        # best-effort shell helper: stripped stdout, or "N/A" on non-zero exit
        try:
            return subprocess.check_output(command, shell=True).decode('utf-8').strip()
        except subprocess.CalledProcessError:
            return "N/A"
    # Capture some system information from log_data
    firmware1_version = log_data['firmware1_version']
    firmware2_version = log_data['firmware2_version']
    node1_uptime = log_data['node1_uptime']
    node2_uptime = log_data['node2_uptime']
    node1_name = log_data['node1_name']
    node2_name = log_data['node2_name']
    node1_ID = log_data['node1_ID']
    node2_ID = log_data['node2_ID']
    # default both CLI versions up front so a failed lookup below cannot
    # leave them unbound (the original hit a NameError at the return when
    # either try block failed)
    cli_web = "N/A"
    cli_local = "N/A"
    # get the latest Meshtastic CLI version published on PyPI
    try:
        url = "https://pypi.org/pypi/meshtastic/json"
        data = requests.get(url, timeout=5).json()
        pypi_version = data["info"]["version"]
        cli_web = f"v{pypi_version}"
    except Exception:
        pass  # offline or PyPI unreachable; keep "N/A"
    # get the Meshtastic CLI version installed locally
    try:
        cli_local = version("meshtastic")
    except Exception:
        pass  # not installed, or package metadata unavailable
    if platform.system() == "Linux":
        uptime = get_command_output("uptime -p")
        memory_total = get_command_output("free -m | awk '/Mem:/ {print $2}'")
        memory_available = get_command_output("free -m | awk '/Mem:/ {print $7}'")
        disk_total = get_command_output("df -h / | awk 'NR==2 {print $2}'")
        disk_free = get_command_output("df -h / | awk 'NR==2 {print $4}'")
    elif platform.system() == "Darwin":  # macOS
        uptime = get_command_output("uptime | awk '{print $3,$4,$5}'")
        memory_total = get_command_output("sysctl -n hw.memsize | awk '{print $0/1024/1024}'")
        memory_available = "N/A"  # Not easily available on macOS without additional tools
        disk_total = get_command_output("df -h / | awk 'NR==2 {print $2}'")
        disk_free = get_command_output("df -h / | awk 'NR==2 {print $4}'")
    else:
        # unsupported platform: everything is N/A, including node info
        return {
            'uptime': "N/A",
            'memory_total': "N/A",
            'memory_available': "N/A",
            'disk_total': "N/A",
            'disk_free': "N/A",
            'interface1_version': "N/A",
            'interface2_version': "N/A",
            'node1_uptime': "N/A",
            'node2_uptime': "N/A",
            'node1_name': "N/A",
            'node2_name': "N/A",
            'node1_ID': "N/A",
            'node2_ID': "N/A",
            'cli_web': "N/A",
            'cli_local': "N/A"
        }
    return {
        'uptime': uptime,
        'memory_total': f"{memory_total} MB",
        'memory_available': f"{memory_available} MB" if memory_available != "N/A" else "N/A",
        'disk_total': disk_total,
        'disk_free': disk_free,
        'interface1_version': firmware1_version,
        'interface2_version': firmware2_version,
        'node1_uptime': node1_uptime,
        'node2_uptime': node2_uptime,
        'node1_name': node1_name,
        'node2_name': node2_name,
        'node1_ID': node1_ID,
        'node2_ID': node2_ID,
        'cli_web': cli_web,
        'cli_local': cli_local
    }
def get_wall_of_shame():
    """Build template values for the wall-of-shame section of the report.

    Returns:
        dict with 'shame' (the watch words joined with commas) and
        'shameList' (one HTML list item per offending log line collected
        by parse_log_file).
    """
    # Get the wall of shame out of the log data
    logShameList = log_data['shameList']
    # future space for other ideas
    return {
        'shame': ', '.join(shameWordList),
        # NOTE(review): the original literal was garbled (an unterminated
        # single-quoted f-string spanning lines — HTML markup lost in
        # extraction); reconstructed as one <li> per line. Confirm against
        # the HTML template this feeds.
        'shameList': '\n'.join(f'<li>{line}</li>' for line in logShameList),
    }
def get_database_info():
# ../config.ini location to script path
config_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'config.ini')
# get config.ini variables
config = configparser.ConfigParser()
config.read(config_path)
# for section in config.sections():
# print(f"Section: {section}")
# for key in config[section]:
# print(f"Key: {key}, Value: {config[section][key]}")
banList = config['bbs'].get('bbs_ban_list', 'none')
adminList = config['bbs'].get('bbs_admin_list', 'none')
sentryIgnoreList = config['sentry'].get('sentryIgnoreList', 'none')
# Define the base directory
base_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'data'))
# data files
databaseFiles = [os.path.join(base_dir, 'lemonstand_hs.pkl'),
os.path.join(base_dir, 'dopewar_hs.pkl'),
os.path.join(base_dir, 'blackjack_hs.pkl'),
os.path.join(base_dir, 'videopoker_hs.pkl'),
os.path.join(base_dir, 'mmind_hs.pkl'),
os.path.join(base_dir, 'golfsim_hs.pkl'),
os.path.join(base_dir, 'bbsdb.pkl'),
os.path.join(base_dir, 'bbsdm.pkl')]
for file in databaseFiles:
try:
with open(file, 'rb') as f:
if 'lemonstand' in file:
lemon_score = pickle.load(f)
elif 'dopewar' in file:
dopewar_score = pickle.load(f)
elif 'blackjack' in file:
blackjack_score = pickle.load(f)
elif 'videopoker' in file:
videopoker_score = pickle.load(f)
elif 'mmind' in file:
mmind_score = pickle.load(f)
elif 'golfsim' in file:
golfsim_score = pickle.load(f)
elif 'bbsdb' in file:
bbsdb = pickle.load(f)
elif 'bbsdm' in file:
bbsdm = pickle.load(f)
except Exception as e:
print(f"Error reading database file: {str(e)}")
if 'lemonstand' in file:
lemon_score = "no data"
elif 'dopewar' in file:
dopewar_score = "no data"
elif 'blackjack' in file:
blackjack_score = "no data"
elif 'videopoker' in file:
videopoker_score = "no data"
elif 'mmind' in file:
mmind_score = "no data"
elif 'golfsim' in file:
golfsim_score = "no data"
elif 'bbsdb' in file:
bbsdb = "no data"
elif 'bbsdm' in file:
bbsdm = "no data"
# pretty print the bbsdb
prettyBBSdb = ""
try:
for i in range(len(bbsdb)):
prettyBBSdb += f'
{bbsdb[i]}
'
except Exception as e:
print(f"Error with database: {str(e)}")
pass
# pretty print the bbsdm
prettyBBSdm = ""
try:
for i in range(len(bbsdm)):
prettyBBSdm += f'