# -*- coding: utf-8 -*-
"""MeshBot (BBS) web dashboard report generator.

Parses meshbot*.log files, gathers system/database state, and renders a set
of static HTML reports into the web root. Run directly (``python web_reporter.py``).
"""
import os
import re
import sys
import glob
import json
import pickle
import platform
import requests
import subprocess
import configparser
from string import Template
from datetime import datetime
from importlib.metadata import version
from collections import Counter, defaultdict

# global variables
LOG_PATH = '/opt/meshing-around/logs'  # override path to log files (defaults to ../log)
W3_PATH = '/var/www/html/'  # override path to web server root (defaults to ../www)
multiLogReader = False  # set to True to read all logs in ../log
shameWordList = ['password', 'combo', 'key', 'hidden', 'secret', 'pass', 'token',
                 'login', 'username', 'admin', 'root']

# system variables
script_dir = os.path.dirname(os.path.realpath(__file__))
www_dir = os.path.join(script_dir, 'www')
config_file = os.path.join(script_dir, 'web_reporter.cfg')

# set up report.cfg as ini file
config = configparser.ConfigParser()
try:
    config.read(config_file)
except Exception as e:
    print(f"Error reading web_reporter.cfg: {str(e)} generating default config")
if config.sections() == []:
    print(f"web_reporter.cfg is empty or does not exist, generating default config")
    config['reporting'] = {
        'log_path': script_dir,
        'w3_path': www_dir,
        'multi_log_reader': 'True',
        'shame_word_list': 'password, combo, key, hidden, secret, pass, token, login, username, admin, root',
    }
    with open(config_file, 'w') as configfile:
        config.write(configfile)

# read config file
LOG_PATH = config['reporting'].get('log_path', LOG_PATH)
W3_PATH = config['reporting'].get('w3_path', W3_PATH)
multiLogReader = config['reporting'].getboolean('multi_log_reader', multiLogReader)
# config['reporting']['shame_word_list'] is a comma-separated string
shameWordList = config['reporting'].get('shame_word_list', '')
if isinstance(shameWordList, str):
    shameWordList = shameWordList.split(', ')


def parse_log_file(file_path):
    """Parse one meshbot log file (or all of ../logs when multiLogReader is set).

    Populates and returns the module-global ``log_data`` dict of counters,
    timestamps, users, warnings/errors, GPS points and node/telemetry info.
    Exits the process when the single requested log file is missing.
    """
    global log_data
    lines = ['']
    log_files = []
    # see if many logs are present
    if multiLogReader:
        # read every meshbot*.log from the default project ../logs directory
        log_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'logs')
        log_files = glob.glob(os.path.join(log_dir, 'meshbot*.log'))
        print(f"Checking log files: {log_files}")
        if log_files:
            log_files.sort()
            for logFile in log_files:
                with open(os.path.join(log_dir, logFile), 'r') as file:
                    lines += file.readlines()
    else:
        try:
            print(f"Checking log file: {file_path}")
            with open(file_path, 'r') as file:
                lines = file.readlines()
        except FileNotFoundError:
            print(f"Error: File not found at {file_path}")
            sys.exit(1)

    if multiLogReader:
        print(f"Consumed {len(lines)} lines from {len(log_files)} log files")
    else:
        print(f"Consumed {len(lines)} lines from {file_path}")

    log_data = {
        'command_counts': Counter(),
        'message_types': Counter(),
        'llm_queries': Counter(),
        'unique_users': set(),
        'warnings': [],
        'errors': [],
        'hourly_activity': defaultdict(int),
        'bbs_messages': 0,
        'messages_waiting': 0,
        'total_messages': 0,
        'gps_coordinates': defaultdict(list),
        'command_timestamps': [],
        'message_timestamps': [],
        'firmware1_version': "N/A",
        'firmware2_version': "N/A",
        'node1_uptime': "N/A",
        'node2_uptime': "N/A",
        'node1_name': "N/A",
        'node2_name': "N/A",
        'node1_ID': "N/A",
        'node2_ID': "N/A",
        'shameList': []
    }

    for line in lines:
        # FIX: timestamp was previously unbound for lines without a leading
        # timestamp, causing a NameError in the branches below; guard with None.
        timestamp = None
        timestamp_match = re.match(r'(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}),\d+', line)
        if timestamp_match:
            timestamp = datetime.strptime(timestamp_match.group(1), '%Y-%m-%d %H:%M:%S')
            log_data['hourly_activity'][timestamp.strftime('%Y-%m-%d %H:00:00')] += 1

        if 'Bot detected Commands' in line or 'LLM Query:' in line or 'PlayingGame' in line:
            # get the command and user from the line
            command = re.search(r"'cmd': '(\w+)'", line)
            user = re.search(r"From: (.+)$", line)
            # FIX: command/user were computed but never recorded for detected commands
            if command and timestamp:
                log_data['command_counts'][command.group(1)] += 1
                log_data['command_timestamps'].append((timestamp.isoformat(), command.group(1)))
            if user:
                log_data['unique_users'].add(user.group(1))
            if 'LLM Query:' in line and timestamp:
                log_data['command_counts']['LLM Query'] += 1
                log_data['command_timestamps'].append((timestamp.isoformat(), 'LLM Query'))
            if 'PlayingGame' in line:
                # log line looks like this:
                # 2024-10-04 20:24:53,381 | DEBUG | System: 862418040 PlayingGame BlackJack last_cmd: new
                game = re.search(r'PlayingGame (\w+)', line)
                user = re.search(r'System: (\d+)', line)
                # FIX: guard the match and append the game name, not the Match object
                if game and timestamp:
                    log_data['command_counts'][game.group(1)] += 1
                    log_data['command_timestamps'].append((timestamp.isoformat(), game.group(1)))

        if ('Sending DM:' in line or 'Sending Multi-Chunk DM:' in line or
                'SendingChannel:' in line or 'Sending Multi-Chunk Message:' in line):
            log_data['message_types']['Outgoing DM'] += 1
            log_data['total_messages'] += 1
            if timestamp:
                log_data['message_timestamps'].append((timestamp.isoformat(), 'Outgoing DM'))

        if ('Received DM:' in line or 'Ignoring DM:' in line or 'Ignoring Message:' in line or
                'ReceivedChannel:' in line or 'LLM Query:' in line):
            log_data['message_types']['Incoming DM'] += 1
            log_data['total_messages'] += 1
            # include a little of the message
            if timestamp:
                if 'Ignoring Message:' in line:
                    log_data['message_timestamps'].append(
                        (timestamp.isoformat(), f'Incoming: {line.split("Ignoring Message:")[1][:90]}'))
                elif 'Ignoring DM:' in line:
                    log_data['message_timestamps'].append(
                        (timestamp.isoformat(), f'Incoming: {line.split("Ignoring DM:")[1][:90]}'))
                elif 'LLM Query:' in line:
                    log_data['message_timestamps'].append(
                        (timestamp.isoformat(), f'Incoming: {line.split("LLM Query:")[1][:90]}'))
                else:
                    log_data['message_timestamps'].append((timestamp.isoformat(), 'Incoming:'))

            # check for shame words in the message
            for word in shameWordList:
                if word in line.lower():
                    if line not in log_data['shameList']:
                        # strip log noise before publishing the shamed line
                        line = line.replace('Ignoring Message:', '')
                        line = line.replace('|', '')
                        line = line.replace('INFO', '')
                        line = line.replace('DEBUG', '')
                        log_data['shameList'].insert(0, line)

            # get the user who sent the message
            if 'To: ' in line:
                user_match = re.search(r"From: '([^']+)'(?: To:|$)", line)
            else:
                user_match = re.search(r"From: (.+)$", line)
            if user_match:
                log_data['unique_users'].add(user_match.group(1))

        # Error Logs
        if 'WARNING |' in line:
            # remove some junk from the line
            line = line.replace('|', '')
            # FIX: was replace(' ', ' ') — a no-op; collapse doubled spaces instead
            line = line.replace('  ', ' ')
            log_data['warnings'].insert(0, line)
        if 'ERROR |' in line or 'CRITICAL |' in line:
            # remove some junk from the line
            line = line.replace('System:', '')
            line = line.replace('|', '')
            line = line.replace('  ', ' ')
            log_data['errors'].insert(0, line)

        # bbs messages
        bbs_match = re.search(r'📡BBSdb has (\d+) messages.*?Messages waiting: (\d+)', line)
        if bbs_match:
            log_data['bbs_messages'] = int(bbs_match.group(1))
            log_data['messages_waiting'] = int(bbs_match.group(2))

        gps_match = re.search(r'location data for (\d+) is ([-\d.]+),([-\d.]+)', line)
        if gps_match:
            node_id, lat, lon = gps_match.groups()
            log_data['gps_coordinates'][node_id].append((float(lat), float(lon)))

        # get telemetry data
        # example line = | Telemetry:1 numPacketsTx:-1 numPacketsRx:-1 numPacketsTxErr:-1 numPacketsRxErr:-1
        #   ChUtil%:0.0 AirTx%:0.0 Rx#:-1 Tx#:-1 Nodes:2 Uptime:11d Volt:4.3 Firmware:2.5.2.771cb52
        # NOTE(review): this pattern's field names/order do not match the example
        # above (e.g. totalNodes/Online, Uptime:\d+h) — confirm against live logs.
        telemetry_match = re.search(
            r'Telemetry:(\d+) numPacketsRx:(\d+) numPacketsRxErr:(\d+) numPacketsTx:(\d+) '
            r'numPacketsTxErr:(\d+) ChUtil%:(\d+\.\d+) AirTx%:(\d+\.\d+) totalNodes:(\d+) '
            r'Online:(\d+) Uptime:(\d+h) Volt:(\d+\.\d+) Firmware:(\d+\.\d+\.\d+\.\w+)', line)
        if telemetry_match:
            (interface_number, numPacketsRx, numPacketsRxErr, numPacketsTx, numPacketsTxErr,
             ChUtil, AirTx, nodes, online, uptime, volt, firmware_version) = telemetry_match.groups()
            data = (f"Tx: {numPacketsTx} Rx: {numPacketsRx} Uptime: {uptime} Volt: {volt} "
                    f"numPacketsRxErr: {numPacketsRxErr} numPacketsTxErr: {numPacketsTxErr} "
                    f"ChUtil: {ChUtil} AirTx: {AirTx} Nodes: {nodes} Online: {online}")
            if interface_number == '1':
                log_data['firmware1_version'] = firmware_version
                log_data['node1_uptime'] = data
            elif interface_number == '2':
                # FIX: node 2 fields existed in log_data but were never populated
                log_data['firmware2_version'] = firmware_version
                log_data['node2_uptime'] = data

        # get name and nodeID for devices
        if 'Autoresponder Started for Device' in line:
            device_match = re.search(r'Autoresponder Started for Device(\d+)\s+([^\s,]+).*?NodeID: (\d+)', line)
            if device_match:
                device_id = device_match.group(1)
                device_name = device_match.group(2)
                node_id = device_match.group(3)
                if device_id == '1':
                    log_data['node1_name'] = device_name
                    log_data['node1_ID'] = node_id
                elif device_id == '2':
                    log_data['node2_name'] = device_name
                    log_data['node2_ID'] = node_id

    log_data['unique_users'] = list(log_data['unique_users'])
    return log_data


def get_system_info():
    """Collect host OS metrics plus node/CLI info; reads the global log_data."""
    def get_command_output(command):
        # best-effort shell probe; "N/A" on failure
        try:
            return subprocess.check_output(command, shell=True).decode('utf-8').strip()
        except subprocess.CalledProcessError:
            return "N/A"

    # Capture some system information from log_data
    firmware1_version = log_data['firmware1_version']
    firmware2_version = log_data['firmware2_version']
    node1_uptime = log_data['node1_uptime']
    node2_uptime = log_data['node2_uptime']
    node1_name = log_data['node1_name']
    node2_name = log_data['node2_name']
    node1_ID = log_data['node1_ID']
    node2_ID = log_data['node2_ID']

    # FIX: cli_web/cli_local were unbound (NameError at return) when either
    # lookup below failed; default both before trying.
    cli_web = "N/A"
    cli_local = "N/A"
    # get Meshtastic CLI version on web
    try:
        url = "https://pypi.org/pypi/meshtastic/json"
        data = requests.get(url, timeout=5).json()
        pypi_version = data["info"]["version"]
        cli_web = f"v{pypi_version}"
    except Exception:
        pass
    # get Meshtastic CLI version on local
    try:
        cli_local = version("meshtastic")
    except Exception:
        pass  # meshtastic not installed, or Python 3.7 and below, meh..

    if platform.system() == "Linux":
        uptime = get_command_output("uptime -p")
        memory_total = get_command_output("free -m | awk '/Mem:/ {print $2}'")
        memory_available = get_command_output("free -m | awk '/Mem:/ {print $7}'")
        disk_total = get_command_output("df -h / | awk 'NR==2 {print $2}'")
        disk_free = get_command_output("df -h / | awk 'NR==2 {print $4}'")
    elif platform.system() == "Darwin":  # macOS
        uptime = get_command_output("uptime | awk '{print $3,$4,$5}'")
        memory_total = get_command_output("sysctl -n hw.memsize | awk '{print $0/1024/1024}'")
        memory_available = "N/A"  # Not easily available on macOS without additional tools
        disk_total = get_command_output("df -h / | awk 'NR==2 {print $2}'")
        disk_free = get_command_output("df -h / | awk 'NR==2 {print $4}'")
    else:
        return {
            'uptime': "N/A", 'memory_total': "N/A", 'memory_available': "N/A",
            'disk_total': "N/A", 'disk_free': "N/A",
            'interface1_version': "N/A", 'interface2_version': "N/A",
            'node1_uptime': "N/A", 'node2_uptime': "N/A",
            'node1_name': "N/A", 'node2_name': "N/A",
            'node1_ID': "N/A", 'node2_ID': "N/A",
            'cli_web': "N/A", 'cli_local': "N/A"
        }

    return {
        'uptime': uptime,
        'memory_total': f"{memory_total} MB",
        'memory_available': f"{memory_available} MB" if memory_available != "N/A" else "N/A",
        'disk_total': disk_total,
        'disk_free': disk_free,
        'interface1_version': firmware1_version,
        'interface2_version': firmware2_version,
        'node1_uptime': node1_uptime,
        'node2_uptime': node2_uptime,
        'node1_name': node1_name,
        'node2_name': node2_name,
        'node1_ID': node1_ID,
        'node2_ID': node2_ID,
        'cli_web': cli_web,
        'cli_local': cli_local
    }


def get_wall_of_shame():
    """Return template values for the wall-of-shame page from log_data."""
    # Get the wall of shame out of the log data
    logShameList = log_data['shameList']
    # future space for other ideas
    # NOTE(review): list markup appears to have been lost upstream; rendered
    # here as plain bullet lines.
    return {
        'shame': ', '.join(shameWordList),
        'shameList': '\n'.join(f'  • {line}' for line in logShameList),
    }


def get_database_info():
    """Read ../config.ini plus the pickled game/BBS databases for reporting."""
    # ../config.ini location to script path
    config_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'config.ini')
    # get config.ini variables
    bot_config = configparser.ConfigParser()
    bot_config.read(config_path)
    # FIX: direct section indexing raised KeyError when [bbs]/[sentry] were
    # missing; use fallback-aware lookups instead.
    banList = bot_config.get('bbs', 'bbs_ban_list', fallback='none')
    adminList = bot_config.get('bbs', 'bbs_admin_list', fallback='none')
    sentryIgnoreList = bot_config.get('sentry', 'sentryIgnoreList', fallback='none')

    # Define the base directory
    base_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'data'))
    # data files
    databaseFiles = [os.path.join(base_dir, 'lemonstand_hs.pkl'),
                     os.path.join(base_dir, 'dopewar_hs.pkl'),
                     os.path.join(base_dir, 'blackjack_hs.pkl'),
                     os.path.join(base_dir, 'videopoker_hs.pkl'),
                     os.path.join(base_dir, 'mmind_hs.pkl'),
                     os.path.join(base_dir, 'golfsim_hs.pkl'),
                     os.path.join(base_dir, 'bbsdb.pkl'),
                     os.path.join(base_dir, 'bbsdm.pkl')]

    for file in databaseFiles:
        try:
            with open(file, 'rb') as f:
                if 'lemonstand' in file:
                    lemon_score = pickle.load(f)
                elif 'dopewar' in file:
                    dopewar_score = pickle.load(f)
                elif 'blackjack' in file:
                    blackjack_score = pickle.load(f)
                elif 'videopoker' in file:
                    videopoker_score = pickle.load(f)
                elif 'mmind' in file:
                    mmind_score = pickle.load(f)
                elif 'golfsim' in file:
                    golfsim_score = pickle.load(f)
                elif 'bbsdb' in file:
                    bbsdb = pickle.load(f)
                elif 'bbsdm' in file:
                    bbsdm = pickle.load(f)
        except Exception as e:
            print(f"Error reading database file: {str(e)}")
            if 'lemonstand' in file:
                lemon_score = "no data"
            elif 'dopewar' in file:
                dopewar_score = "no data"
            elif 'blackjack' in file:
                blackjack_score = "no data"
            elif 'videopoker' in file:
                videopoker_score = "no data"
            elif 'mmind' in file:
                mmind_score = "no data"
            elif 'golfsim' in file:
                golfsim_score = "no data"
            elif 'bbsdb' in file:
                bbsdb = "no data"
            elif 'bbsdm' in file:
                bbsdm = "no data"

    # pretty print the bbsdb
    prettyBBSdb = ""
    try:
        for msg in bbsdb:
            prettyBBSdb += f'  • {msg}\n'
    except Exception as e:
        print(f"Error with database: {str(e)}")

    # pretty print the bbsdm
    prettyBBSdm = ""
    try:
        for msg in bbsdm:
            prettyBBSdm += f'  • {msg}\n'
    except Exception as e:
        print(f"Error with database: {str(e)}")

    if 'no data' in [lemon_score, dopewar_score, blackjack_score,
                     videopoker_score, mmind_score, golfsim_score]:
        database = "Error(s) Detected"
    else:
        database = " Online"

    return {
        'database': database,
        "bbsdb": prettyBBSdb,
        "bbsdm": prettyBBSdm,
        'lemon_score': lemon_score,
        'dopewar_score': dopewar_score,
        'blackjack_score': blackjack_score,
        'videopoker_score': videopoker_score,
        'mmind_score': mmind_score,
        'golfsim_score': golfsim_score,
        'banList': banList,
        'adminList': adminList,
        'sentryIgnoreList': sentryIgnoreList
    }


def generate_main_html(log_data, system_info):
    """Render the main dashboard page from log_data (system_info unused here)."""
    # NOTE(review): the HTML markup of this template appears to have been
    # stripped in transit; only the visible text and ${placeholders} survive.
    html_template = """ MeshBot (BBS) Web Dashboard
    MeshBot (BBS) Web Dashboard
    Node Locations
    Network Activity
    Command Usage
    Message Types
    BBS Stored Message Counts
    Recent Commands
      ${command_timestamps}
    Recent Messages
      ${message_timestamps}
    Unique Users
    Warnings
    Errors
    """
    template = Template(html_template)
    return template.safe_substitute(
        date=datetime.now().strftime('%Y_%m_%d'),
        command_data=json.dumps(log_data['command_counts']),
        message_data=json.dumps(log_data['message_types']),
        activity_data=json.dumps(log_data['hourly_activity']),
        bbs_messages=log_data['bbs_messages'],
        messages_waiting=log_data['messages_waiting'],
        total_messages=log_data['total_messages'],
        # FIX: LLM queries are tallied in command_counts; message_types never
        # receives an 'LLM Query' key, so the old lookup was always 0.
        total_llm_queries=log_data['command_counts']['LLM Query'],
        gps_coordinates=json.dumps(log_data['gps_coordinates']),
        unique_users='\n'.join(f'  • {user}' for user in log_data['unique_users']),
        warnings='\n'.join(f'  • {warning}' for warning in log_data['warnings']),
        errors='\n'.join(f'  • {error}' for error in log_data['errors']),
        command_timestamps='\n'.join(
            f'  • {timestamp}: {cmd}'
            for timestamp, cmd in reversed(log_data['command_timestamps'][-50:])),
        message_timestamps='\n'.join(
            f'  • {timestamp}: {msg_type}'
            for timestamp, msg_type in reversed(log_data['message_timestamps'][-50:]))
    )


def generate_network_map_html(log_data):
    """Render the node-location map page."""
    # NOTE(review): template markup appears stripped upstream; confirm the
    # ${gps_coordinates} placeholder against the original page source.
    html_template = """ Network Map
    """
    template = Template(html_template)
    return template.safe_substitute(gps_coordinates=json.dumps(log_data['gps_coordinates']))


def generate_sys_hosts_html(system_info):
    """Render the host/system information page from a system_info dict."""
    html_template = """ System Host Information

    System Host Information

    OS MetricValue
    Uptime${uptime}
    Total Memory${memory_total}
    Available Memory${memory_available}
    Total Disk Space${disk_total}
    Free Disk Space${disk_free}
    Meshtastic MetricValue
    API Version/Latest${cli_local} / ${cli_web}
    Int1 Name ID${node1_name} (${node1_ID})
    Int1 Stat${node1_uptime}
    Int1 FW Version${interface1_version}
    Int2 Name ID${node2_name} (${node2_ID})
    Int2 Stat${node2_uptime}
    Int2 FW Version${interface2_version}
    """
    template = Template(html_template)
    return template.safe_substitute(system_info)


def generate_wall_of_shame_html(shame_info):
    """Render the wall-of-shame page from a shame_info dict."""
    html_template = """ Wall Of Shame

    Collected Shame

    Shame MetricValue
    Shamefull words${shame}
    Shamefull messages${shameList}
    """
    template = Template(html_template)
    return template.safe_substitute(shame_info)


def generate_database_html(database_info):
    """Render the database/high-score page from a database_info dict."""
    html_template = """ Database Information

    Database Information

    Connection ${database}

    config.ini SettingsValue
    Admin List${adminList}
    Ban List${banList}
    Sentry Ignore List${sentryIgnoreList}

    BBS Message Database

    BBSdb: ${bbsdb}

    BBSdm: ${bbsdm}

    High Scores

    GameHigh Score
    Lemonade Stand${lemon_score}
    Dopewars${dopewar_score}
    Blackjack${blackjack_score}
    Video Poker${videopoker_score}
    Mastermind${mmind_score}
    Golf Simulator${golfsim_score}
    """
    template = Template(html_template)
    return template.safe_substitute(database_info)


def main():
    """Parse today's log, build all reports, and write them to the web root."""
    log_dir = LOG_PATH
    today = datetime.now().strftime('%Y_%m_%d')
    log_file = f'meshbot{today}.log'
    log_path = os.path.join(log_dir, log_file)
    if not os.path.exists(log_path):
        # fall back to the default project ../logs directory
        file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'logs')
        file_path = os.path.abspath(file_path)
        log_path = os.path.join(file_path, log_file)

    log_data = parse_log_file(log_path)
    system_info = get_system_info()
    shame_info = get_wall_of_shame()
    database_info = get_database_info()

    main_html = generate_main_html(log_data, system_info)
    network_map_html = generate_network_map_html(log_data)
    hosts_html = generate_sys_hosts_html(system_info)
    wall_of_shame = generate_wall_of_shame_html(shame_info)
    database_html = generate_database_html(database_info)

    output_dir = W3_PATH
    index_path = os.path.join(output_dir, 'index.html')
    print(f"\n\nMeshBot (BBS) Web Dashboard Report Generator")
    print(f"\nMain dashboard: file://{index_path}\n")

    try:
        if not os.path.exists(output_dir):
            # fall back to the bundled ./www directory next to this script
            output_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'www')
            output_dir = os.path.abspath(output_dir)
            index_path = os.path.join(output_dir, 'index.html')

        # Create backup of existing index.html if it exists
        if os.path.exists(index_path):
            backup_path = os.path.join(output_dir, f'index_backup_{today}.html')
            os.rename(index_path, backup_path)
            print(f"Existing index.html backed up to {backup_path}")

        # Write main HTML to index.html
        with open(index_path, 'w') as f:
            f.write(main_html)
        print(f"Main dashboard written to {index_path}")

        # Write other HTML files
        with open(os.path.join(output_dir, f'network_map_{today}.html'), 'w') as f:
            f.write(network_map_html)
        with open(os.path.join(output_dir, f'hosts_{today}.html'), 'w') as f:
            f.write(hosts_html)
        with open(os.path.join(output_dir, f'wall_of_shame_{today}.html'), 'w') as f:
            f.write(wall_of_shame)
        with open(os.path.join(output_dir, f'database_{today}.html'), 'w') as f:
            f.write(database_html)

        print(f"HTML reports generated for {today} in {output_dir}")
    except PermissionError:
        print("Error: Permission denied. Please run the script with appropriate permissions (e.g., using sudo).")
    except Exception as e:
        print(f"An error occurred while writing the output: {str(e)}")


if __name__ == "__main__":
    main()