diff --git a/app.py b/app.py
index 7a54506..fe7f2c6 100644
--- a/app.py
+++ b/app.py
@@ -3,10 +3,12 @@ from markupsafe import escape
from flask_socketio import SocketIO, emit
import serial
import threading
+from threading import Timer
import time
from collections import deque
-import pandas as pd
import re
+import requests
+from bs4 import BeautifulSoup
app = Flask(__name__)
socketio = SocketIO(app)
@@ -18,7 +20,6 @@ port3_status = True
global ser1
global ser2
global ser3
-global_dataframe = pd.DataFrame(columns=['Device Name', 'Frequency', 'Signal Strength', 'Plaintext'])
frequency = lambda port: {'port1': 433, 'port2': 868,'port3': 915}.get(port, None)
surveydata = {}
@@ -75,8 +76,78 @@ def read_serial_data(port, ser, buffer):
print(f"Error: {e}")
pass
+def parse_and_store_data():
+ global surveydata
+    url = "http://10.130.1.1/cgi-bin/log-traffic.has" # Dragino gateway traffic-log endpoint (HTML table)
+ headers = {
+ "Host": "10.130.1.1",
+ "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:122.0) Gecko/20100101 Firefox/122.0",
+ "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
+ "Accept-Language": "en-US,en;q=0.5",
+ "Accept-Encoding": "gzip, deflate",
+ "DNT": "1",
+ "Sec-GPC": "1",
+ "Authorization": "Basic cm9vdDpkcmFnaW5v",
+ "Connection": "keep-alive",
+ "Referer": "http://10.130.1.1/cgi-bin/log-lora.has",
+ "Upgrade-Insecure-Requests": "1"
+ }
+
+ response = requests.get(url, headers=headers)
+
+ if response.status_code == 200:
+ soup = BeautifulSoup(response.text, 'html.parser')
+ table = soup.find('table')
+ rows = table.find_all('tr')
+        headers = [header.text.strip() for header in rows[0].find_all('th')][1:] # NOTE(review): rebinds (shadows) the request 'headers' dict above; consider a distinct name like 'column_headers'
+
+ for row in rows[1:]:
+ cells = row.find_all('td')
+ cell_data = [cell.text.strip() for cell in cells[1:] if cells.index(cell) < len(headers) + 1]
+ formatted_row = ' | '.join(cell_data)
+
+ # Extract DevEui or DevAddr from the response
+            dev_id = extract_dev_id(formatted_row) # parsed by the extract_dev_id() helper defined below
+            freq = extract_freq(formatted_row) # parsed by the extract_freq() helper defined below
+
+ # Initialize dictionary for dev_id if not present
+ if dev_id not in surveydata:
+ surveydata[dev_id] = []
+
+ # Append new data to the list associated with the DevEui or DevAddr
+ surveydata[dev_id].append([freq, 0, formatted_row])
+ #surveydata[dev_id]['decoded_values'].append(formatted_row)
+
+ print("Data parsed and stored.")
+
+ else:
+ print(f"Request failed with status code: {response.status_code}")
+
+ # Schedule the next call to this function
+ Timer(60, parse_and_store_data).start() # Call this function every 60 seconds
+def extract_dev_id(formatted_row):
+ # Assuming DevEui or DevAddr is in the 'Content' part of the formatted_row
+ # and it's formatted like 'Dev Addr: {DevEui}, Size: {Size}'
+ try:
+ content_part = formatted_row.split('|')[-1].strip() # Get the last part of the formatted_row, which is 'Content'
+ dev_id = content_part.split(',')[0].split(':')[-1].strip() # Extract the DevEui or DevAddr
+ return dev_id
+ except Exception as e:
+ print(f"Error extracting DevEui/DevAddr: {e}")
+ return None # Return None or some default value if extraction fails
+
+
+def extract_freq(formatted_row):
+ # Assuming 'Freq' is a standalone field in the formatted_row
+ try:
+        freq_part = formatted_row.split('|')[3].strip() # Get the 'Freq' part (index 3 = the FOURTH pipe-delimited field)
+ freq = float(freq_part) # Convert the frequency to float
+ return freq
+ except Exception as e:
+ print(f"Error extracting frequency: {e}")
+ return None # Return None or some default value if extraction fails
def connect_serial(port,frequency):
@@ -144,7 +215,7 @@ def analysis():
@app.route('/survey')
def survey():
- return render_template('survey.html', data=global_dataframe)
+ return render_template('survey.html')
@app.route('/tracking')
def tracking():
@@ -255,8 +326,16 @@ def checkSer():
@app.route('/get_table_data')
def get_table_data():
global surveydata
- print(surveydata)
- return jsonify(surveydata)
+ cleaned_data = {}
+
+ for dev_id, data in surveydata.items():
+ if dev_id: # Check if dev_id is not empty
+ cleaned_data[dev_id] = data
+
+ #print(cleaned_data) # For debugging
+ return jsonify(cleaned_data)
+
if __name__ == '__main__':
+ Timer(60, parse_and_store_data).start()
socketio.run(app, debug=True)
diff --git a/draginoReq.py b/draginoReq.py
new file mode 100644
index 0000000..4b9f5f6
--- /dev/null
+++ b/draginoReq.py
@@ -0,0 +1,58 @@
+import requests
+from bs4 import BeautifulSoup
+
+# Define the URL and headers
+url = "http://10.130.1.1/cgi-bin/log-traffic.has"
+headers = {
+ "Host": "10.130.1.1",
+ "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:122.0) Gecko/20100101 Firefox/122.0",
+ "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
+ "Accept-Language": "en-US,en;q=0.5",
+ "Accept-Encoding": "gzip, deflate",
+ "DNT": "1",
+ "Sec-GPC": "1",
+ "Authorization": "Basic cm9vdDpkcmFnaW5v",
+ "Connection": "keep-alive",
+ "Referer": "http://10.130.1.1/cgi-bin/log-lora.has",
+ "Upgrade-Insecure-Requests": "1"
+}
+
+# Send the GET request
+response = requests.get(url, headers=headers)
+
+if response.status_code == 200:
+ # Parse the HTML content using BeautifulSoup
+ soup = BeautifulSoup(response.text, 'html.parser')
+
+ # Find the table
+ table = soup.find('table')
+
+ # Initialize an empty list to store formatted strings for each row
+ formatted_rows = []
+
+ # Find all table rows
+ rows = table.find_all('tr')
+
+ # Get column headers from the first row
+ # Using .text.strip() to clean the text and [1:] to skip the empty first column
+ headers = [header.text.strip() for header in rows[0].find_all('th')][1:]
+
+ # Iterate through each row (skipping the first row with the headers)
+ for row in rows[1:]:
+ # Find all data cells (td tags) in the row
+ cells = row.find_all('td')
+
+ # Extract text from each cell
+ # Using [1:] to skip the first cell with the arrow icon
+ cell_data = [cell.text.strip() for cell in cells[1:] if cells.index(cell) < len(headers) + 1]
+
+ # Format the row data into a neat line
+ formatted_row = ' | '.join(cell_data)
+
+ # Append the formatted string to the list
+ formatted_rows.append(formatted_row)
+
+ # Print the formatted string to display the row
+ print(formatted_row)
+else:
+ print("Request failed with status code:", response.status_code)
\ No newline at end of file
diff --git a/examplereq b/examplereq
new file mode 100644
index 0000000..eb0ad18
--- /dev/null
+++ b/examplereq
@@ -0,0 +1,1767 @@
+
+
+
+
+