Mirror of https://github.com/skinnyrad/Lora-Scanner.git (synced 2026-03-28 17:43:00 +01:00)
Feat: Integrating the 868 MHz dragino gateway
app.py (65 changed lines)
@@ -82,54 +82,55 @@ def read_serial_data(port, ser, buffer):
 def parse_and_store_data():
     global surveydata
-    url = "http://10.130.1.1/cgi-bin/log-traffic.has" # Your target URL
+    global parsed_entries
+
+    # Include the port number (8000) in your gateway URLs
+    gateway_urls = [
+        "http://192.168.1.24:8000/cgi-bin/log-traffic.has", # Gateway 1 (915 MHz)
+        "http://192.168.1.25:8000/cgi-bin/log-traffic.has"  # Gateway 2 (868 MHz)
+    ]
+
     headers = {
         "Host": "10.130.1.1",
         "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:122.0) Gecko/20100101 Firefox/122.0",
         "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
         "Accept-Language": "en-US,en;q=0.5",
         "Accept-Encoding": "gzip, deflate",
         "DNT": "1",
         "Sec-GPC": "1",
-        "Authorization": "Basic cm9vdDpkcmFnaW5v",
+        "Authorization": "Basic cm9vdDpkcmFnaW5v", # Assumes the same credentials for both gateways
         "Connection": "keep-alive",
         "Referer": "http://10.130.1.1/cgi-bin/log-lora.has",
         "Upgrade-Insecure-Requests": "1"
     }
 
-    response = requests.get(url, headers=headers)
-    if response.status_code == 200:
-        soup = BeautifulSoup(response.text, 'html.parser')
-        table = soup.find('table')
-        rows = table.find_all('tr')
-        headers = [header.text.strip() for header in rows[0].find_all('th')][1:]
-        for row in rows[1:]:
-            cells = row.find_all('td')
-            cell_data = [cell.text.strip() for cell in cells[1:] if cells.index(cell) < len(headers) + 1]
-            formatted_row = ' | '.join(cell_data)
-            dev_id = extract_dev_id(formatted_row) # Your existing function to extract DevEui or DevAddr
-            freq = extract_freq(formatted_row) # Your existing function to extract frequency
-            if dev_id and freq:
-                entry_identifier = f"{dev_id}_{formatted_row}" # Create a unique identifier for the entry
-
-                # Only process the entry if we haven't seen this identifier before
-                if entry_identifier not in parsed_entries:
-                    parsed_entries.add(entry_identifier) # Add the identifier to the set
-
-                    # Initialize dictionary for dev_id if not present
-                    if dev_id not in surveydata:
-                        surveydata[dev_id] = []
-
-                    # Append new data to the list associated with the DevEui or DevAddr
-                    surveydata[dev_id].append([freq, 0, formatted_row])
-        print("Data parsed and stored.")
-    else:
-        print(f"Request failed with status code: {response.status_code}")
+    for url in gateway_urls:
+        headers["Host"] = url.split("//")[-1].split("/")[0] # Dynamically set the Host header
+        response = requests.get(url, headers=headers)
+
+        if response.status_code == 200:
+            soup = BeautifulSoup(response.text, 'html.parser')
+            table = soup.find('table')
+            if table: # Check for a table in the response
+                rows = table.find_all('tr')
+                for row in rows[1:]: # Skip the header row
+                    cells = row.find_all('td')
+                    cell_data = [cell.text.strip() for cell in cells[1:] if cells.index(cell) < len(headers) + 1]
+                    formatted_row = ' | '.join(cell_data)
+                    dev_id = extract_dev_id(formatted_row) # Assuming this function is defined elsewhere
+                    freq = extract_freq(formatted_row) # Assuming this function is defined elsewhere
+                    if dev_id and freq:
+                        entry_identifier = f"{dev_id}_{formatted_row}"
+                        if entry_identifier not in parsed_entries:
+                            parsed_entries.add(entry_identifier)
+                            if dev_id not in surveydata:
+                                surveydata[dev_id] = []
+                            surveydata[dev_id].append([freq, 0, formatted_row])
+            print(f"Data parsed and stored from {url}.")
+        else:
+            print(f"Request to {url} failed with status code: {response.status_code}")
 
     # Schedule the next call to this function
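
A note on the dynamic Host header added above: url.split("//")[-1].split("/")[0] keeps everything between the scheme separator and the first path slash, so the port number is included. The following standalone sketch (not part of the commit) shows what the expression yields, with urllib.parse.urlparse as a standard-library equivalent:

from urllib.parse import urlparse

url = "http://192.168.1.24:8000/cgi-bin/log-traffic.has"

# String-splitting approach used in the commit: keep the host:port part
host = url.split("//")[-1].split("/")[0]
print(host)                  # -> 192.168.1.24:8000

# Equivalent result via the standard library
print(urlparse(url).netloc)  # -> 192.168.1.24:8000
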
@@ -1,10 +1,35 @@
 import requests
 from bs4 import BeautifulSoup
 
-# Define the URL and headers
-url = "http://10.130.1.1/cgi-bin/log-traffic.has"
+def get_gateway_data(url, headers):
+    """
+    Makes a request to the specified gateway URL and parses the HTML table content.
+    Returns a list of formatted strings for each row in the table.
+    """
+    formatted_rows = []
+    response = requests.get(url, headers=headers)
+
+    if response.status_code == 200:
+        soup = BeautifulSoup(response.text, 'html.parser')
+        table = soup.find('table')
+        if table:
+            rows = table.find_all('tr')
+            headers = [header.text.strip() for header in rows[0].find_all('th')][1:]
+            for row in rows[1:]:
+                cells = row.find_all('td')
+                cell_data = [cell.text.strip() for cell in cells[1:] if cells.index(cell) < len(headers) + 1]
+                formatted_row = ' | '.join(cell_data)
+                formatted_rows.append(formatted_row)
+    else:
+        print(f"Request to {url} failed with status code:", response.status_code)
+
+    return formatted_rows
+
+
+# Define URLs and headers for both gateways
+gateway_1 = "http://192.168.1.23/cgi-bin/log-traffic.has"
+gateway_2 = "http://192.168.1.24/cgi-bin/log-traffic.has"
 headers = {
     "Host": "10.130.1.1",
     "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:122.0) Gecko/20100101 Firefox/122.0",
     "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
     "Accept-Language": "en-US,en;q=0.5",
@@ -13,46 +38,20 @@ headers = {
     "Sec-GPC": "1",
     "Authorization": "Basic cm9vdDpkcmFnaW5v",
     "Connection": "keep-alive",
     "Referer": "http://10.130.1.1/cgi-bin/log-lora.has",
     "Upgrade-Insecure-Requests": "1"
 }
 
-# Send the GET request
-response = requests.get(url, headers=headers)
-
-if response.status_code == 200:
-    # Parse the HTML content using BeautifulSoup
-    soup = BeautifulSoup(response.text, 'html.parser')
-
-    # Find the table
-    table = soup.find('table')
-
-    # Initialize an empty list to store formatted strings for each row
-    formatted_rows = []
-
-    # Find all table rows
-    rows = table.find_all('tr')
-
-    # Get column headers from the first row
-    # Using .text.strip() to clean the text and [1:] to skip the empty first column
-    headers = [header.text.strip() for header in rows[0].find_all('th')][1:]
-
-    # Iterate through each row (skipping the first row with the headers)
-    for row in rows[1:]:
-        # Find all data cells (td tags) in the row
-        cells = row.find_all('td')
-
-        # Extract text from each cell
-        # Using [1:] to skip the first cell with the arrow icon
-        cell_data = [cell.text.strip() for cell in cells[1:] if cells.index(cell) < len(headers) + 1]
-
-        # Format the row data into a neat line
-        formatted_row = ' | '.join(cell_data)
-
-        # Append the formatted string to the list
-        formatted_rows.append(formatted_row)
-
-        # Print the formatted string to display the row
-        print(formatted_row)
-else:
-    print("Request failed with status code:", response.status_code)
+# Fetch and print data from both gateways
+print("Fetching data from Gateway 1 (192.168.1.23)...")
+data_1 = get_gateway_data(gateway_1, headers)
+for row in data_1:
+    print(row)
+
+print("\nFetching data from Gateway 2 (192.168.1.24)...")
+# Update the 'Host' header for the second gateway if necessary
+headers["Host"] = "192.168.1.24"
+data_2 = get_gateway_data(gateway_2, headers)
+for row in data_2:
+    print(row)
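
For reference, the Authorization value "Basic cm9vdDpkcmFnaW5v" used by both scripts is plain HTTP Basic auth: the base64 encoding of "user:password". This minimal sketch (not part of the commit) shows where the string comes from, using the root/dragino login already present in the scripts above:

import base64

credentials = "root:dragino"  # the login pair encoded in the header above
token = base64.b64encode(credentials.encode()).decode()
print(token)  # -> cm9vdDpkcmFnaW5v

requests can also build this header itself, e.g. requests.get(url, auth=("root", "dragino")), which avoids hard-coding the encoded string.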