Work on db cleanup tool

This commit is contained in:
Pablo Revilla
2025-09-18 09:37:24 -07:00
parent 6eb1cdbd2d
commit 4f4c18fa14
4 changed files with 7 additions and 21 deletions

View File

@@ -5,7 +5,7 @@
The project serves as a real-time monitoring and diagnostic tool for the Meshtastic mesh network. It provides detailed insights into the network's activity, including message traffic, node positions, and telemetry data.
### Version 2.0.7 update - September 2025
* New capability to automatically trim the database to keep a specific number of days of data
* Added configuaration of intervals for both the Live Map and the Firehose pages
* Added configuration for update intervals for both the Live Map and the Firehose pages
### Version 2.0.6 update - August 2025
* New Live Map (Shows packet feed live)
* New API /api/config (See API documentation)

View File

@@ -6,22 +6,12 @@ engine = None
async_session = None
def init_database(database_connection_string, read_only=False):
def init_database(database_connection_string):
global engine, async_session
kwargs = {"echo": False}
if database_connection_string.startswith("sqlite"):
if read_only:
# Ensure SQLite is opened in read-only mode
database_connection_string += "?mode=ro"
kwargs["connect_args"] = {"uri": True}
else:
kwargs["connect_args"] = {"timeout": 300}
else:
kwargs["pool_size"] = 20
kwargs["max_overflow"] = 50
# Ensure SQLite is opened in read-only mode
database_connection_string += "?mode=ro"
kwargs["connect_args"] = {"uri": True}
engine = create_async_engine(database_connection_string, **kwargs)
async_session = async_sessionmaker( bind=engine,
class_=AsyncSession,

View File

@@ -3,11 +3,7 @@ from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
def init_database(database_connection_string):
global engine, async_session
kwargs = {}
if not database_connection_string.startswith('sqlite'):
kwargs['pool_size'] = 20
kwargs['max_overflow'] = 50
engine = create_async_engine(database_connection_string, echo=False, connect_args={"timeout": 300})
engine = create_async_engine(database_connection_string, echo=False, connect_args={"timeout": 900})
async_session = async_sessionmaker(engine, expire_on_commit=False)
async def create_tables():

View File

@@ -78,7 +78,7 @@ async def daily_cleanup_at(db_file: str, hour: int = 2, minute: int = 0, days_to
total_deleted += deleted
if deleted == 0:
break
await asyncio.sleep(0) # yield to event loop
await asyncio.yield_now()
cleanup_logger.info(f"Deleted a total of {total_deleted} rows from {table}")