15 Commits

Author SHA1 Message Date
Jack Kingsman
9fbdbaa174 Updating changelog + build for 2.7.9 2026-03-08 22:18:59 -07:00
Jack Kingsman
e99e522573 Fix clipping on integration add drop down 2026-03-08 22:17:32 -07:00
Jack Kingsman
9d806c608b Add contact normalization rather than loading the packed path bytes 2026-03-08 21:01:01 -07:00
Jack Kingsman
5a9489eff1 Updating changelog + build for 2.7.8 2026-03-08 20:47:09 -07:00
Jack Kingsman
beb28b1f31 Updating changelog + build for 2.7.8 2026-03-08 20:42:03 -07:00
Jack Kingsman
7d688fa5f8 Move to more stable docker reqs without disrupting windows users 2026-03-08 20:38:33 -07:00
Jack Kingsman
09b68c37ba Better ci scripts 2026-03-08 19:56:58 -07:00
Jack Kingsman
df7dbad73d Fix bad file refs in decoder that break npm 10 2026-03-08 19:56:44 -07:00
Jack Kingsman
060fb1ef59 Updating changelog + build for 2.7.1 2026-03-08 18:48:14 -07:00
Jack Kingsman
b14e99ff24 Patch a bizarre browser quirk of leaky elements (???) in the packet list 2026-03-08 18:45:07 -07:00
Jack Kingsman
77523c1b15 Patch up to use a published patched meshcore-decoder and add a test script for different node versions 2026-03-08 18:35:58 -07:00
Jack Kingsman
9673b25ab3 yeeeikes fix raw packet feed sorry 2026-03-08 17:38:20 -07:00
Jack Kingsman
2732506f3c Fix historical DM packet length passing and fix up some docs 2026-03-08 17:12:36 -07:00
Jack Kingsman
523fe3e28e Updating changelog + build for 2.7.0 2026-03-08 16:23:23 -07:00
Jack Kingsman
3663db6ed3 Multibyte path support 2026-03-08 14:53:14 -07:00
27 changed files with 8244 additions and 96 deletions

View File

@@ -1,3 +1,28 @@
## [2.7.9] - 2026-03-08
Bugfix: Don't obscure new integration dropdown on session boundary
## [2.7.8] - 2026-03-08
Bugfix: Improve frontend asset resolution and fixup the build/push script
## [2.7.1] - 2026-03-08
Bugfix: Fix historical DM packet length passing
Misc: Follow better inclusion patterns for the patched meshcore-decoder and just publish the dang package
Misc: Patch a bewildering browser quirk that caused large raw packet lists to extend past the bottom of the page
## [2.7.0] - 2026-03-08
Feature: Multibyte path support
Feature: Add multibyte statistics to statistics pane
Feature: Add path bittage to contact info pane
Feature: Put tools in a collapsible
## [2.6.1] - 2026-03-08
Misc: Fix busted docker builds; we don't have a 2.6.0 build sorry

View File

@@ -6,7 +6,6 @@ ARG COMMIT_HASH=unknown
WORKDIR /build
COPY frontend/package.json frontend/.npmrc ./
COPY frontend/lib/meshcore-decoder ./lib/meshcore-decoder
RUN npm install
COPY frontend/ ./

View File

@@ -1141,7 +1141,7 @@ SOFTWARE.
</details>
### meshcore-hashtag-cracker (1.10.0) — MIT
### meshcore-hashtag-cracker (1.11.0) — MIT
<details>
<summary>Full license text</summary>

View File

@@ -20,7 +20,7 @@ app/
├── database.py # SQLite connection + base schema + migration runner
├── migrations.py # Schema migrations (SQLite user_version)
├── models.py # Pydantic request/response models
├── repository/ # Data access layer (contacts, channels, messages, raw_packets, settings)
├── repository/ # Data access layer (contacts, channels, messages, raw_packets, settings, fanout)
├── radio.py # RadioManager + auto-reconnect monitor
├── radio_sync.py # Polling, sync, periodic advertisement loop
├── decoder.py # Packet parsing/decryption
@@ -29,6 +29,7 @@ app/
├── websocket.py # WS manager + broadcast helpers
├── fanout/ # Fanout bus: MQTT, bots, webhooks, Apprise (see fanout/AGENTS_fanout.md)
├── dependencies.py # Shared FastAPI dependency providers
├── path_utils.py # Path hex rendering and hop-width helpers
├── keystore.py # Ephemeral private/public key storage for DM decryption
├── frontend_static.py # Mount/serve built frontend (production)
└── routers/
@@ -296,6 +297,10 @@ tests/
├── test_send_messages.py # Outgoing messages, bot triggers, concurrent sends
├── test_settings_router.py # Settings endpoints, advert validation
├── test_statistics.py # Statistics aggregation
├── test_channel_sender_backfill.py # Sender key backfill for channel messages
├── test_fanout_hitlist.py # Fanout-related hitlist regression tests
├── test_main_startup.py # App startup and lifespan
├── test_path_utils.py # Path hex rendering helpers
├── test_websocket.py # WS manager broadcast/cleanup
└── test_websocket_route.py # WS endpoint lifecycle
```

View File

@@ -2,6 +2,8 @@ from typing import Literal
from pydantic import BaseModel, Field
from app.path_utils import normalize_contact_route
class Contact(BaseModel):
public_key: str = Field(description="Public key (64-char hex)")
@@ -26,14 +28,19 @@ class Contact(BaseModel):
The radio API uses different field names (adv_name, out_path, etc.)
than our database schema (name, last_path, etc.).
"""
last_path, last_path_len, out_path_hash_mode = normalize_contact_route(
self.last_path,
self.last_path_len,
self.out_path_hash_mode,
)
return {
"public_key": self.public_key,
"adv_name": self.name or "",
"type": self.type,
"flags": self.flags,
"out_path": self.last_path or "",
"out_path_len": self.last_path_len,
"out_path_hash_mode": self.out_path_hash_mode,
"out_path": last_path,
"out_path_len": last_path_len,
"out_path_hash_mode": out_path_hash_mode,
"adv_lat": self.lat if self.lat is not None else 0.0,
"adv_lon": self.lon if self.lon is not None else 0.0,
"last_advert": self.last_advert if self.last_advert is not None else 0,
@@ -46,17 +53,22 @@ class Contact(BaseModel):
This is the inverse of to_radio_dict(), used when syncing contacts
from radio to database.
"""
last_path, last_path_len, out_path_hash_mode = normalize_contact_route(
radio_data.get("out_path"),
radio_data.get("out_path_len", -1),
radio_data.get(
"out_path_hash_mode",
-1 if radio_data.get("out_path_len", -1) == -1 else 0,
),
)
return {
"public_key": public_key,
"name": radio_data.get("adv_name"),
"type": radio_data.get("type", 0),
"flags": radio_data.get("flags", 0),
"last_path": radio_data.get("out_path"),
"last_path_len": radio_data.get("out_path_len", -1),
"out_path_hash_mode": radio_data.get(
"out_path_hash_mode",
-1 if radio_data.get("out_path_len", -1) == -1 else 0,
),
"last_path": last_path,
"last_path_len": last_path_len,
"out_path_hash_mode": out_path_hash_mode,
"lat": radio_data.get("adv_lat"),
"lon": radio_data.get("adv_lon"),
"last_advert": radio_data.get("last_advert"),

View File

@@ -148,3 +148,57 @@ def first_hop_hex(path_hex: str, hop_count: int) -> str | None:
"""
hops = split_path_hex(path_hex, hop_count)
return hops[0] if hops else None
def normalize_contact_route(
path_hex: str | None,
path_len: int | None,
out_path_hash_mode: int | None,
) -> tuple[str, int, int]:
"""Normalize stored contact route fields.
Handles legacy/bad rows where the packed wire path byte was stored directly
in `last_path_len` (sometimes as a signed byte, e.g. `-125` for `0x83`).
Returns `(path_hex, hop_count, hash_mode)`.
"""
normalized_path = path_hex or ""
try:
normalized_len = int(path_len) if path_len is not None else -1
except (TypeError, ValueError):
normalized_len = -1
try:
normalized_mode = int(out_path_hash_mode) if out_path_hash_mode is not None else None
except (TypeError, ValueError):
normalized_mode = None
if normalized_len < -1 or normalized_len > 63:
packed = normalized_len & 0xFF
if packed == 0xFF:
return "", -1, -1
decoded_mode = (packed >> 6) & 0x03
if decoded_mode != 0x03:
normalized_len = packed & 0x3F
normalized_mode = decoded_mode
if normalized_len == -1:
return "", -1, -1
if normalized_mode not in (0, 1, 2):
normalized_mode = 0
if normalized_path:
bytes_per_hop = normalized_mode + 1
actual_bytes = len(normalized_path) // 2
expected_bytes = normalized_len * bytes_per_hop
if actual_bytes > expected_bytes >= 0:
normalized_path = normalized_path[: expected_bytes * 2]
elif (
actual_bytes < expected_bytes
and bytes_per_hop > 0
and actual_bytes % bytes_per_hop == 0
):
normalized_len = actual_bytes // bytes_per_hop
return normalized_path, normalized_len, normalized_mode

View File

@@ -30,6 +30,21 @@ from app.repository import (
logger = logging.getLogger(__name__)
def _contact_sync_debug_fields(contact: Contact) -> dict[str, object]:
    """Return key contact fields for sync failure diagnostics."""
    # Pull each field off the model by name so the diagnostic set is easy to
    # extend in one place.
    diagnostic_fields = (
        "type",
        "flags",
        "last_path",
        "last_path_len",
        "out_path_hash_mode",
        "last_advert",
        "lat",
        "lon",
        "on_radio",
    )
    return {field: getattr(contact, field) for field in diagnostic_fields}
async def upsert_channel_from_radio_slot(payload: dict, *, on_radio: bool) -> str | None:
"""Parse a radio channel-slot payload and upsert to the database.
@@ -664,7 +679,8 @@ async def _sync_contacts_to_radio_inner(mc: MeshCore) -> dict:
continue
try:
result = await mc.commands.add_contact(contact.to_radio_dict())
radio_contact_payload = contact.to_radio_dict()
result = await mc.commands.add_contact(radio_contact_payload)
if result.type == EventType.OK:
loaded += 1
await ContactRepository.set_on_radio(contact.public_key, True)
@@ -687,7 +703,14 @@ async def _sync_contacts_to_radio_inner(mc: MeshCore) -> dict:
)
except Exception as e:
failed += 1
logger.warning("Error loading contact %s: %s", contact.public_key[:12], e)
logger.warning(
"Error loading contact %s with fields=%s radio_payload=%s: %s",
contact.public_key[:12],
_contact_sync_debug_fields(contact),
locals().get("radio_contact_payload"),
e,
exc_info=True,
)
if loaded > 0 or failed > 0:
logger.info(

View File

@@ -8,7 +8,7 @@ from app.models import (
ContactAdvertPathSummary,
ContactNameHistory,
)
from app.path_utils import first_hop_hex
from app.path_utils import first_hop_hex, normalize_contact_route
class AmbiguousPublicKeyPrefixError(ValueError):
@@ -23,9 +23,11 @@ class AmbiguousPublicKeyPrefixError(ValueError):
class ContactRepository:
@staticmethod
async def upsert(contact: dict[str, Any]) -> None:
out_path_hash_mode = contact.get("out_path_hash_mode")
if out_path_hash_mode is None:
out_path_hash_mode = -1 if contact.get("last_path_len", -1) == -1 else 0
last_path, last_path_len, out_path_hash_mode = normalize_contact_route(
contact.get("last_path"),
contact.get("last_path_len", -1),
contact.get("out_path_hash_mode"),
)
await db.conn.execute(
"""
@@ -54,8 +56,8 @@ class ContactRepository:
contact.get("name"),
contact.get("type", 0),
contact.get("flags", 0),
contact.get("last_path"),
contact.get("last_path_len", -1),
last_path,
last_path_len,
out_path_hash_mode,
contact.get("last_advert"),
contact.get("lat"),
@@ -71,14 +73,19 @@ class ContactRepository:
@staticmethod
def _row_to_contact(row) -> Contact:
"""Convert a database row to a Contact model."""
last_path, last_path_len, out_path_hash_mode = normalize_contact_route(
row["last_path"],
row["last_path_len"],
row["out_path_hash_mode"],
)
return Contact(
public_key=row["public_key"],
name=row["name"],
type=row["type"],
flags=row["flags"],
last_path=row["last_path"],
last_path_len=row["last_path_len"],
out_path_hash_mode=row["out_path_hash_mode"],
last_path=last_path,
last_path_len=last_path_len,
out_path_hash_mode=out_path_hash_mode,
last_advert=row["last_advert"],
lat=row["lat"],
lon=row["lon"],
@@ -215,11 +222,22 @@ class ContactRepository:
path_len: int,
out_path_hash_mode: int | None = None,
) -> None:
normalized_path, normalized_path_len, normalized_hash_mode = normalize_contact_route(
path,
path_len,
out_path_hash_mode,
)
await db.conn.execute(
"""UPDATE contacts SET last_path = ?, last_path_len = ?,
out_path_hash_mode = COALESCE(?, out_path_hash_mode),
last_seen = ? WHERE public_key = ?""",
(path, path_len, out_path_hash_mode, int(time.time()), public_key.lower()),
(
normalized_path,
normalized_path_len,
normalized_hash_mode,
int(time.time()),
public_key.lower(),
),
)
await db.conn.commit()

View File

@@ -71,6 +71,7 @@ async def _run_historical_channel_decryption(
timestamp=result.timestamp,
received_at=packet_timestamp,
path=path_hex,
path_len=packet_info.path_length if packet_info else None,
realtime=False, # Historical decryption should not trigger fanout
)

View File

@@ -13,7 +13,7 @@ services:
# Set your serial device for passthrough here! #
################################################
devices:
- /dev/ttyUSB0:/dev/ttyUSB0
- /dev/ttyACM0:/dev/ttyUSB0
environment:
MESHCORE_DATABASE_PATH: data/meshcore.db

View File

@@ -12,9 +12,9 @@ Keep it aligned with `frontend/src` source code.
- Tailwind utility classes + local CSS (`index.css`, `styles.css`)
- Sonner (toasts)
- Leaflet / react-leaflet (map)
- Vendored `@michaelhart/meshcore-decoder` in `frontend/lib/meshcore-decoder` (local file dependency for multibyte-support build)
- `@michaelhart/meshcore-decoder` installed via npm alias to `meshcore-decoder-multibyte-patch`
- `meshcore-hashtag-cracker` + `nosleep.js` (channel cracker)
- `@michaelhart/meshcore-decoder` pinned to the multibyte-aware `jkingsman/meshcore-decoder-multibyte` fork
- Multibyte-aware decoder build published as `meshcore-decoder-multibyte-patch`
## Frontend Map
@@ -141,8 +141,6 @@ frontend/src/
├── useWebSocket.dispatch.test.ts
└── useWebSocket.lifecycle.test.ts
frontend/lib/
└── meshcore-decoder/ # Vendored local decoder package used by app + hashtag cracker
```
## Architecture Notes

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
{
"name": "remoteterm-meshcore-frontend",
"private": true,
"version": "2.6.1",
"version": "2.7.9",
"type": "module",
"scripts": {
"dev": "vite",
@@ -17,7 +17,7 @@
"dependencies": {
"@codemirror/lang-python": "^6.2.1",
"@codemirror/theme-one-dark": "^6.1.3",
"@michaelhart/meshcore-decoder": "file:./lib/meshcore-decoder",
"@michaelhart/meshcore-decoder": "npm:meshcore-decoder-multibyte-patch@0.2.7",
"@radix-ui/react-checkbox": "^1.3.3",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-label": "^2.1.8",
@@ -31,7 +31,7 @@
"d3-force-3d": "^3.0.6",
"leaflet": "^1.9.4",
"lucide-react": "^0.562.0",
"meshcore-hashtag-cracker": "^1.10.0",
"meshcore-hashtag-cracker": "^1.11.0",
"nosleep.js": "^0.12.0",
"react": "^18.3.1",
"react-dom": "^18.3.1",
@@ -64,10 +64,5 @@
"typescript-eslint": "^8.19.0",
"vite": "^6.0.3",
"vitest": "^2.1.0"
},
"overrides": {
"meshcore-hashtag-cracker": {
"@michaelhart/meshcore-decoder": "file:./lib/meshcore-decoder"
}
}
}

View File

@@ -614,7 +614,7 @@ export function App() {
const settingsSidebarContent = (
<nav
className="sidebar w-60 h-full min-h-0 bg-card border-r border-border flex flex-col"
className="sidebar w-60 h-full min-h-0 overflow-hidden bg-card border-r border-border flex flex-col"
aria-label="Settings"
>
<div className="flex justify-between items-center px-3 py-2.5 border-b border-border">
@@ -631,7 +631,7 @@ export function App() {
&larr; Back to Chat
</button>
</div>
<div className="flex-1 overflow-y-auto py-1">
<div className="flex-1 min-h-0 overflow-y-auto py-1 [contain:layout_paint]">
{SETTINGS_SECTION_ORDER.map((section) => (
<button
key={section}
@@ -681,7 +681,7 @@ export function App() {
<div className="flex flex-1 overflow-hidden">
{/* Desktop sidebar - hidden on mobile */}
<div className="hidden md:block">{activeSidebarContent}</div>
<div className="hidden md:block min-h-0 overflow-hidden">{activeSidebarContent}</div>
{/* Mobile sidebar - Sheet that slides in */}
<Sheet open={sidebarOpen} onOpenChange={setSidebarOpen}>

View File

@@ -202,14 +202,17 @@ export function RawPacketList({ packets }: RawPacketListProps) {
if (packets.length === 0) {
return (
<div className="h-full overflow-y-auto p-5 text-center text-muted-foreground">
<div className="h-full overflow-y-auto p-5 text-center text-muted-foreground [contain:layout_paint]">
No packets received yet. Packets will appear here in real-time.
</div>
);
}
return (
<div className="h-full overflow-y-auto p-4 flex flex-col gap-2" ref={listRef}>
<div
className="h-full overflow-y-auto p-4 flex flex-col gap-2 [contain:layout_paint]"
ref={listRef}
>
{sortedPackets.map(({ packet, decoded }) => (
<div
key={getRawPacketObservationKey(packet)}

View File

@@ -123,7 +123,7 @@ export function SettingsModal(props: SettingsModalProps) {
const shouldRenderSection = (section: SettingsSection) =>
!externalDesktopSidebarMode || desktopSection === section;
const sectionWrapperClass = 'overflow-hidden';
const sectionWrapperClass = '';
const sectionContentClass = externalDesktopSidebarMode
? 'mx-auto w-full max-w-[800px] space-y-4 p-4'

View File

@@ -625,7 +625,7 @@ export function Sidebar({
return (
<nav
className="sidebar w-60 h-full min-h-0 bg-card border-r border-border flex flex-col"
className="sidebar w-60 h-full min-h-0 overflow-hidden bg-card border-r border-border flex flex-col"
aria-label="Conversations"
>
{/* Header */}
@@ -668,7 +668,7 @@ export function Sidebar({
</div>
{/* List */}
<div className="flex-1 overflow-y-auto">
<div className="flex-1 min-h-0 overflow-y-auto [contain:layout_paint]">
{/* Tools */}
{toolRows.length > 0 && (
<>

View File

@@ -231,6 +231,17 @@ describe('SettingsModal', () => {
expect(screen.queryByLabelText('Preset')).not.toBeInTheDocument();
});
it('does not clip the fanout add-integration menu in external desktop mode', () => {
renderModal({
externalSidebarNav: true,
desktopSection: 'fanout',
});
const addIntegrationButton = screen.getByRole('button', { name: 'Add Integration' });
const wrapperSection = addIntegrationButton.closest('section');
expect(wrapperSection).not.toHaveClass('overflow-hidden');
});
it('applies the centered 800px column layout to non-fanout settings content', () => {
renderModal({
externalSidebarNav: true,

View File

@@ -1,6 +1,6 @@
[project]
name = "remoteterm-meshcore"
version = "2.6.1"
version = "2.7.9"
description = "RemoteTerm - Web interface for MeshCore radio mesh networks"
readme = "README.md"
requires-python = ">=3.10"

View File

@@ -7,6 +7,9 @@ set -euo pipefail
REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
OUT="${1:-$REPO_ROOT/LICENSES.md}"
FRONTEND_DOCKER_LOCK="$REPO_ROOT/frontend/package-lock.docker.json"
FRONTEND_LICENSE_IMAGE="${FRONTEND_LICENSE_IMAGE:-node:20-slim}"
FRONTEND_LICENSE_NPM="${FRONTEND_LICENSE_NPM:-10.9.5}"
# ── Backend (Python) — uses pip-licenses ─────────────────────────────
backend_licenses() {
@@ -55,56 +58,33 @@ for d in data:
}
# ── Frontend (npm) ───────────────────────────────────────────────────
frontend_licenses() {
frontend_licenses_local() {
cd "$REPO_ROOT/frontend"
node -e "
const fs = require('fs');
const path = require('path');
const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8'));
const depNames = Object.keys(pkg.dependencies || {}).sort((a, b) =>
a.toLowerCase().localeCompare(b.toLowerCase())
);
for (const name of depNames) {
const pkgDir = path.join('node_modules', name);
let version = 'unknown';
let licenseType = 'Unknown';
let licenseText = null;
// Read package.json for version + license type
try {
const depPkg = JSON.parse(fs.readFileSync(path.join(pkgDir, 'package.json'), 'utf8'));
version = depPkg.version || version;
licenseType = depPkg.license || licenseType;
} catch {}
// Find license file (case-insensitive search)
try {
const files = fs.readdirSync(pkgDir);
const licFile = files.find(f => /^(licen[sc]e|copying)/i.test(f));
if (licFile) {
licenseText = fs.readFileSync(path.join(pkgDir, licFile), 'utf8').trim();
}
} catch {}
console.log('### ' + name + ' (' + version + ') — ' + licenseType + '\n');
if (licenseText) {
console.log('<details>');
console.log('<summary>Full license text</summary>');
console.log();
console.log('\`\`\`');
console.log(licenseText);
console.log('\`\`\`');
console.log();
console.log('</details>');
} else {
console.log('*License file not found in package.*');
}
console.log();
node "$REPO_ROOT/scripts/print_frontend_licenses.cjs"
}
"
# Generate the frontend license report inside a pinned Node/npm container so
# the dependency tree matches the Docker build (package-lock.docker.json).
# The repo is mounted read-only; the frontend dir is copied into the container
# before `npm ci` so the mounted source is never modified.
frontend_licenses_docker() {
    docker run --rm \
        -v "$REPO_ROOT:/src:ro" \
        -w /tmp \
        "$FRONTEND_LICENSE_IMAGE" \
        bash -lc "
            set -euo pipefail
            cp -a /src/frontend ./frontend
            cd frontend
            cp package-lock.docker.json package-lock.json
            npm i -g npm@$FRONTEND_LICENSE_NPM >/dev/null
            npm ci --ignore-scripts >/dev/null
            node /src/scripts/print_frontend_licenses.cjs
        "
}
# Emit the frontend license section: prefer the reproducible Docker-based
# report when the docker lockfile exists, otherwise fall back to reading the
# locally installed node_modules.
frontend_licenses() {
    if [ -f "$FRONTEND_DOCKER_LOCK" ]; then
        frontend_licenses_docker
    else
        frontend_licenses_local
    fi
}
# ── Assemble ─────────────────────────────────────────────────────────

52
scripts/docker_ci.sh Normal file
View File

@@ -0,0 +1,52 @@
#!/usr/bin/env bash
# Frontend Docker CI matrix: run `npm install` + `npm run build` under every
# combination of supported Node and npm versions inside disposable containers.
# `set -e` aborts on the first failing combination.
set -euo pipefail
# ANSI color codes for matrix output.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
# Repo root (one level above this script's directory).
SCRIPT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
NODE_VERSIONS=("20" "22" "24")
# Use explicit npm patch versions so resolver regressions are caught.
NPM_VERSIONS=("9.1.1" "9.9.4" "10.9.5" "11.6.2")
echo -e "${YELLOW}=== Frontend Docker CI Matrix ===${NC}"
echo -e "${BLUE}Repo:${NC} $SCRIPT_DIR"
echo
# Run one Node/npm combination. The repo is mounted read-only and the frontend
# is copied into the container's /tmp before installing, so the host tree is
# never touched.
run_combo() {
    local node_version="$1"
    local npm_version="$2"
    local image="node:${node_version}-slim"
    echo -e "${YELLOW}=== Node ${node_version} / npm ${npm_version} ===${NC}"
    docker run --rm \
        -v "$SCRIPT_DIR:/src:ro" \
        -w /tmp \
        "$image" \
        bash -lc "
            set -euo pipefail
            cp -a /src/frontend ./frontend
            cd frontend
            npm i -g npm@${npm_version}
            echo 'Using Node:' \$(node -v)
            echo 'Using npm:' \$(npm -v)
            npm install
            npm run build
        "
    echo -e "${GREEN}Passed:${NC} Node ${node_version} / npm ${npm_version}"
    echo
}
# Exercise the full cartesian product of Node x npm versions.
for node_version in "${NODE_VERSIONS[@]}"; do
    for npm_version in "${NPM_VERSIONS[@]}"; do
        run_combo "$node_version" "$npm_version"
    done
done
echo -e "${GREEN}=== Docker CI matrix passed ===${NC}"

View File

@@ -0,0 +1,29 @@
#!/usr/bin/env bash
set -euo pipefail
YELLOW='\033[1;33m'
GREEN='\033[0;32m'
BLUE='\033[0;34m'
NC='\033[0m'
SCRIPT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
echo -e "${YELLOW}=== Extended Quality Checks ===${NC}"
echo
echo -e "${BLUE}[all_quality]${NC} Running full lint, typecheck, unit tests, and builds..."
"$SCRIPT_DIR/scripts/all_quality.sh"
echo -e "${GREEN}[all_quality]${NC} Passed!"
echo
echo -e "${BLUE}[e2e]${NC} Running end-to-end tests..."
"$SCRIPT_DIR/scripts/e2e.sh" "$@"
echo -e "${GREEN}[e2e]${NC} Passed!"
echo
echo -e "${BLUE}[docker_ci]${NC} Running Docker frontend install/build matrix..."
"$SCRIPT_DIR/scripts/docker_ci.sh"
echo -e "${GREEN}[docker_ci]${NC} Passed!"
echo
echo -e "${GREEN}=== Extended quality checks passed! ===${NC}"

View File

@@ -0,0 +1,43 @@
// Print a Markdown license report for every runtime dependency declared in
// ./package.json, reading version/license metadata and license files out of
// the local node_modules tree. Must be run from the frontend directory.
const fs = require('fs');
const path = require('path');

const manifest = JSON.parse(fs.readFileSync('package.json', 'utf8'));
const sortedDeps = Object.keys(manifest.dependencies || {}).sort(
  (a, b) => a.toLowerCase().localeCompare(b.toLowerCase())
);

for (const depName of sortedDeps) {
  const depDir = path.join('node_modules', depName);
  let depVersion = 'unknown';
  let depLicense = 'Unknown';
  let licenseBody = null;

  // Version + declared license type from the dependency's package.json;
  // missing/unreadable manifests keep the "unknown" placeholders.
  try {
    const meta = JSON.parse(fs.readFileSync(path.join(depDir, 'package.json'), 'utf8'));
    depVersion = meta.version || depVersion;
    depLicense = meta.license || depLicense;
  } catch {}

  // Case-insensitive search for a LICENSE/LICENCE/COPYING file in the package root.
  try {
    const entries = fs.readdirSync(depDir);
    const licenseFile = entries.find((entry) => /^(licen[sc]e|copying)/i.test(entry));
    if (licenseFile) {
      licenseBody = fs.readFileSync(path.join(depDir, licenseFile), 'utf8').trim();
    }
  } catch {}

  console.log(`### ${depName} (${depVersion}) — ${depLicense}\n`);
  if (licenseBody) {
    console.log('<details>');
    console.log('<summary>Full license text</summary>');
    console.log();
    console.log('```');
    console.log(licenseBody);
    console.log('```');
    console.log();
    console.log('</details>');
  } else {
    console.log('*License file not found in package.*');
  }
  console.log();
}

View File

@@ -148,8 +148,9 @@ git push
echo -e "${GREEN}Changes committed!${NC}"
echo
# Get git short hash (after commit so it reflects the new commit)
# Get git hashes (after commit so they reflect the new commit)
GIT_HASH=$(git rev-parse --short HEAD)
FULL_GIT_HASH=$(git rev-parse HEAD)
# Build docker image
echo -e "${YELLOW}Building Docker image...${NC}"
@@ -168,6 +169,38 @@ docker push jkingsman/remoteterm-meshcore:$GIT_HASH
echo -e "${GREEN}Docker push complete!${NC}"
echo
# Create GitHub release using the changelog notes for this version.
echo -e "${YELLOW}Creating GitHub release...${NC}"
RELEASE_NOTES_FILE=$(mktemp)
{
echo "$CHANGELOG_HEADER"
echo
echo "$CHANGELOG_ENTRY"
} > "$RELEASE_NOTES_FILE"
# Create and push the release tag first so GitHub release creation does not
# depend on resolving a symbolic ref like HEAD on the remote side.
if git rev-parse -q --verify "refs/tags/$VERSION" >/dev/null; then
echo -e "${YELLOW}Tag $VERSION already exists locally; reusing it.${NC}"
else
git tag "$VERSION" "$FULL_GIT_HASH"
fi
if git ls-remote --exit-code --tags origin "refs/tags/$VERSION" >/dev/null 2>&1; then
echo -e "${YELLOW}Tag $VERSION already exists on origin; not pushing it again.${NC}"
else
git push origin "$VERSION"
fi
gh release create "$VERSION" \
--title "$VERSION" \
--notes-file "$RELEASE_NOTES_FILE" \
--verify-tag
rm -f "$RELEASE_NOTES_FILE"
echo -e "${GREEN}GitHub release created!${NC}"
echo
echo -e "${GREEN}=== Publish complete! ===${NC}"
echo -e "Version: ${YELLOW}$VERSION${NC}"
echo -e "Git hash: ${YELLOW}$GIT_HASH${NC}"
@@ -175,3 +208,5 @@ echo -e "Docker tags pushed:"
echo -e " - jkingsman/remoteterm-meshcore:latest"
echo -e " - jkingsman/remoteterm-meshcore:$VERSION"
echo -e " - jkingsman/remoteterm-meshcore:$GIT_HASH"
echo -e "GitHub release:"
echo -e " - $VERSION"

View File

@@ -5,6 +5,7 @@ import pytest
from app.path_utils import (
decode_path_byte,
first_hop_hex,
normalize_contact_route,
parse_packet_envelope,
path_wire_len,
split_path_hex,
@@ -153,6 +154,26 @@ class TestFirstHopHex:
assert first_hop_hex("", 0) is None
class TestNormalizeContactRoute:
    """Tests for normalize_contact_route() legacy packed-path-byte handling."""

    def test_decodes_legacy_signed_packed_len(self):
        # -125 is 0x83 read as a signed byte: top two bits = hash mode 2,
        # low six bits = 3 hops.
        path_hex, path_len, hash_mode = normalize_contact_route("3f3f69de1c7b7e7662", -125, 2)
        assert path_hex == "3f3f69de1c7b7e7662"
        assert path_len == 3
        assert hash_mode == 2

    def test_decodes_legacy_unsigned_packed_len(self):
        # 130 is 0x82: hash mode 2, 2 hops; the mode is recovered from the
        # packed byte even when stored as None.
        path_hex, path_len, hash_mode = normalize_contact_route("7e7662ae9258", 130, None)
        assert path_hex == "7e7662ae9258"
        assert path_len == 2
        assert hash_mode == 2

    def test_normalizes_flood_to_empty_path(self):
        # A -1 hop count means flood routing: stored path and mode are cleared.
        path_hex, path_len, hash_mode = normalize_contact_route("abcd", -1, 2)
        assert path_hex == ""
        assert path_len == -1
        assert hash_mode == -1
class TestContactToRadioDictHashMode:
"""Test that Contact.to_radio_dict() preserves the stored out_path_hash_mode."""
@@ -216,6 +237,20 @@ class TestContactToRadioDictHashMode:
d = c.to_radio_dict()
assert d["out_path_hash_mode"] == 1
def test_decodes_legacy_signed_packed_len_before_radio_sync(self):
    """to_radio_dict() unpacks a legacy signed packed path byte (-125 == 0x83)."""
    from app.models import Contact

    c = Contact(
        public_key="ff" * 32,
        last_path="3f3f69de1c7b7e7662",
        last_path_len=-125,
        out_path_hash_mode=2,
    )
    d = c.to_radio_dict()
    # 0x83 decodes to hash mode 2 (top two bits) and 3 hops (low six bits).
    assert d["out_path"] == "3f3f69de1c7b7e7662"
    assert d["out_path_len"] == 3
    assert d["out_path_hash_mode"] == 2
class TestContactFromRadioDictHashMode:
"""Test that Contact.from_radio_dict() preserves explicit path hash mode."""

View File

@@ -377,6 +377,33 @@ class TestSyncRecentContactsToRadio:
assert payload["out_path_len"] == 2
assert payload["out_path_hash_mode"] == 1
@pytest.mark.asyncio
async def test_add_contact_decodes_legacy_packed_path_len(self, test_db):
    """Legacy signed packed path bytes are normalized before add_contact."""
    # Seed a DB row that stores the raw wire byte -125 (0x83) in last_path_len.
    await _insert_contact(
        KEY_A,
        "Alice",
        last_contacted=2000,
        last_path="3f3f69de1c7b7e7662",
        last_path_len=-125,
        out_path_hash_mode=2,
    )
    # Radio mock: contact not yet on the radio; add_contact reports OK.
    mock_mc = MagicMock()
    mock_mc.get_contact_by_key_prefix = MagicMock(return_value=None)
    mock_result = MagicMock()
    mock_result.type = EventType.OK
    mock_mc.commands.add_contact = AsyncMock(return_value=mock_result)
    radio_manager._meshcore = mock_mc
    result = await sync_recent_contacts_to_radio()
    assert result["loaded"] == 1
    # The payload handed to the radio must carry the decoded hop count/mode,
    # not the packed byte.
    payload = mock_mc.commands.add_contact.call_args.args[0]
    assert payload["out_path"] == "3f3f69de1c7b7e7662"
    assert payload["out_path_len"] == 3
    assert payload["out_path_hash_mode"] == 2
@pytest.mark.asyncio
async def test_mc_param_bypasses_lock_acquisition(self, test_db):
"""When mc is passed, the function uses it directly without acquiring radio_operation.

2
uv.lock generated
View File

@@ -1049,7 +1049,7 @@ wheels = [
[[package]]
name = "remoteterm-meshcore"
version = "2.6.1"
version = "2.7.9"
source = { virtual = "." }
dependencies = [
{ name = "aiomqtt" },