Add hot-message cache

This commit is contained in:
Jack Kingsman
2026-02-07 22:46:28 -08:00
parent 7d825a07f8
commit f32ba0629a
5 changed files with 650 additions and 29 deletions

View File

@@ -24,7 +24,13 @@ frontend/
│ ├── api.ts # REST API client
│ ├── types.ts # TypeScript interfaces
│ ├── useWebSocket.ts # WebSocket hook with auto-reconnect
│ ├── messageCache.ts # LRU message cache for conversation switching
│ ├── styles.css # Dark theme CSS
│ ├── hooks/
│ │ ├── index.ts
│ │ ├── useConversationMessages.ts # Message fetching, pagination, cache integration
│ │ ├── useUnreadCounts.ts # Unread count tracking
│ │ └── useRepeaterMode.ts # Repeater login/CLI mode
│ ├── utils/
│ │ ├── messageParser.ts # Text parsing utilities
│ │ ├── conversationState.ts # localStorage for message times (sidebar sorting)
@@ -88,6 +94,20 @@ App settings are stored server-side and include:
**Migration**: On first load, localStorage preferences are migrated to the server.
The `preferences_migrated` flag prevents duplicate migrations.
### Message Cache (`messageCache.ts`)
An LRU cache stores messages for recently-visited conversations so switching back is instant
(no spinner, no blocking fetch). On switch-away, the active conversation's messages are saved to cache.
On switch-to, cached messages are restored immediately, then a silent background fetch reconciles
with the backend — only updating state if something differs (missed WS message, stale ack).
The happy path (cache is consistent) causes zero rerenders.
- Cache capacity: `MAX_CACHED_CONVERSATIONS` (20) entries, `MAX_MESSAGES_PER_ENTRY` (200) messages each
- Uses `Map` insertion-order for LRU semantics (delete + re-insert promotes to MRU)
- WebSocket messages for non-active cached conversations are written directly to the cache
- `reconcile(current, fetched)` compares by message ID + ack count, returns merged array or `null`
- Deleted conversations are evicted from cache via `remove()`
### State Flow
1. **REST API** fetches initial data on mount in parallel (config, settings, channels, contacts, unreads)
@@ -521,9 +541,16 @@ npm run test # Watch mode
- `messageParser.test.ts` - Sender extraction, time formatting, conversation keys
- `unreadCounts.test.ts` - Unread tracking logic
- `contactAvatar.test.ts` - Avatar text extraction, color generation, repeater handling
- `messageDeduplication.test.ts` - Message deduplication logic
- `useConversationMessages.test.ts` - Message content key generation, ack update logic
- `messageCache.test.ts` - LRU cache: eviction, dedup, ack updates, reconciliation
- `websocket.test.ts` - WebSocket message routing
- `repeaterMode.test.ts` - Repeater CLI parsing, password "." conversion
- `useRepeaterMode.test.ts` - Repeater hook: login flow, CLI commands, state reset
- `integration.test.ts` - Cross-component integration scenarios
- `urlHash.test.ts` - URL hash parsing and generation
- `pathUtils.test.ts` - Path distance calculation utilities
- `radioPresets.test.ts` - Radio preset configuration
- `api.test.ts` - API client request formatting
### Test Setup

View File

@@ -7,6 +7,7 @@ import {
useConversationMessages,
getMessageContentKey,
} from './hooks';
import * as messageCache from './messageCache';
import { StatusBar } from './components/StatusBar';
import { Sidebar } from './components/Sidebar';
import { MessageList } from './components/MessageList';
@@ -181,31 +182,38 @@ export function App() {
// Track for unread counts and sorting
trackNewMessage(msg);
// Count unread for non-active, incoming messages (with deduplication)
if (!msg.outgoing && !isForActiveConversation) {
// Skip if we've already seen this message content (prevents duplicate increments
// when the same message arrives via multiple mesh paths)
// For non-active conversations: update cache and count unreads
if (!isForActiveConversation) {
const contentKey = getMessageContentKey(msg);
if (seenMessageContentRef.current.has(contentKey)) {
return;
}
seenMessageContentRef.current.add(contentKey);
// Limit set size to prevent memory issues
if (seenMessageContentRef.current.size > 1000) {
const keys = Array.from(seenMessageContentRef.current);
seenMessageContentRef.current = new Set(keys.slice(-500));
}
// Update message cache (instant restore on switch)
messageCache.addMessage(msg.conversation_key, msg, contentKey);
let stateKey: string | null = null;
if (msg.type === 'CHAN' && msg.conversation_key) {
stateKey = getStateKey('channel', msg.conversation_key);
} else if (msg.type === 'PRIV' && msg.conversation_key) {
stateKey = getStateKey('contact', msg.conversation_key);
}
if (stateKey) {
const hasMention = checkMention(msg.text);
incrementUnread(stateKey, hasMention);
// Count unread for incoming messages (with deduplication)
if (!msg.outgoing) {
// Skip if we've already seen this message content (prevents duplicate increments
// when the same message arrives via multiple mesh paths)
if (seenMessageContentRef.current.has(contentKey)) {
return;
}
seenMessageContentRef.current.add(contentKey);
// Limit set size to prevent memory issues
if (seenMessageContentRef.current.size > 1000) {
const keys = Array.from(seenMessageContentRef.current);
seenMessageContentRef.current = new Set(keys.slice(-500));
}
let stateKey: string | null = null;
if (msg.type === 'CHAN' && msg.conversation_key) {
stateKey = getStateKey('channel', msg.conversation_key);
} else if (msg.type === 'PRIV' && msg.conversation_key) {
stateKey = getStateKey('contact', msg.conversation_key);
}
if (stateKey) {
const hasMention = checkMention(msg.text);
incrementUnread(stateKey, hasMention);
}
}
}
},
@@ -242,6 +250,7 @@ export function App() {
},
onMessageAcked: (messageId: number, ackCount: number, paths?: MessagePath[]) => {
updateMessageAck(messageId, ackCount, paths);
messageCache.updateAck(messageId, ackCount, paths);
},
}),
[addMessageIfNew, trackNewMessage, incrementUnread, updateMessageAck, checkMention]
@@ -600,6 +609,7 @@ export function App() {
if (!confirm('Delete this channel? Message history will be preserved.')) return;
try {
await api.deleteChannel(key);
messageCache.remove(key);
setChannels((prev) => prev.filter((c) => c.key !== key));
setActiveConversation(null);
toast.success('Channel deleted');
@@ -616,6 +626,7 @@ export function App() {
if (!confirm('Delete this contact? Message history will be preserved.')) return;
try {
await api.deleteContact(publicKey);
messageCache.remove(publicKey);
setContacts((prev) => prev.filter((c) => c.public_key !== publicKey));
setActiveConversation(null);
toast.success('Contact deleted');
@@ -960,6 +971,7 @@ export function App() {
</div>
</div>
<MessageList
key={activeConversation.id}
messages={messages}
contacts={contacts}
loading={messagesLoading}

View File

@@ -1,6 +1,7 @@
import { useState, useCallback, useEffect, useRef } from 'react';
import { toast } from '../components/ui/sonner';
import { api, isAbortError } from '../api';
import * as messageCache from '../messageCache';
import type { Conversation, Message, MessagePath } from '../types';
const MESSAGE_PAGE_SIZE = 200;
@@ -39,6 +40,20 @@ export function useConversationMessages(
// Ref to track the conversation ID being fetched to prevent stale responses
const fetchingConversationIdRef = useRef<string | null>(null);
// --- Cache integration refs ---
// Keep refs in sync with state so we can read current values in the switch effect
const messagesRef = useRef<Message[]>([]);
const hasOlderMessagesRef = useRef(false);
const prevConversationIdRef = useRef<string | null>(null);
useEffect(() => {
messagesRef.current = messages;
}, [messages]);
useEffect(() => {
hasOlderMessagesRef.current = hasOlderMessages;
}, [hasOlderMessages]);
// Fetch messages for active conversation
// Note: This is called manually and from the useEffect. The useEffect handles
// cancellation via AbortController; manual calls (e.g., after sending a message)
@@ -146,15 +161,64 @@ export function useConversationMessages(
}
}, [activeConversation, loadingOlder, hasOlderMessages, messages]);
// Fetch messages when conversation changes, with proper cancellation
// Background reconciliation: silently fetch from backend after a cache restore
// and only update state if something differs (missed WS message, stale ack, etc.).
// No-ops on the happy path — zero rerenders when cache is already consistent.
function reconcileFromBackend(conversation: Conversation, signal: AbortSignal) {
  const conversationId = conversation.id;
  api
    .getMessages(
      {
        // Backend wire types: channel conversations are 'CHAN', direct ones 'PRIV'.
        type: conversation.type === 'channel' ? 'CHAN' : 'PRIV',
        conversation_key: conversationId,
        limit: MESSAGE_PAGE_SIZE,
      },
      signal
    )
    .then((data) => {
      // Stale check — conversation may have changed while awaiting
      if (fetchingConversationIdRef.current !== conversationId) return;
      const merged = messageCache.reconcile(messagesRef.current, data);
      if (!merged) return; // Cache was consistent — no rerender
      setMessages(merged);
      // Rebuild the dedup set from the merged list so later WS deliveries of
      // these same messages are recognized as already seen.
      seenMessageContent.current.clear();
      for (const msg of merged) {
        seenMessageContent.current.add(getMessageContentKey(msg));
      }
      // A full page implies more history likely exists server-side.
      if (data.length >= MESSAGE_PAGE_SIZE) {
        setHasOlderMessages(true);
      }
    })
    .catch((err) => {
      if (isAbortError(err)) return;
      // Silent failure — we already have cached data
      console.debug('Background reconciliation failed:', err);
    });
}
// Fetch messages when conversation changes, with proper cancellation and caching
useEffect(() => {
// Abort any previous in-flight request
if (abortControllerRef.current) {
abortControllerRef.current.abort();
}
// Track which conversation we're now fetching
fetchingConversationIdRef.current = activeConversation?.id ?? null;
// Save outgoing conversation to cache (if it had messages loaded)
const prevId = prevConversationIdRef.current;
if (prevId && messagesRef.current.length > 0) {
messageCache.set(prevId, {
messages: messagesRef.current,
seenContent: new Set(seenMessageContent.current),
hasOlderMessages: hasOlderMessagesRef.current,
});
}
// Track which conversation we're now on
const newId = activeConversation?.id ?? null;
fetchingConversationIdRef.current = newId;
prevConversationIdRef.current = newId;
// Clear state for new conversation
if (!activeConversation || activeConversation.type === 'raw') {
@@ -163,12 +227,24 @@ export function useConversationMessages(
return;
}
// Create new AbortController for this fetch
// Create AbortController for this conversation's fetch (cache reconcile or full fetch)
const controller = new AbortController();
abortControllerRef.current = controller;
// Fetch messages with the abort signal
fetchMessages(true, controller.signal);
// Check cache for the new conversation
const cached = messageCache.get(activeConversation.id);
if (cached) {
// Restore from cache instantly — no spinner
setMessages(cached.messages);
seenMessageContent.current = new Set(cached.seenContent);
setHasOlderMessages(cached.hasOlderMessages);
setMessagesLoading(false);
// Silently reconcile with backend in case we missed a WS message
reconcileFromBackend(activeConversation, controller.signal);
} else {
// Not cached — full fetch with spinner
fetchMessages(true, controller.signal);
}
// Cleanup: abort request if conversation changes or component unmounts
return () => {

View File

@@ -0,0 +1,126 @@
/**
 * LRU message cache for recently-visited conversations.
 *
 * Uses Map insertion-order semantics: the most recently used entry
 * is always at the end. Eviction removes the first (least-recently-used) entry.
 *
 * Cache size: 20 conversations, 200 messages each (~2.4MB worst case).
 */
import type { Message, MessagePath } from './types';

// Capacity bounds: at most 20 cached conversations, 200 messages per entry.
export const MAX_CACHED_CONVERSATIONS = 20;
export const MAX_MESSAGES_PER_ENTRY = 200;

export interface CacheEntry {
  // Messages for the conversation, as last seen in component state.
  messages: Message[];
  // Content keys already observed — used to dedup mesh-duplicated deliveries.
  seenContent: Set<string>;
  // Whether older messages exist beyond the cached page (restored into the hook's pagination state).
  hasOlderMessages: boolean;
}

// Module-level store; the Map's insertion order encodes LRU (first) → MRU (last).
const cache = new Map<string, CacheEntry>();
/** Look up a cached entry; a hit is re-inserted so it becomes most-recently-used. */
export function get(id: string): CacheEntry | undefined {
  const hit = cache.get(id);
  if (hit === undefined) {
    return undefined;
  }
  // Deleting and re-adding moves the key to the Map's tail (MRU position).
  cache.delete(id);
  cache.set(id, hit);
  return hit;
}
/**
 * Insert or update an entry at MRU position, evicting the LRU entry if over capacity.
 *
 * If the entry holds more than MAX_MESSAGES_PER_ENTRY messages, it is trimmed to the
 * most recent ones (by received_at) and hasOlderMessages is forced true, since older
 * history was dropped and must be re-fetchable via pagination.
 */
export function set(id: string, entry: CacheEntry): void {
  if (entry.messages.length > MAX_MESSAGES_PER_ENTRY) {
    // Keep the newest messages while PRESERVING the array's original relative
    // order. (Sorting newest-first and keeping that order — as before — would
    // scramble display order, since messages are appended newest-last elsewhere.)
    const source = entry.messages;
    const keepIdx = new Set(
      source
        .map((_, i) => i)
        .sort((a, b) => source[b].received_at - source[a].received_at)
        .slice(0, MAX_MESSAGES_PER_ENTRY)
    );
    entry = {
      ...entry,
      messages: source.filter((_, i) => keepIdx.has(i)),
      hasOlderMessages: true,
    };
  }
  // Delete-then-set moves the key to the Map's tail (MRU position).
  cache.delete(id);
  cache.set(id, entry);
  // Evict the least-recently-used entry (the Map's first key) if over capacity.
  if (cache.size > MAX_CACHED_CONVERSATIONS) {
    const lruKey = cache.keys().next().value as string;
    cache.delete(lruKey);
  }
}
/**
 * Add a live WebSocket message to a cached (non-active) conversation, with dedup.
 *
 * No-op if the conversation is not cached. Deduplicates both by content key
 * (same message arriving via multiple mesh paths) and by backend message ID.
 * Does not promote the entry to MRU — only explicit get/set do that.
 */
export function addMessage(id: string, msg: Message, contentKey: string): void {
  const entry = cache.get(id);
  if (!entry) return;
  if (entry.seenContent.has(contentKey)) return;
  if (entry.messages.some((m) => m.id === msg.id)) return;
  entry.seenContent.add(contentKey);
  entry.messages = [...entry.messages, msg];
  // Trim if over limit: drop the oldest by received_at while PRESERVING the
  // array's original relative order (a plain sort would reorder newest-first).
  if (entry.messages.length > MAX_MESSAGES_PER_ENTRY) {
    const source = entry.messages;
    const keepIdx = new Set(
      source
        .map((_, i) => i)
        .sort((a, b) => source[b].received_at - source[a].received_at)
        .slice(0, MAX_MESSAGES_PER_ENTRY)
    );
    entry.messages = source.filter((_, i) => keepIdx.has(i));
    // Older history was dropped here, so mark it fetchable — consistent with set().
    entry.hasOlderMessages = true;
  }
}
/** Scan all cached entries for a message ID and update its ack count (and paths, if given). */
export function updateAck(messageId: number, ackCount: number, paths?: MessagePath[]): void {
  for (const entry of cache.values()) {
    const idx = entry.messages.findIndex((m) => m.id === messageId);
    if (idx < 0) continue;
    // Copy-on-write: replace the array and the message object so consumers
    // holding the old references are unaffected.
    const next = entry.messages.slice();
    const patched = { ...next[idx], acked: ackCount };
    if (paths !== undefined) {
      patched.paths = paths;
    }
    next[idx] = patched;
    entry.messages = next;
    // Message IDs are unique across conversations — stop at the first match.
    return;
  }
}
/**
 * Compare fetched messages against current state.
 * Returns a merged array if anything differs (new messages or ack changes),
 * or null when the cache is already consistent (happy path — no rerender needed).
 * Older paginated messages absent from the fetched page are preserved.
 */
export function reconcile(current: Message[], fetched: Message[]): Message[] | null {
  // Index the current acks by message ID for O(1) lookups.
  const ackById = new Map<number, number>();
  current.forEach((m) => ackById.set(m.id, m.acked));

  // A fetched message that is missing locally, or whose ack count moved, forces a merge.
  const differs = fetched.some(
    (m) => !ackById.has(m.id) || ackById.get(m.id) !== m.acked
  );
  if (!differs) return null;

  // Merge: fresh recent page first, then any older paginated messages not in the fetch.
  const fetchedIds = new Set(fetched.map((m) => m.id));
  const olderOnly = current.filter((m) => !fetchedIds.has(m.id));
  return [...fetched, ...olderOnly];
}
/** Evict a specific conversation from the cache (e.g. after the channel/contact is deleted). */
export function remove(id: string): void {
  cache.delete(id);
}

/** Clear the entire cache (also used by tests to reset module-level state). */
export function clear(): void {
  cache.clear();
}

/** Get current cache size (for testing). */
export function size(): number {
  return cache.size;
}

View File

@@ -0,0 +1,380 @@
/**
* Tests for the LRU message cache.
*/
import { describe, it, expect, beforeEach } from 'vitest';
import * as messageCache from '../messageCache';
import { MAX_CACHED_CONVERSATIONS, MAX_MESSAGES_PER_ENTRY } from '../messageCache';
import type { Message } from '../types';
/** Build a Message fixture; any field can be overridden per-test. */
function createMessage(overrides: Partial<Message> = {}): Message {
  const defaults: Message = {
    id: 1,
    type: 'CHAN',
    conversation_key: 'channel123',
    text: 'Hello world',
    sender_timestamp: 1700000000,
    received_at: 1700000001,
    paths: null,
    txt_type: 0,
    signature: null,
    outgoing: false,
    acked: 0,
  };
  return { ...defaults, ...overrides };
}
/** Build a CacheEntry fixture whose seenContent mirrors the given messages. */
function createEntry(messages: Message[] = [], hasOlderMessages = false): messageCache.CacheEntry {
  const seenContent = new Set<string>(
    messages.map((msg) => `${msg.type}-${msg.conversation_key}-${msg.text}-${msg.sender_timestamp}`)
  );
  return { messages, seenContent, hasOlderMessages };
}
describe('messageCache', () => {
  // The cache is module-level state, so every test starts from empty.
  beforeEach(() => {
    messageCache.clear();
  });

  // Basic storage, retrieval, and per-entry trimming semantics.
  describe('get/set', () => {
    it('returns undefined for missing entries', () => {
      expect(messageCache.get('nonexistent')).toBeUndefined();
    });

    it('stores and retrieves entries', () => {
      const msg = createMessage();
      const entry = createEntry([msg], true);
      messageCache.set('conv1', entry);
      const result = messageCache.get('conv1');
      expect(result).toBeDefined();
      expect(result!.messages).toHaveLength(1);
      expect(result!.messages[0].text).toBe('Hello world');
      expect(result!.hasOlderMessages).toBe(true);
    });

    it('trims messages to MAX_MESSAGES_PER_ENTRY on set', () => {
      const messages = Array.from({ length: MAX_MESSAGES_PER_ENTRY + 50 }, (_, i) =>
        createMessage({ id: i, received_at: 1700000000 + i })
      );
      messageCache.set('conv1', createEntry(messages));
      const entry = messageCache.get('conv1');
      expect(entry!.messages).toHaveLength(MAX_MESSAGES_PER_ENTRY);
    });

    it('keeps the most recent messages when trimming', () => {
      const messages = Array.from({ length: MAX_MESSAGES_PER_ENTRY + 10 }, (_, i) =>
        createMessage({ id: i, received_at: 1700000000 + i })
      );
      messageCache.set('conv1', createEntry(messages));
      const entry = messageCache.get('conv1');
      // Most recent message (highest received_at) should be present
      const maxReceivedAt = MAX_MESSAGES_PER_ENTRY + 10 - 1;
      expect(entry!.messages.some((m) => m.received_at === 1700000000 + maxReceivedAt)).toBe(true);
      // Oldest messages should be trimmed
      expect(entry!.messages.some((m) => m.received_at === 1700000000)).toBe(false);
    });

    it('sets hasOlderMessages to true when trimming', () => {
      const messages = Array.from({ length: MAX_MESSAGES_PER_ENTRY + 1 }, (_, i) =>
        createMessage({ id: i, received_at: 1700000000 + i })
      );
      messageCache.set('conv1', createEntry(messages, false));
      const entry = messageCache.get('conv1');
      expect(entry!.hasOlderMessages).toBe(true);
    });

    it('overwrites existing entries', () => {
      const entry1 = createEntry([createMessage({ text: 'first' })]);
      const entry2 = createEntry([createMessage({ text: 'second' })]);
      messageCache.set('conv1', entry1);
      messageCache.set('conv1', entry2);
      const result = messageCache.get('conv1');
      expect(result!.messages[0].text).toBe('second');
      expect(messageCache.size()).toBe(1);
    });
  });

  // LRU ordering: both get() and set() must promote an entry to MRU.
  describe('LRU eviction', () => {
    it('evicts least-recently-used entry when over capacity', () => {
      // Fill cache to capacity + 1
      for (let i = 0; i <= MAX_CACHED_CONVERSATIONS; i++) {
        messageCache.set(`conv${i}`, createEntry([createMessage({ id: i })]));
      }
      // conv0 (LRU) should be evicted
      expect(messageCache.get('conv0')).toBeUndefined();
      // Remaining entries should still exist
      for (let i = 1; i <= MAX_CACHED_CONVERSATIONS; i++) {
        expect(messageCache.get(`conv${i}`)).toBeDefined();
      }
    });

    it('promotes accessed entries to MRU', () => {
      // Fill cache to capacity
      for (let i = 0; i < MAX_CACHED_CONVERSATIONS; i++) {
        messageCache.set(`conv${i}`, createEntry([createMessage({ id: i })]));
      }
      // Access conv0, promoting it to MRU
      messageCache.get('conv0');
      // Add one more - conv1 should now be LRU and get evicted
      messageCache.set('conv_new', createEntry());
      expect(messageCache.get('conv0')).toBeDefined(); // Was promoted
      expect(messageCache.get('conv1')).toBeUndefined(); // Was LRU, evicted
      expect(messageCache.get('conv_new')).toBeDefined();
    });

    it('promotes set entries to MRU', () => {
      for (let i = 0; i < MAX_CACHED_CONVERSATIONS; i++) {
        messageCache.set(`conv${i}`, createEntry([createMessage({ id: i })]));
      }
      // Re-set conv0 (promotes to MRU)
      messageCache.set('conv0', createEntry([createMessage({ id: 100 })]));
      // Add one more - conv1 should be LRU and get evicted
      messageCache.set('conv_new', createEntry());
      expect(messageCache.get('conv0')).toBeDefined();
      expect(messageCache.get('conv1')).toBeUndefined();
    });
  });

  // Live WS writes into cached (non-active) conversations.
  describe('addMessage', () => {
    it('adds message to existing cached conversation', () => {
      messageCache.set('conv1', createEntry([]));
      const msg = createMessage({ id: 10, text: 'New message' });
      messageCache.addMessage('conv1', msg, 'CHAN-channel123-New message-1700000000');
      const entry = messageCache.get('conv1');
      expect(entry!.messages).toHaveLength(1);
      expect(entry!.messages[0].text).toBe('New message');
    });

    it('deduplicates by content key', () => {
      messageCache.set('conv1', createEntry([]));
      const msg1 = createMessage({ id: 10, text: 'Hello' });
      const contentKey = 'CHAN-channel123-Hello-1700000000';
      messageCache.addMessage('conv1', msg1, contentKey);
      // Same content key, different message id
      const msg2 = createMessage({ id: 11, text: 'Hello' });
      messageCache.addMessage('conv1', msg2, contentKey);
      const entry = messageCache.get('conv1');
      expect(entry!.messages).toHaveLength(1);
    });

    it('deduplicates by message id', () => {
      messageCache.set('conv1', createEntry([createMessage({ id: 10, text: 'Original' })]));
      // Same id, different content key
      const msg = createMessage({ id: 10, text: 'Different' });
      messageCache.addMessage('conv1', msg, 'CHAN-channel123-Different-1700000000');
      const entry = messageCache.get('conv1');
      expect(entry!.messages).toHaveLength(1);
      expect(entry!.messages[0].text).toBe('Original');
    });

    it('trims to MAX_MESSAGES_PER_ENTRY when adding to a full entry', () => {
      const messages = Array.from({ length: MAX_MESSAGES_PER_ENTRY }, (_, i) =>
        createMessage({ id: i, received_at: 1700000000 + i })
      );
      messageCache.set('conv1', createEntry(messages));
      // Add one more (newest)
      const newMsg = createMessage({
        id: MAX_MESSAGES_PER_ENTRY,
        text: 'newest',
        received_at: 1700000000 + MAX_MESSAGES_PER_ENTRY,
      });
      messageCache.addMessage('conv1', newMsg, `CHAN-channel123-newest-${newMsg.sender_timestamp}`);
      const entry = messageCache.get('conv1');
      expect(entry!.messages).toHaveLength(MAX_MESSAGES_PER_ENTRY);
      // Newest message should be kept
      expect(entry!.messages.some((m) => m.id === MAX_MESSAGES_PER_ENTRY)).toBe(true);
      // Oldest message (id=0) should be trimmed
      expect(entry!.messages.some((m) => m.id === 0)).toBe(false);
    });

    it('ignores messages for non-cached conversations', () => {
      const msg = createMessage({ id: 10 });
      // Should not throw
      messageCache.addMessage('nonexistent', msg, 'key');
      expect(messageCache.size()).toBe(0);
    });
  });

  // Cross-entry ack updates from the WebSocket ack handler.
  describe('updateAck', () => {
    it('updates ack count for a message in cache', () => {
      const msg = createMessage({ id: 42, acked: 0 });
      messageCache.set('conv1', createEntry([msg]));
      messageCache.updateAck(42, 3);
      const entry = messageCache.get('conv1');
      expect(entry!.messages[0].acked).toBe(3);
    });

    it('updates paths when provided', () => {
      const msg = createMessage({ id: 42, acked: 0, paths: null });
      messageCache.set('conv1', createEntry([msg]));
      const newPaths = [{ path: '1A2B', received_at: 1700000000 }];
      messageCache.updateAck(42, 1, newPaths);
      const entry = messageCache.get('conv1');
      expect(entry!.messages[0].acked).toBe(1);
      expect(entry!.messages[0].paths).toEqual(newPaths);
    });

    it('does not modify paths when not provided', () => {
      const existingPaths = [{ path: '1A2B', received_at: 1700000000 }];
      const msg = createMessage({ id: 42, acked: 1, paths: existingPaths });
      messageCache.set('conv1', createEntry([msg]));
      messageCache.updateAck(42, 2);
      const entry = messageCache.get('conv1');
      expect(entry!.messages[0].acked).toBe(2);
      expect(entry!.messages[0].paths).toEqual(existingPaths);
    });

    it('scans across multiple cached conversations', () => {
      const msg1 = createMessage({ id: 10, conversation_key: 'conv1', acked: 0 });
      const msg2 = createMessage({ id: 20, conversation_key: 'conv2', acked: 0 });
      messageCache.set('conv1', createEntry([msg1]));
      messageCache.set('conv2', createEntry([msg2]));
      messageCache.updateAck(20, 5);
      expect(messageCache.get('conv1')!.messages[0].acked).toBe(0); // Unchanged
      expect(messageCache.get('conv2')!.messages[0].acked).toBe(5); // Updated
    });

    it('does nothing for unknown message id', () => {
      const msg = createMessage({ id: 42, acked: 0 });
      messageCache.set('conv1', createEntry([msg]));
      messageCache.updateAck(999, 3);
      expect(messageCache.get('conv1')!.messages[0].acked).toBe(0);
    });
  });

  // Targeted eviction (channel/contact deletion path).
  describe('remove', () => {
    it('removes a specific conversation', () => {
      messageCache.set('conv1', createEntry());
      messageCache.set('conv2', createEntry());
      messageCache.remove('conv1');
      expect(messageCache.get('conv1')).toBeUndefined();
      expect(messageCache.get('conv2')).toBeDefined();
      expect(messageCache.size()).toBe(1);
    });

    it('does nothing for non-existent key', () => {
      messageCache.set('conv1', createEntry());
      messageCache.remove('nonexistent');
      expect(messageCache.size()).toBe(1);
    });
  });

  // Pure reconciliation between cached state and a freshly fetched page.
  describe('reconcile', () => {
    it('returns null when cache matches fetched data (happy path)', () => {
      const msgs = [
        createMessage({ id: 1, acked: 2 }),
        createMessage({ id: 2, acked: 0 }),
        createMessage({ id: 3, acked: 1 }),
      ];
      const fetched = [
        createMessage({ id: 1, acked: 2 }),
        createMessage({ id: 2, acked: 0 }),
        createMessage({ id: 3, acked: 1 }),
      ];
      expect(messageCache.reconcile(msgs, fetched)).toBeNull();
    });

    it('detects new messages missing from cache', () => {
      const current = [createMessage({ id: 1 }), createMessage({ id: 2 })];
      const fetched = [
        createMessage({ id: 1 }),
        createMessage({ id: 2 }),
        createMessage({ id: 3, text: 'missed via WS' }),
      ];
      const merged = messageCache.reconcile(current, fetched);
      expect(merged).not.toBeNull();
      expect(merged!.map((m) => m.id)).toEqual([1, 2, 3]);
    });

    it('detects stale ack counts', () => {
      const current = [createMessage({ id: 1, acked: 0 })];
      const fetched = [createMessage({ id: 1, acked: 3 })];
      const merged = messageCache.reconcile(current, fetched);
      expect(merged).not.toBeNull();
      expect(merged![0].acked).toBe(3);
    });

    it('preserves older paginated messages not in fetch', () => {
      // Current state has recent page + older paginated messages
      const current = [
        createMessage({ id: 3 }),
        createMessage({ id: 2 }),
        createMessage({ id: 1 }), // older, from pagination
      ];
      // Fetch only returns recent page with a new message
      const fetched = [
        createMessage({ id: 4, text: 'new' }),
        createMessage({ id: 3 }),
        createMessage({ id: 2 }),
      ];
      const merged = messageCache.reconcile(current, fetched);
      expect(merged).not.toBeNull();
      // Should have fetched page + older paginated message
      expect(merged!.map((m) => m.id)).toEqual([4, 3, 2, 1]);
    });

    it('returns null for empty fetched and empty current', () => {
      expect(messageCache.reconcile([], [])).toBeNull();
    });

    it('detects difference when current is empty but fetch has messages', () => {
      const fetched = [createMessage({ id: 1 })];
      const merged = messageCache.reconcile([], fetched);
      expect(merged).not.toBeNull();
      expect(merged!).toHaveLength(1);
    });
  });

  describe('clear', () => {
    it('removes all entries', () => {
      messageCache.set('conv1', createEntry());
      messageCache.set('conv2', createEntry());
      messageCache.set('conv3', createEntry());
      messageCache.clear();
      expect(messageCache.size()).toBe(0);
      expect(messageCache.get('conv1')).toBeUndefined();
    });
  });
});