Finish frontend phase 3

This commit is contained in:
Jack Kingsman
2026-03-16 17:17:11 -07:00
parent 0e4828bf72
commit ffb5fa51c1
5 changed files with 232 additions and 243 deletions

View File

@@ -1,12 +1,174 @@
import { useCallback, useEffect, useRef, useState } from 'react';
import { toast } from '../components/ui/sonner';
import { api, isAbortError } from '../api';
import * as messageCache from '../messageCache';
import type { Conversation, Message, MessagePath } from '../types';
import { getMessageContentKey } from '../utils/messageIdentity';
const MAX_PENDING_ACKS = 500;
const MESSAGE_PAGE_SIZE = 200;

/** Maximum number of conversations kept before the LRU entry is evicted. */
export const MAX_CACHED_CONVERSATIONS = 20;
/** Maximum number of messages retained per cached conversation. */
export const MAX_MESSAGES_PER_ENTRY = 200;

/** Publicly visible snapshot of a cached conversation. */
interface CachedConversationEntry {
  messages: Message[];
  hasOlderMessages: boolean;
}

/** Internal entry: adds a content-key set used for duplicate detection. */
interface InternalCachedConversationEntry extends CachedConversationEntry {
  contentKeys: Set<string>;
}

/**
 * LRU cache of recently-visited conversations.
 *
 * Relies on Map insertion-order semantics: the most recently used entry is
 * always last, so eviction removes the first (least-recently-used) key.
 */
export class ConversationMessageCache {
  private readonly cache = new Map<string, InternalCachedConversationEntry>();

  /** Evict the least-recently-used entry if the cache is over capacity. */
  private evictIfOverCapacity(): void {
    if (this.cache.size > MAX_CACHED_CONVERSATIONS) {
      const lruKey = this.cache.keys().next().value as string;
      this.cache.delete(lruKey);
    }
  }

  /** Return the newest MAX_MESSAGES_PER_ENTRY messages, ordered newest-first. */
  private static trimMessages(messages: Message[]): Message[] {
    return [...messages]
      .sort((a, b) => b.received_at - a.received_at)
      .slice(0, MAX_MESSAGES_PER_ENTRY);
  }

  /**
   * Get a cached entry and promote it to most-recently-used.
   * Returns a snapshot without the internal contentKeys set; the messages
   * array itself is shared with the cache, not cloned.
   */
  get(id: string): CachedConversationEntry | undefined {
    const entry = this.cache.get(id);
    if (!entry) return undefined;
    // Promote to MRU: delete and re-insert at the end of the Map.
    this.cache.delete(id);
    this.cache.set(id, entry);
    return {
      messages: entry.messages,
      hasOlderMessages: entry.hasOlderMessages,
    };
  }

  /** Insert or replace an entry at MRU position, evicting the LRU if over capacity. */
  set(id: string, entry: CachedConversationEntry): void {
    // NOTE(review): content keys are computed BEFORE trimming, so keys of
    // trimmed-out messages remain in the dedup set — confirm this is intended.
    const contentKeys = new Set(entry.messages.map((message) => getMessageContentKey(message)));
    if (entry.messages.length > MAX_MESSAGES_PER_ENTRY) {
      entry = {
        ...entry,
        messages: ConversationMessageCache.trimMessages(entry.messages),
        // Messages were dropped, so older ones definitely exist server-side.
        hasOlderMessages: true,
      };
    }
    const internalEntry: InternalCachedConversationEntry = {
      ...entry,
      contentKeys,
    };
    // Delete first so re-insertion moves an existing key to the MRU position.
    this.cache.delete(id);
    this.cache.set(id, internalEntry);
    this.evictIfOverCapacity();
  }

  /**
   * Add a single message with duplicate detection.
   * Returns true if the message was new, false if it was a duplicate.
   */
  addMessage(id: string, msg: Message): boolean {
    const entry = this.cache.get(id);
    const contentKey = getMessageContentKey(msg);
    if (!entry) {
      // Auto-create a minimal entry for a conversation we have not visited.
      this.cache.set(id, {
        messages: [msg],
        hasOlderMessages: true,
        contentKeys: new Set([contentKey]),
      });
      this.evictIfOverCapacity();
      return true;
    }
    // Dedup by content key first, then by message id.
    if (entry.contentKeys.has(contentKey)) return false;
    if (entry.messages.some((message) => message.id === msg.id)) return false;
    entry.contentKeys.add(contentKey);
    entry.messages = [...entry.messages, msg];
    if (entry.messages.length > MAX_MESSAGES_PER_ENTRY) {
      // NOTE(review): trimming reorders messages newest-first and leaves
      // stale keys in contentKeys — confirm both are acceptable here.
      entry.messages = ConversationMessageCache.trimMessages(entry.messages);
    }
    // Promote to MRU so actively-messaged conversations are not evicted.
    this.cache.delete(id);
    this.cache.set(id, entry);
    return true;
  }

  /** Find a message by id across all entries and update its ack count/paths. */
  updateAck(messageId: number, ackCount: number, paths?: MessagePath[]): void {
    for (const entry of this.cache.values()) {
      const index = entry.messages.findIndex((message) => message.id === messageId);
      if (index < 0) continue;
      const current = entry.messages[index];
      const updated = [...entry.messages];
      updated[index] = {
        ...current,
        // Ack counts only ever increase (max semantics).
        acked: Math.max(current.acked, ackCount),
        // Only replace paths when the new list is at least as complete.
        ...(paths !== undefined && paths.length >= (current.paths?.length ?? 0) && { paths }),
      };
      entry.messages = updated;
      return; // Message ids are unique; stop after the first match.
    }
  }

  /** Evict a specific conversation. */
  remove(id: string): void {
    this.cache.delete(id);
  }

  /** Move cached state from one conversation id to another, merging if both exist. */
  rename(oldId: string, newId: string): void {
    if (oldId === newId) return;
    const oldEntry = this.cache.get(oldId);
    if (!oldEntry) return;
    const newEntry = this.cache.get(newId);
    // Simple case: target id has no cached state, so just move the entry.
    if (!newEntry) {
      this.cache.delete(oldId);
      this.cache.set(newId, oldEntry);
      return;
    }
    // Both ids have cached state: merge, deduplicating by message id.
    const mergedMessages = [...newEntry.messages];
    const seenIds = new Set(mergedMessages.map((message) => message.id));
    for (const message of oldEntry.messages) {
      if (!seenIds.has(message.id)) {
        mergedMessages.push(message);
        seenIds.add(message.id);
      }
    }
    this.cache.delete(oldId);
    this.cache.set(newId, {
      messages: mergedMessages,
      hasOlderMessages: newEntry.hasOlderMessages || oldEntry.hasOlderMessages,
      contentKeys: new Set([...newEntry.contentKeys, ...oldEntry.contentKeys]),
    });
  }

  /** Clear the whole cache. */
  clear(): void {
    this.cache.clear();
  }
}
/**
 * Compare a freshly fetched page of messages against the current in-memory list.
 *
 * Returns null when nothing relevant differs (no re-render needed); otherwise
 * returns a merged array consisting of the fetched page followed by any older
 * paginated messages that the fetched page did not include.
 */
export function reconcileConversationMessages(
  current: Message[],
  fetched: Message[]
): Message[] | null {
  // Snapshot only the fields we compare: ack count, path count, and text.
  const snapshotById = new Map(
    current.map((message) => [
      message.id,
      { acked: message.acked, pathsLen: message.paths?.length ?? 0, text: message.text },
    ])
  );
  const differs = fetched.some((message) => {
    const snapshot = snapshotById.get(message.id);
    return (
      snapshot === undefined ||
      snapshot.acked !== message.acked ||
      snapshot.pathsLen !== (message.paths?.length ?? 0) ||
      snapshot.text !== message.text
    );
  });
  if (!differs) return null;
  // Merge: fresh page first, then older paginated messages it does not cover.
  const fetchedIds = new Set(fetched.map((message) => message.id));
  return [...fetched, ...current.filter((message) => !fetchedIds.has(message.id))];
}
export const conversationMessageCache = new ConversationMessageCache();
interface PendingAckUpdate {
ackCount: number;
@@ -167,6 +329,7 @@ export function useConversationMessages(
const pendingReconnectReconcileRef = useRef(false);
const messagesRef = useRef<Message[]>([]);
const loadingOlderRef = useRef(false);
const loadingNewerRef = useRef(false);
const hasOlderMessagesRef = useRef(false);
const hasNewerMessagesRef = useRef(false);
const prevConversationIdRef = useRef<string | null>(null);
@@ -181,6 +344,10 @@ export function useConversationMessages(
loadingOlderRef.current = loadingOlder;
}, [loadingOlder]);
useEffect(() => {
loadingNewerRef.current = loadingNewer;
}, [loadingNewer]);
useEffect(() => {
hasOlderMessagesRef.current = hasOlderMessages;
}, [hasOlderMessages]);
@@ -230,7 +397,7 @@ export function useConversationMessages(
}
const messagesWithPendingAck = data.map((msg) => applyPendingAck(msg));
const merged = messageCache.reconcile(messagesRef.current, messagesWithPendingAck);
const merged = reconcileConversationMessages(messagesRef.current, messagesWithPendingAck);
const nextMessages = merged ?? messagesRef.current;
if (merged) {
setMessages(merged);
@@ -272,7 +439,7 @@ export function useConversationMessages(
const dataWithPendingAck = data.map((msg) => applyPendingAck(msg));
setHasOlderMessages(dataWithPendingAck.length >= MESSAGE_PAGE_SIZE);
const merged = messageCache.reconcile(messagesRef.current, dataWithPendingAck);
const merged = reconcileConversationMessages(messagesRef.current, dataWithPendingAck);
if (!merged) return;
setMessages(merged);
@@ -296,7 +463,7 @@ export function useConversationMessages(
}
const conversationId = activeConversation.id;
const oldestMessage = messages.reduce(
const oldestMessage = messagesRef.current.reduce(
(oldest, msg) => {
if (!oldest) return msg;
if (msg.received_at < oldest.received_at) return msg;
@@ -357,13 +524,19 @@ export function useConversationMessages(
loadingOlderRef.current = false;
setLoadingOlder(false);
}
}, [activeConversation, applyPendingAck, messages, syncSeenContent]);
}, [activeConversation, applyPendingAck, syncSeenContent]);
const fetchNewerMessages = useCallback(async () => {
if (!isMessageConversation(activeConversation) || loadingNewer || !hasNewerMessages) return;
if (
!isMessageConversation(activeConversation) ||
loadingNewerRef.current ||
!hasNewerMessagesRef.current
) {
return;
}
const conversationId = activeConversation.id;
const newestMessage = messages.reduce(
const newestMessage = messagesRef.current.reduce(
(newest, msg) => {
if (!newest) return msg;
if (msg.received_at > newest.received_at) return msg;
@@ -374,6 +547,7 @@ export function useConversationMessages(
);
if (!newestMessage) return;
loadingNewerRef.current = true;
setLoadingNewer(true);
const controller = new AbortController();
newerAbortControllerRef.current = controller;
@@ -423,28 +597,22 @@ export function useConversationMessages(
if (newerAbortControllerRef.current === controller) {
newerAbortControllerRef.current = null;
}
loadingNewerRef.current = false;
setLoadingNewer(false);
}
}, [
activeConversation,
applyPendingAck,
hasNewerMessages,
loadingNewer,
messages,
reconcileFromBackend,
]);
}, [activeConversation, applyPendingAck, reconcileFromBackend]);
const jumpToBottom = useCallback(() => {
if (!activeConversation) return;
setHasNewerMessages(false);
messageCache.remove(activeConversation.id);
conversationMessageCache.remove(activeConversation.id);
void fetchLatestMessages(true);
}, [activeConversation, fetchLatestMessages]);
const reloadCurrentConversation = useCallback(() => {
if (!isMessageConversation(activeConversation)) return;
setHasNewerMessages(false);
messageCache.remove(activeConversation.id);
conversationMessageCache.remove(activeConversation.id);
setReloadVersion((current) => current + 1);
}, [activeConversation]);
@@ -506,7 +674,7 @@ export function useConversationMessages(
messagesRef.current.length > 0 &&
!hasNewerMessagesRef.current
) {
messageCache.set(prevId, {
conversationMessageCache.set(prevId, {
messages: messagesRef.current,
hasOlderMessages: hasOlderMessagesRef.current,
});
@@ -549,7 +717,7 @@ export function useConversationMessages(
setMessagesLoading(false);
});
} else {
const cached = messageCache.get(activeConversation.id);
const cached = conversationMessageCache.get(activeConversation.id);
if (cached) {
setMessages(cached.messages);
seenMessageContent.current = new Set(
@@ -645,7 +813,7 @@ export function useConversationMessages(
const receiveMessageAck = useCallback(
(messageId: number, ackCount: number, paths?: MessagePath[]) => {
updateMessageAck(messageId, ackCount, paths);
messageCache.updateAck(messageId, ackCount, paths);
conversationMessageCache.updateAck(messageId, ackCount, paths);
},
[updateMessageAck]
);
@@ -670,7 +838,10 @@ export function useConversationMessages(
}
return {
added: messageCache.addMessage(msgWithPendingAck.conversation_key, msgWithPendingAck),
added: conversationMessageCache.addMessage(
msgWithPendingAck.conversation_key,
msgWithPendingAck
),
activeConversation: false,
};
},
@@ -678,15 +849,15 @@ export function useConversationMessages(
);
const renameConversationMessages = useCallback((oldId: string, newId: string) => {
messageCache.rename(oldId, newId);
conversationMessageCache.rename(oldId, newId);
}, []);
const removeConversationMessages = useCallback((conversationId: string) => {
messageCache.remove(conversationId);
conversationMessageCache.remove(conversationId);
}, []);
const clearConversationMessages = useCallback(() => {
messageCache.clear();
conversationMessageCache.clear();
}, []);
return {

View File

@@ -1,187 +0,0 @@
/**
* LRU message cache for recently-visited conversations.
*
* Uses Map insertion-order semantics: the most recently used entry
* is always at the end. Eviction removes the first (least-recently-used) entry.
*
* Cache size: 20 conversations, 200 messages each (~2.4MB worst case).
*/
import type { Message, MessagePath } from './types';
import { getMessageContentKey } from './utils/messageIdentity';
/** Maximum number of conversations kept before the LRU entry is evicted. */
export const MAX_CACHED_CONVERSATIONS = 20;
/** Maximum number of messages retained per cached conversation. */
export const MAX_MESSAGES_PER_ENTRY = 200;
/** Publicly visible snapshot of a cached conversation. */
interface CacheEntry {
  messages: Message[];
  hasOlderMessages: boolean;
}
/** Internal entry: adds a content-key set used for duplicate detection. */
interface InternalCacheEntry extends CacheEntry {
  contentKeys: Set<string>;
}
// Module-level LRU store. Map iteration order is insertion order, so the
// first key is always the least-recently-used conversation.
const cache = new Map<string, InternalCacheEntry>();
/** Get a cached entry and promote it to most-recently-used. */
export function get(id: string): CacheEntry | undefined {
  const entry = cache.get(id);
  if (!entry) return undefined;
  // Promote to MRU: delete and re-insert
  cache.delete(id);
  cache.set(id, entry);
  // Return a snapshot without the internal contentKeys set; the messages
  // array itself is shared with the cache, not cloned.
  return {
    messages: entry.messages,
    hasOlderMessages: entry.hasOlderMessages,
  };
}
/** Insert or update an entry at MRU position, evicting LRU if over capacity. */
export function set(id: string, entry: CacheEntry): void {
  // NOTE(review): content keys are computed BEFORE trimming, so keys of
  // trimmed-out messages remain in the dedup set — confirm this is intended.
  const contentKeys = new Set(entry.messages.map((message) => getMessageContentKey(message)));
  // Trim to most recent messages to bound memory
  if (entry.messages.length > MAX_MESSAGES_PER_ENTRY) {
    const trimmed = [...entry.messages]
      .sort((a, b) => b.received_at - a.received_at)
      .slice(0, MAX_MESSAGES_PER_ENTRY);
    entry = { ...entry, messages: trimmed, hasOlderMessages: true };
  }
  const internalEntry: InternalCacheEntry = {
    ...entry,
    contentKeys,
  };
  // Remove first so re-insert moves to end
  cache.delete(id);
  cache.set(id, internalEntry);
  // Evict LRU (first entry) if over capacity
  if (cache.size > MAX_CACHED_CONVERSATIONS) {
    const lruKey = cache.keys().next().value as string;
    cache.delete(lruKey);
  }
}
/** Add a message to a cached conversation with dedup. Returns true if new, false if duplicate. */
export function addMessage(id: string, msg: Message): boolean {
  const entry = cache.get(id);
  const contentKey = getMessageContentKey(msg);
  if (!entry) {
    // Auto-create a minimal entry for never-visited conversations
    cache.set(id, {
      messages: [msg],
      hasOlderMessages: true,
      contentKeys: new Set([contentKey]),
    });
    // Evict LRU if over capacity
    if (cache.size > MAX_CACHED_CONVERSATIONS) {
      const lruKey = cache.keys().next().value as string;
      cache.delete(lruKey);
    }
    return true;
  }
  // Dedup by content key first, then by message id.
  if (entry.contentKeys.has(contentKey)) return false;
  if (entry.messages.some((m) => m.id === msg.id)) return false;
  entry.contentKeys.add(contentKey);
  entry.messages = [...entry.messages, msg];
  // Trim if over limit (drop oldest by received_at)
  // NOTE(review): trimming reorders messages newest-first and leaves stale
  // keys in contentKeys — confirm both are acceptable.
  if (entry.messages.length > MAX_MESSAGES_PER_ENTRY) {
    entry.messages = [...entry.messages]
      .sort((a, b) => b.received_at - a.received_at)
      .slice(0, MAX_MESSAGES_PER_ENTRY);
  }
  // Promote to MRU so actively-messaged conversations aren't evicted
  cache.delete(id);
  cache.set(id, entry);
  return true;
}
/** Scan all cached entries for a message ID and update its ack/paths. */
export function updateAck(messageId: number, ackCount: number, paths?: MessagePath[]): void {
  for (const entry of cache.values()) {
    const idx = entry.messages.findIndex((m) => m.id === messageId);
    if (idx >= 0) {
      const current = entry.messages[idx];
      const updated = [...entry.messages];
      updated[idx] = {
        ...current,
        // Ack counts only ever increase (max semantics).
        acked: Math.max(current.acked, ackCount),
        // Only replace paths when the new list is at least as long.
        ...(paths !== undefined && paths.length >= (current.paths?.length ?? 0) && { paths }),
      };
      entry.messages = updated;
      return; // Message IDs are unique, stop after first match
    }
  }
}
/**
 * Compare fetched messages against current state.
 * Returns merged array if there are differences (new messages or ack changes),
 * or null if the cache is already consistent (happy path — no rerender needed).
 * Preserves any older paginated messages not present in the fetched page.
 */
export function reconcile(current: Message[], fetched: Message[]): Message[] | null {
  // Snapshot only the fields we compare: ack count, path count, and text.
  const currentById = new Map<number, { acked: number; pathsLen: number; text: string }>();
  for (const m of current) {
    currentById.set(m.id, { acked: m.acked, pathsLen: m.paths?.length ?? 0, text: m.text });
  }
  let needsUpdate = false;
  for (const m of fetched) {
    const cur = currentById.get(m.id);
    if (
      !cur ||
      cur.acked !== m.acked ||
      cur.pathsLen !== (m.paths?.length ?? 0) ||
      cur.text !== m.text
    ) {
      needsUpdate = true;
      break;
    }
  }
  if (!needsUpdate) return null;
  // Merge: fresh recent page + any older paginated messages not in the fetch
  const fetchedIds = new Set(fetched.map((m) => m.id));
  const olderMessages = current.filter((m) => !fetchedIds.has(m.id));
  return [...fetched, ...olderMessages];
}
/** Evict a specific conversation from the cache. */
export function remove(id: string): void {
  cache.delete(id);
}
/** Move cached conversation state to a new conversation id. */
export function rename(oldId: string, newId: string): void {
  if (oldId === newId) return;
  const oldEntry = cache.get(oldId);
  if (!oldEntry) return;
  const newEntry = cache.get(newId);
  // Simple case: target id has no cached state, so just move the entry.
  if (!newEntry) {
    cache.delete(oldId);
    cache.set(newId, oldEntry);
    return;
  }
  // Both ids have cached state: merge, deduplicating by message id.
  const mergedMessages = [...newEntry.messages];
  const seenIds = new Set(mergedMessages.map((message) => message.id));
  for (const message of oldEntry.messages) {
    if (!seenIds.has(message.id)) {
      mergedMessages.push(message);
      seenIds.add(message.id);
    }
  }
  cache.delete(oldId);
  cache.set(newId, {
    messages: mergedMessages,
    hasOlderMessages: newEntry.hasOlderMessages || oldEntry.hasOlderMessages,
    contentKeys: new Set([...newEntry.contentKeys, ...oldEntry.contentKeys]),
  });
}
/** Clear the entire cache. */
export function clear(): void {
  cache.clear();
}

View File

@@ -8,12 +8,12 @@
* between backend and frontend - both sides test against the same data.
*/
import { describe, it, expect, beforeEach } from 'vitest';
import { describe, it, expect } from 'vitest';
import fixtures from './fixtures/websocket_events.json';
import { getStateKey } from '../utils/conversationState';
import { mergeContactIntoList } from '../utils/contactMerge';
import { ConversationMessageCache } from '../hooks/useConversationMessages';
import { getMessageContentKey } from '../utils/messageIdentity';
import * as messageCache from '../messageCache';
import type { Contact, Message } from '../types';
/**
@@ -25,6 +25,7 @@ interface MockState {
unreadCounts: Record<string, number>;
lastMessageTimes: Record<string, number>;
seenActiveContent: Set<string>;
messageCache: ConversationMessageCache;
}
function createMockState(): MockState {
@@ -33,6 +34,7 @@ function createMockState(): MockState {
unreadCounts: {},
lastMessageTimes: {},
seenActiveContent: new Set(),
messageCache: new ConversationMessageCache(),
};
}
@@ -68,7 +70,7 @@ function handleMessageEvent(
state.lastMessageTimes[stateKey] = msg.received_at;
if (!isForActiveConversation) {
const isNew = messageCache.addMessage(msg.conversation_key, msg);
const isNew = state.messageCache.addMessage(msg.conversation_key, msg);
if (!msg.outgoing && isNew) {
state.unreadCounts[stateKey] = (state.unreadCounts[stateKey] || 0) + 1;
unreadIncremented = true;
@@ -78,11 +80,6 @@ function handleMessageEvent(
return { added, unreadIncremented };
}
// Clear messageCache between tests to avoid cross-test contamination
beforeEach(() => {
messageCache.clear();
});
describe('Integration: Channel Message Events', () => {
const fixture = fixtures.channel_message;
@@ -342,11 +339,8 @@ describe('Integration: Contact Merge', () => {
// --- ACK + messageCache propagation tests ---
describe('Integration: ACK + messageCache propagation', () => {
beforeEach(() => {
messageCache.clear();
});
it('updateAck updates acked count on cached message', () => {
const messageCache = new ConversationMessageCache();
const msg: Message = {
id: 100,
type: 'PRIV',
@@ -372,6 +366,7 @@ describe('Integration: ACK + messageCache propagation', () => {
});
it('updateAck updates paths when longer', () => {
const messageCache = new ConversationMessageCache();
const msg: Message = {
id: 101,
type: 'PRIV',
@@ -401,6 +396,7 @@ describe('Integration: ACK + messageCache propagation', () => {
});
it('preserves higher existing ack count (max semantics)', () => {
const messageCache = new ConversationMessageCache();
const msg: Message = {
id: 102,
type: 'PRIV',
@@ -426,6 +422,7 @@ describe('Integration: ACK + messageCache propagation', () => {
});
it('is a no-op for unknown message ID', () => {
const messageCache = new ConversationMessageCache();
const msg: Message = {
id: 103,
type: 'PRIV',

View File

@@ -3,8 +3,12 @@
*/
import { describe, it, expect, beforeEach } from 'vitest';
import * as messageCache from '../messageCache';
import { MAX_CACHED_CONVERSATIONS, MAX_MESSAGES_PER_ENTRY } from '../messageCache';
import {
ConversationMessageCache,
MAX_CACHED_CONVERSATIONS,
MAX_MESSAGES_PER_ENTRY,
reconcileConversationMessages,
} from '../hooks/useConversationMessages';
import type { Message } from '../types';
function createMessage(overrides: Partial<Message> = {}): Message {
@@ -31,8 +35,10 @@ function createEntry(messages: Message[] = [], hasOlderMessages = false) {
}
describe('messageCache', () => {
let messageCache: ConversationMessageCache;
beforeEach(() => {
messageCache.clear();
messageCache = new ConversationMessageCache();
});
describe('get/set', () => {
@@ -337,7 +343,7 @@ describe('messageCache', () => {
createMessage({ id: 3, acked: 1 }),
];
expect(messageCache.reconcile(msgs, fetched)).toBeNull();
expect(reconcileConversationMessages(msgs, fetched)).toBeNull();
});
it('detects new messages missing from cache', () => {
@@ -348,7 +354,7 @@ describe('messageCache', () => {
createMessage({ id: 3, text: 'missed via WS' }),
];
const merged = messageCache.reconcile(current, fetched);
const merged = reconcileConversationMessages(current, fetched);
expect(merged).not.toBeNull();
expect(merged!.map((m) => m.id)).toEqual([1, 2, 3]);
});
@@ -357,7 +363,7 @@ describe('messageCache', () => {
const current = [createMessage({ id: 1, acked: 0 })];
const fetched = [createMessage({ id: 1, acked: 3 })];
const merged = messageCache.reconcile(current, fetched);
const merged = reconcileConversationMessages(current, fetched);
expect(merged).not.toBeNull();
expect(merged![0].acked).toBe(3);
});
@@ -376,20 +382,20 @@ describe('messageCache', () => {
createMessage({ id: 2 }),
];
const merged = messageCache.reconcile(current, fetched);
const merged = reconcileConversationMessages(current, fetched);
expect(merged).not.toBeNull();
// Should have fetched page + older paginated message
expect(merged!.map((m) => m.id)).toEqual([4, 3, 2, 1]);
});
it('returns null for empty fetched and empty current', () => {
expect(messageCache.reconcile([], [])).toBeNull();
expect(reconcileConversationMessages([], [])).toBeNull();
});
it('detects difference when current is empty but fetch has messages', () => {
const fetched = [createMessage({ id: 1 })];
const merged = messageCache.reconcile([], fetched);
const merged = reconcileConversationMessages([], fetched);
expect(merged).not.toBeNull();
expect(merged!).toHaveLength(1);
});
@@ -409,7 +415,7 @@ describe('messageCache', () => {
}),
];
const merged = messageCache.reconcile(current, fetched);
const merged = reconcileConversationMessages(current, fetched);
expect(merged).not.toBeNull();
expect(merged![0].paths).toHaveLength(2);
});
@@ -418,7 +424,7 @@ describe('messageCache', () => {
const current = [createMessage({ id: 1, text: '[encrypted]' })];
const fetched = [createMessage({ id: 1, text: 'Hello world' })];
const merged = messageCache.reconcile(current, fetched);
const merged = reconcileConversationMessages(current, fetched);
expect(merged).not.toBeNull();
expect(merged![0].text).toBe('Hello world');
});
@@ -428,7 +434,7 @@ describe('messageCache', () => {
const current = [createMessage({ id: 1, acked: 2, paths, text: 'Hello' })];
const fetched = [createMessage({ id: 1, acked: 2, paths, text: 'Hello' })];
expect(messageCache.reconcile(current, fetched)).toBeNull();
expect(reconcileConversationMessages(current, fetched)).toBeNull();
});
});
});

View File

@@ -1,9 +1,11 @@
import { act, renderHook, waitFor } from '@testing-library/react';
import { beforeEach, describe, expect, it, vi, type Mock } from 'vitest';
import * as messageCache from '../messageCache';
import { api } from '../api';
import { useConversationMessages } from '../hooks/useConversationMessages';
import {
conversationMessageCache,
useConversationMessages,
} from '../hooks/useConversationMessages';
import type { Conversation, Message } from '../types';
const mockGetMessages = vi.fn<typeof api.getMessages>();
@@ -62,7 +64,7 @@ function createDeferred<T>() {
describe('useConversationMessages ACK ordering', () => {
beforeEach(() => {
mockGetMessages.mockReset();
messageCache.clear();
conversationMessageCache.clear();
mockToastError.mockReset();
});
@@ -175,7 +177,7 @@ describe('useConversationMessages ACK ordering', () => {
describe('useConversationMessages conversation switch', () => {
beforeEach(() => {
mockGetMessages.mockReset();
messageCache.clear();
conversationMessageCache.clear();
});
it('resets loadingOlder when switching conversations mid-fetch', async () => {
@@ -300,7 +302,7 @@ describe('useConversationMessages conversation switch', () => {
describe('useConversationMessages background reconcile ordering', () => {
beforeEach(() => {
mockGetMessages.mockReset();
messageCache.clear();
conversationMessageCache.clear();
});
it('ignores stale reconnect reconcile responses that finish after newer ones', async () => {
@@ -342,7 +344,7 @@ describe('useConversationMessages background reconcile ordering', () => {
const conv = createConversation();
const cachedMessage = createMessage({ id: 42, text: 'cached snapshot' });
messageCache.set(conv.id, {
conversationMessageCache.set(conv.id, {
messages: [cachedMessage],
hasOlderMessages: true,
});
@@ -362,7 +364,7 @@ describe('useConversationMessages background reconcile ordering', () => {
describe('useConversationMessages older-page dedup and reentry', () => {
beforeEach(() => {
mockGetMessages.mockReset();
messageCache.clear();
conversationMessageCache.clear();
});
it('prevents duplicate overlapping older-page fetches in the same tick', async () => {
@@ -508,7 +510,7 @@ describe('useConversationMessages forward pagination', () => {
beforeEach(() => {
mockGetMessages.mockReset();
mockGetMessagesAround.mockReset();
messageCache.clear();
conversationMessageCache.clear();
mockToastError.mockReset();
});