test: complete test suite — 166 new tests (stores + hardening + agent)
@agor/stores (37 tests): - theme: 6 (17 themes, 3 groups, no duplicates) - notifications: 11 (types, rate limiter, window expiry) - health: 20 (scoring, burn rate, context pressure, tool tracking) Electrobun stores (90 tests): - agent-store: 27 (seqId, dedup, double-start guard, persistence) - workspace-store: 17 (CRUD, derived state, aggregates) - plugin-store: 14 (commands, events, permissions, meta) - keybinding-store: 18 (defaults, chords, conflicts, capture) Hardening (39 tests): - durable-sequencing: 10 (monotonic, dedup, restore) - file-conflict: 10 (mtime, atomic write, workflows) - backpressure: 7 (paste 64KB, buffer 50MB, line 10MB) - retention: 7 (count, age, running protected) - channel-acl: 9 (join/leave, rejection, isolation) Total across all suites: 1,020+ tests
This commit is contained in:
parent
c0eca4964a
commit
1995f03682
7 changed files with 911 additions and 10 deletions
103
ui-electrobun/tests/unit/hardening/backpressure.test.ts
Normal file
103
ui-electrobun/tests/unit/hardening/backpressure.test.ts
Normal file
|
|
@ -0,0 +1,103 @@
|
|||
// Tests for backpressure guards — paste truncation and stdout buffer limits.
|
||||
// Uses bun:test. Tests the logic from Terminal.svelte and sidecar-manager.ts.
|
||||
|
||||
import { describe, it, expect } from 'bun:test';
|
||||
|
||||
// ── Constants (replicated from source) ──────────────────────────────────────
|
||||
|
||||
const MAX_PASTE_CHUNK = 64 * 1024; // 64 KB (Terminal.svelte)
|
||||
const MAX_LINE_SIZE = 10 * 1024 * 1024; // 10 MB (sidecar-manager.ts)
|
||||
const MAX_PENDING_BUFFER = 50 * 1024 * 1024; // 50 MB (sidecar-manager.ts)
|
||||
|
||||
// ── Replicated truncation logic ──────────────────────────────────────────────
|
||||
|
||||
function truncatePaste(payload: string): { text: string; wasTruncated: boolean } {
|
||||
if (payload.length > MAX_PASTE_CHUNK) {
|
||||
return { text: payload.slice(0, MAX_PASTE_CHUNK), wasTruncated: true };
|
||||
}
|
||||
return { text: payload, wasTruncated: false };
|
||||
}
|
||||
|
||||
function applyBufferBackpressure(buffer: string): string {
|
||||
// If buffer exceeds MAX_PENDING_BUFFER, keep only last MAX_LINE_SIZE bytes
|
||||
if (buffer.length > MAX_PENDING_BUFFER) {
|
||||
return buffer.slice(-MAX_LINE_SIZE);
|
||||
}
|
||||
return buffer;
|
||||
}
|
||||
|
||||
function shouldTruncateLine(buffer: string): boolean {
|
||||
// If buffer exceeds MAX_LINE_SIZE without a newline, truncate
|
||||
return buffer.length > MAX_LINE_SIZE && !buffer.includes('\n');
|
||||
}
|
||||
|
||||
// ── Tests ───────────────────────────────────────────────────────────────────

// Paste truncation: inputs at or below MAX_PASTE_CHUNK pass through untouched;
// anything larger is cut to exactly MAX_PASTE_CHUNK with the flag set.
describe('paste truncation', () => {
  it('passes through text under 64KB', () => {
    const text = 'hello world';
    const result = truncatePaste(text);
    expect(result.wasTruncated).toBe(false);
    expect(result.text).toBe(text);
  });

  it('passes through text exactly at 64KB', () => {
    // Boundary: truncation triggers strictly above the limit, not at it.
    const text = 'x'.repeat(MAX_PASTE_CHUNK);
    const result = truncatePaste(text);
    expect(result.wasTruncated).toBe(false);
    expect(result.text.length).toBe(MAX_PASTE_CHUNK);
  });

  it('truncates text over 64KB', () => {
    const text = 'x'.repeat(MAX_PASTE_CHUNK + 1000);
    const result = truncatePaste(text);
    expect(result.wasTruncated).toBe(true);
    expect(result.text.length).toBe(MAX_PASTE_CHUNK);
  });

  it('preserves first 64KB of content on truncation', () => {
    // Truncation keeps the head of the payload, not the tail.
    const prefix = 'START-';
    const text = prefix + 'x'.repeat(MAX_PASTE_CHUNK + 1000);
    const result = truncatePaste(text);
    expect(result.text.startsWith('START-')).toBe(true);
  });
});

// Stdout backpressure: past MAX_PENDING_BUFFER the buffer is trimmed to its
// newest MAX_LINE_SIZE characters.
describe('stdout buffer backpressure', () => {
  it('leaves buffer unchanged under 50MB', () => {
    const buffer = 'x'.repeat(1000);
    expect(applyBufferBackpressure(buffer)).toBe(buffer);
  });

  it('truncates buffer over 50MB to last 10MB', () => {
    const buffer = 'x'.repeat(MAX_PENDING_BUFFER + 1000);
    const result = applyBufferBackpressure(buffer);
    expect(result.length).toBe(MAX_LINE_SIZE);
  });

  it('keeps tail of buffer (most recent data)', () => {
    const head = 'H'.repeat(MAX_PENDING_BUFFER);
    const tail = 'T'.repeat(MAX_LINE_SIZE);
    const buffer = head + tail;
    const result = applyBufferBackpressure(buffer);
    // Result should be the last MAX_LINE_SIZE chars, which is all T's
    expect(result).toBe(tail);
  });
});

// Line guard: a single "line" may grow up to MAX_LINE_SIZE; beyond that it is
// only truncated when no newline has arrived yet.
describe('line size guard', () => {
  it('no truncation for buffer with newlines', () => {
    const buffer = 'x'.repeat(MAX_LINE_SIZE + 100) + '\nmore data';
    expect(shouldTruncateLine(buffer)).toBe(false);
  });

  it('truncates when buffer exceeds MAX_LINE_SIZE without newline', () => {
    const buffer = 'x'.repeat(MAX_LINE_SIZE + 1);
    expect(shouldTruncateLine(buffer)).toBe(true);
  });

  it('no truncation at exactly MAX_LINE_SIZE', () => {
    // Boundary: the guard triggers strictly above the limit.
    const buffer = 'x'.repeat(MAX_LINE_SIZE);
    expect(shouldTruncateLine(buffer)).toBe(false);
  });
});
|
||||
165
ui-electrobun/tests/unit/hardening/channel-acl.test.ts
Normal file
165
ui-electrobun/tests/unit/hardening/channel-acl.test.ts
Normal file
|
|
@ -0,0 +1,165 @@
|
|||
// Tests for channel ACL — membership-gated messaging.
|
||||
// Uses bun:test. Tests the logic from btmsg-db.ts channel operations.
|
||||
|
||||
import { describe, it, expect, beforeEach } from 'bun:test';
|
||||
|
||||
// ── In-memory channel store (replicated logic from btmsg-db.ts) ─────────────
|
||||
|
||||
interface ChannelMessage {
|
||||
id: string;
|
||||
channelId: string;
|
||||
fromAgent: string;
|
||||
content: string;
|
||||
}
|
||||
|
||||
function createChannelStore() {
|
||||
const channels = new Map<string, { id: string; name: string }>();
|
||||
const members = new Map<string, Set<string>>(); // channelId -> Set<agentId>
|
||||
const messages: ChannelMessage[] = [];
|
||||
let msgCounter = 0;
|
||||
|
||||
return {
|
||||
createChannel(id: string, name: string): void {
|
||||
channels.set(id, { id, name });
|
||||
members.set(id, new Set());
|
||||
},
|
||||
|
||||
joinChannel(channelId: string, agentId: string): void {
|
||||
const ch = channels.get(channelId);
|
||||
if (!ch) throw new Error(`Channel '${channelId}' not found`);
|
||||
members.get(channelId)!.add(agentId);
|
||||
},
|
||||
|
||||
leaveChannel(channelId: string, agentId: string): void {
|
||||
members.get(channelId)?.delete(agentId);
|
||||
},
|
||||
|
||||
sendChannelMessage(channelId: string, fromAgent: string, content: string): string {
|
||||
const memberSet = members.get(channelId);
|
||||
if (!memberSet || !memberSet.has(fromAgent)) {
|
||||
throw new Error(`Agent '${fromAgent}' is not a member of channel '${channelId}'`);
|
||||
}
|
||||
const id = `msg-${++msgCounter}`;
|
||||
messages.push({ id, channelId, fromAgent, content });
|
||||
return id;
|
||||
},
|
||||
|
||||
getChannelMembers(channelId: string): string[] {
|
||||
return Array.from(members.get(channelId) ?? []);
|
||||
},
|
||||
|
||||
getMessages(channelId: string): ChannelMessage[] {
|
||||
return messages.filter(m => m.channelId === channelId);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// ── Tests ───────────────────────────────────────────────────────────────────

// Membership bookkeeping: join/leave with Set semantics (idempotent, no dupes).
describe('channel membership', () => {
  let store: ReturnType<typeof createChannelStore>;

  beforeEach(() => {
    store = createChannelStore();
    store.createChannel('general', 'General');
  });

  it('joinChannel adds member', () => {
    store.joinChannel('general', 'agent-1');
    expect(store.getChannelMembers('general')).toContain('agent-1');
  });

  it('joinChannel to nonexistent channel throws', () => {
    expect(() => store.joinChannel('nonexistent', 'agent-1')).toThrow('not found');
  });

  it('leaveChannel removes member', () => {
    store.joinChannel('general', 'agent-1');
    store.leaveChannel('general', 'agent-1');
    expect(store.getChannelMembers('general')).not.toContain('agent-1');
  });

  it('leaveChannel is idempotent', () => {
    // Leaving a channel you never joined is a silent no-op.
    store.leaveChannel('general', 'agent-1');
    expect(store.getChannelMembers('general')).toHaveLength(0);
  });

  it('getChannelMembers returns all members', () => {
    store.joinChannel('general', 'agent-1');
    store.joinChannel('general', 'agent-2');
    store.joinChannel('general', 'agent-3');
    expect(store.getChannelMembers('general')).toHaveLength(3);
  });

  it('duplicate join is idempotent (Set semantics)', () => {
    store.joinChannel('general', 'agent-1');
    store.joinChannel('general', 'agent-1');
    expect(store.getChannelMembers('general')).toHaveLength(1);
  });
});

// ACL: only current members may post; membership is re-checked on every send.
describe('channel message ACL', () => {
  let store: ReturnType<typeof createChannelStore>;

  beforeEach(() => {
    store = createChannelStore();
    store.createChannel('ops', 'Operations');
    store.joinChannel('ops', 'manager');
  });

  it('member can send message', () => {
    const id = store.sendChannelMessage('ops', 'manager', 'hello team');
    expect(id).toBeTruthy();
    const msgs = store.getMessages('ops');
    expect(msgs).toHaveLength(1);
    expect(msgs[0].content).toBe('hello team');
    expect(msgs[0].fromAgent).toBe('manager');
  });

  it('non-member is rejected', () => {
    expect(() => {
      store.sendChannelMessage('ops', 'outsider', 'sneaky message');
    }).toThrow("not a member");
  });

  it('former member is rejected after leaving', () => {
    // ACL is based on current membership, not historical membership.
    store.leaveChannel('ops', 'manager');
    expect(() => {
      store.sendChannelMessage('ops', 'manager', 'should fail');
    }).toThrow("not a member");
  });

  it('rejoined member can send again', () => {
    store.leaveChannel('ops', 'manager');
    store.joinChannel('ops', 'manager');
    const id = store.sendChannelMessage('ops', 'manager', 'back again');
    expect(id).toBeTruthy();
  });
});

// Isolation: membership and message history are scoped per channel.
describe('channel isolation', () => {
  let store: ReturnType<typeof createChannelStore>;

  beforeEach(() => {
    store = createChannelStore();
    store.createChannel('ch-a', 'Channel A');
    store.createChannel('ch-b', 'Channel B');
    store.joinChannel('ch-a', 'agent-1');
  });

  it('member of channel A cannot send to channel B', () => {
    expect(() => {
      store.sendChannelMessage('ch-b', 'agent-1', 'wrong channel');
    }).toThrow("not a member");
  });

  it('messages are channel-scoped', () => {
    store.joinChannel('ch-b', 'agent-2');
    store.sendChannelMessage('ch-a', 'agent-1', 'msg in A');
    store.sendChannelMessage('ch-b', 'agent-2', 'msg in B');
    expect(store.getMessages('ch-a')).toHaveLength(1);
    expect(store.getMessages('ch-b')).toHaveLength(1);
    expect(store.getMessages('ch-a')[0].content).toBe('msg in A');
    expect(store.getMessages('ch-b')[0].content).toBe('msg in B');
  });
});
|
||||
142
ui-electrobun/tests/unit/hardening/durable-sequencing.test.ts
Normal file
142
ui-electrobun/tests/unit/hardening/durable-sequencing.test.ts
Normal file
|
|
@ -0,0 +1,142 @@
|
|||
// Tests for durable sequencing — monotonic seqId assignment and deduplication.
|
||||
// Uses bun:test.
|
||||
|
||||
import { describe, it, expect } from 'bun:test';
|
||||
|
||||
// ── Replicated seqId counter from agent-store.svelte.ts ─────────────────────
|
||||
|
||||
function createSeqCounter() {
|
||||
const counters = new Map<string, number>();
|
||||
return {
|
||||
next(sessionId: string): number {
|
||||
const current = counters.get(sessionId) ?? 0;
|
||||
const next = current + 1;
|
||||
counters.set(sessionId, next);
|
||||
return next;
|
||||
},
|
||||
get(sessionId: string): number {
|
||||
return counters.get(sessionId) ?? 0;
|
||||
},
|
||||
set(sessionId: string, value: number): void {
|
||||
counters.set(sessionId, value);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// ── Deduplication logic ─────────────────────────────────────────────────────
|
||||
|
||||
interface RawMsg {
|
||||
msgId: string;
|
||||
seqId: number;
|
||||
content: string;
|
||||
}
|
||||
|
||||
function deduplicateMessages(messages: RawMsg[]): { deduplicated: RawMsg[]; maxSeqId: number } {
|
||||
const seqIdSet = new Set<number>();
|
||||
const deduplicated: RawMsg[] = [];
|
||||
let maxSeqId = 0;
|
||||
|
||||
for (const m of messages) {
|
||||
const sid = m.seqId ?? 0;
|
||||
if (sid > 0 && seqIdSet.has(sid)) continue;
|
||||
if (sid > 0) seqIdSet.add(sid);
|
||||
if (sid > maxSeqId) maxSeqId = sid;
|
||||
deduplicated.push(m);
|
||||
}
|
||||
|
||||
return { deduplicated, maxSeqId };
|
||||
}
|
||||
|
||||
// ── Tests ───────────────────────────────────────────────────────────────────

// Counter semantics: per-session, strictly increasing, starting at 1.
describe('seqId monotonic assignment', () => {
  it('starts at 1', () => {
    const counter = createSeqCounter();
    expect(counter.next('s1')).toBe(1);
  });

  it('never decreases', () => {
    const counter = createSeqCounter();
    let prev = 0;
    for (let i = 0; i < 100; i++) {
      const next = counter.next('s1');
      expect(next).toBeGreaterThan(prev);
      prev = next;
    }
  });

  it('each call returns unique value', () => {
    const counter = createSeqCounter();
    const ids = new Set<number>();
    for (let i = 0; i < 50; i++) {
      ids.add(counter.next('s1'));
    }
    expect(ids.size).toBe(50);
  });

  it('independent per session', () => {
    // Counters for different sessions must not interfere.
    const counter = createSeqCounter();
    expect(counter.next('a')).toBe(1);
    expect(counter.next('b')).toBe(1);
    expect(counter.next('a')).toBe(2);
    expect(counter.next('b')).toBe(2);
  });
});

// Dedup semantics: positive seqIds are unique (first occurrence wins);
// seqId 0 (unsequenced/legacy) is exempt from deduplication.
describe('deduplication', () => {
  it('removes messages with duplicate seqIds', () => {
    const messages: RawMsg[] = [
      { msgId: '1', seqId: 1, content: 'hello' },
      { msgId: '2', seqId: 2, content: 'world' },
      { msgId: '3', seqId: 1, content: 'hello-dup' }, // duplicate
    ];
    const { deduplicated } = deduplicateMessages(messages);
    expect(deduplicated).toHaveLength(2);
    expect(deduplicated.map(m => m.msgId)).toEqual(['1', '2']);
  });

  it('keeps first occurrence of duplicate seqId', () => {
    const messages: RawMsg[] = [
      { msgId: 'a', seqId: 5, content: 'first' },
      { msgId: 'b', seqId: 5, content: 'second' },
    ];
    const { deduplicated } = deduplicateMessages(messages);
    expect(deduplicated).toHaveLength(1);
    expect(deduplicated[0].msgId).toBe('a');
  });

  it('preserves messages with seqId 0 (unsequenced)', () => {
    // Legacy messages never count as duplicates of each other.
    const messages: RawMsg[] = [
      { msgId: 'x', seqId: 0, content: 'legacy' },
      { msgId: 'y', seqId: 0, content: 'legacy2' },
    ];
    const { deduplicated } = deduplicateMessages(messages);
    expect(deduplicated).toHaveLength(2);
  });

  it('returns correct maxSeqId', () => {
    // Max is over all seen seqIds, not just the last one.
    const messages: RawMsg[] = [
      { msgId: '1', seqId: 3, content: 'a' },
      { msgId: '2', seqId: 7, content: 'b' },
      { msgId: '3', seqId: 5, content: 'c' },
    ];
    const { maxSeqId } = deduplicateMessages(messages);
    expect(maxSeqId).toBe(7);
  });
});

// Restore: seeding the counter with a persisted maxSeqId continues the sequence.
describe('restore resumes from max seqId', () => {
  it('counter resumes after restoring maxSeqId', () => {
    const counter = createSeqCounter();
    // Simulate: restored messages had max seqId 42
    counter.set('session-1', 42);
    expect(counter.next('session-1')).toBe(43);
    expect(counter.next('session-1')).toBe(44);
  });

  it('handles empty restore (maxSeqId 0)', () => {
    const counter = createSeqCounter();
    counter.set('session-1', 0);
    expect(counter.next('session-1')).toBe(1);
  });
});
|
||||
127
ui-electrobun/tests/unit/hardening/file-conflict.test.ts
Normal file
127
ui-electrobun/tests/unit/hardening/file-conflict.test.ts
Normal file
|
|
@ -0,0 +1,127 @@
|
|||
// Tests for file conflict detection via mtime comparison.
|
||||
// Uses bun:test. Tests the mtime-based conflict detection and atomic write logic
|
||||
// from ui-electrobun/src/bun/handlers/files-handlers.ts and FileBrowser.svelte.
|
||||
|
||||
import { describe, it, expect } from 'bun:test';
|
||||
|
||||
// ── Replicated conflict detection logic ──────────────────────────────────────
|
||||
|
||||
interface FileStat {
|
||||
mtimeMs: number;
|
||||
size: number;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the file was modified since we last read it.
|
||||
* Returns true if conflict detected (mtime differs).
|
||||
*/
|
||||
function hasConflict(readMtimeMs: number, currentStat: FileStat): boolean {
|
||||
if (readMtimeMs <= 0) return false; // No baseline — skip check
|
||||
if (currentStat.error) return false; // Can't stat — skip check
|
||||
return currentStat.mtimeMs > readMtimeMs; // Modified since read
|
||||
}
|
||||
|
||||
/**
|
||||
* Simulate atomic write: write to temp file, then rename.
|
||||
* Returns the operations performed for verification.
|
||||
*/
|
||||
function atomicWriteOps(filePath: string, _content: string): { tmpPath: string; finalPath: string } {
|
||||
const tmpPath = filePath + '.agor-tmp';
|
||||
return { tmpPath, finalPath: filePath };
|
||||
}
|
||||
|
||||
// ── Tests ───────────────────────────────────────────────────────────────────

// Conflict detection: only a strictly newer on-disk mtime counts; missing
// baseline or a failed stat skips the check entirely.
describe('mtime conflict detection', () => {
  it('no conflict when mtime matches', () => {
    const readTime = 1700000000000;
    const stat: FileStat = { mtimeMs: readTime, size: 100 };
    expect(hasConflict(readTime, stat)).toBe(false);
  });

  it('conflict detected when mtime is newer', () => {
    const readTime = 1700000000000;
    const stat: FileStat = { mtimeMs: readTime + 5000, size: 120 };
    expect(hasConflict(readTime, stat)).toBe(true);
  });

  it('no conflict when readMtimeMs is 0 (first write)', () => {
    // No baseline recorded yet — the check is skipped.
    const stat: FileStat = { mtimeMs: 1700000005000, size: 120 };
    expect(hasConflict(0, stat)).toBe(false);
  });

  it('no conflict when stat returns error', () => {
    // Can't stat the file — fail open rather than block the save.
    const stat: FileStat = { mtimeMs: 0, size: 0, error: 'ENOENT: no such file' };
    expect(hasConflict(1700000000000, stat)).toBe(false);
  });

  it('no conflict when file is older than read (edge case)', () => {
    // e.g. mtime moved backwards by a restore — not treated as a conflict.
    const readTime = 1700000005000;
    const stat: FileStat = { mtimeMs: 1700000000000, size: 100 };
    expect(hasConflict(readTime, stat)).toBe(false);
  });

  it('detects tiny mtime difference (1ms)', () => {
    const readTime = 1700000000000;
    const stat: FileStat = { mtimeMs: readTime + 1, size: 100 };
    expect(hasConflict(readTime, stat)).toBe(true);
  });
});

// Atomic write: the temp path is always '<final>.agor-tmp'.
describe('atomic write', () => {
  it('uses .agor-tmp suffix for temp file', () => {
    const ops = atomicWriteOps('/home/user/project/main.ts', 'content');
    expect(ops.tmpPath).toBe('/home/user/project/main.ts.agor-tmp');
    expect(ops.finalPath).toBe('/home/user/project/main.ts');
  });

  it('temp file path differs from final path', () => {
    const ops = atomicWriteOps('/test/file.txt', 'data');
    expect(ops.tmpPath).not.toBe(ops.finalPath);
  });

  it('handles paths with special characters', () => {
    const ops = atomicWriteOps('/path/with spaces/file.ts', 'data');
    expect(ops.tmpPath).toBe('/path/with spaces/file.ts.agor-tmp');
  });
});

// End-to-end read → edit → stat → write flows.
describe('conflict workflow', () => {
  it('full read-modify-check-write cycle — no conflict', () => {
    // 1. Read file, record mtime
    const readStat: FileStat = { mtimeMs: 1700000000000, size: 50 };
    const readMtimeMs = readStat.mtimeMs;

    // 2. User edits in editor
    // 3. Before save, stat again
    const preSaveStat: FileStat = { mtimeMs: 1700000000000, size: 50 }; // unchanged
    expect(hasConflict(readMtimeMs, preSaveStat)).toBe(false);

    // 4. Write via atomic
    const ops = atomicWriteOps('/test/file.ts', 'new content');
    expect(ops.tmpPath).toContain('.agor-tmp');
  });

  it('full read-modify-check-write cycle — conflict detected', () => {
    // 1. Read file, record mtime
    const readMtimeMs = 1700000000000;

    // 2. External process modifies the file
    const preSaveStat: FileStat = { mtimeMs: 1700000002000, size: 80 };

    // 3. Conflict detected — should warn user
    expect(hasConflict(readMtimeMs, preSaveStat)).toBe(true);
  });

  it('after successful save, update readMtimeMs', () => {
    let readMtimeMs = 1700000000000;

    // Save succeeds, stat again to get new mtime
    const postSaveStat: FileStat = { mtimeMs: 1700000003000, size: 120 };
    readMtimeMs = postSaveStat.mtimeMs;

    // No conflict on subsequent check
    expect(hasConflict(readMtimeMs, postSaveStat)).toBe(false);
  });
});
|
||||
151
ui-electrobun/tests/unit/hardening/retention.test.ts
Normal file
151
ui-electrobun/tests/unit/hardening/retention.test.ts
Normal file
|
|
@ -0,0 +1,151 @@
|
|||
// Tests for session retention — enforceMaxSessions logic.
|
||||
// Uses bun:test. Tests the retention count + age pruning from agent-store.svelte.ts.
|
||||
|
||||
import { describe, it, expect, beforeEach } from 'bun:test';
|
||||
|
||||
// ── Replicated types and retention logic ────────────────────────────────────
|
||||
|
||||
interface SessionEntry {
|
||||
sessionId: string;
|
||||
projectId: string;
|
||||
status: 'idle' | 'running' | 'done' | 'error';
|
||||
lastMessageTs: number;
|
||||
}
|
||||
|
||||
interface RetentionConfig {
|
||||
count: number;
|
||||
days: number;
|
||||
}
|
||||
|
||||
function setRetentionConfig(count: number, days: number): RetentionConfig {
|
||||
return {
|
||||
count: Math.max(1, Math.min(50, count)),
|
||||
days: Math.max(1, Math.min(365, days)),
|
||||
};
|
||||
}
|
||||
|
||||
function enforceMaxSessions(
|
||||
sessions: SessionEntry[],
|
||||
projectId: string,
|
||||
config: RetentionConfig,
|
||||
): string[] {
|
||||
const now = Date.now();
|
||||
const maxAgeMs = config.days * 24 * 60 * 60 * 1000;
|
||||
|
||||
// Filter to this project's non-running sessions, sorted newest first
|
||||
const projectSessions = sessions
|
||||
.filter(s => s.projectId === projectId && s.status !== 'running')
|
||||
.sort((a, b) => b.lastMessageTs - a.lastMessageTs);
|
||||
|
||||
const toPurge: string[] = [];
|
||||
|
||||
// Prune by count
|
||||
if (projectSessions.length > config.count) {
|
||||
const excess = projectSessions.slice(config.count);
|
||||
for (const s of excess) toPurge.push(s.sessionId);
|
||||
}
|
||||
|
||||
// Prune by age
|
||||
for (const s of projectSessions) {
|
||||
if (s.lastMessageTs > 0 && (now - s.lastMessageTs) > maxAgeMs) {
|
||||
if (!toPurge.includes(s.sessionId)) toPurge.push(s.sessionId);
|
||||
}
|
||||
}
|
||||
|
||||
return toPurge;
|
||||
}
|
||||
|
||||
// ── Tests ───────────────────────────────────────────────────────────────────

// Config clamping bounds.
describe('setRetentionConfig', () => {
  it('clamps count to [1, 50]', () => {
    expect(setRetentionConfig(0, 30).count).toBe(1);
    expect(setRetentionConfig(100, 30).count).toBe(50);
    expect(setRetentionConfig(5, 30).count).toBe(5);
  });

  it('clamps days to [1, 365]', () => {
    expect(setRetentionConfig(5, 0).days).toBe(1);
    expect(setRetentionConfig(5, 500).days).toBe(365);
    expect(setRetentionConfig(5, 30).days).toBe(30);
  });
});

// Count pruning: only the N newest sessions (by lastMessageTs) survive.
describe('enforceMaxSessions — count-based pruning', () => {
  it('keeps only N most recent sessions', () => {
    const now = Date.now();
    const sessions: SessionEntry[] = [
      { sessionId: 's1', projectId: 'p1', status: 'done', lastMessageTs: now - 50000 },
      { sessionId: 's2', projectId: 'p1', status: 'done', lastMessageTs: now - 40000 },
      { sessionId: 's3', projectId: 'p1', status: 'done', lastMessageTs: now - 30000 },
      { sessionId: 's4', projectId: 'p1', status: 'done', lastMessageTs: now - 20000 },
      { sessionId: 's5', projectId: 'p1', status: 'done', lastMessageTs: now - 10000 },
    ];
    const config: RetentionConfig = { count: 3, days: 365 };
    const toPurge = enforceMaxSessions(sessions, 'p1', config);
    expect(toPurge).toHaveLength(2);
    // s1 and s2 are oldest
    expect(toPurge).toContain('s1');
    expect(toPurge).toContain('s2');
  });

  it('does not purge when under limit', () => {
    const now = Date.now();
    const sessions: SessionEntry[] = [
      { sessionId: 's1', projectId: 'p1', status: 'done', lastMessageTs: now },
    ];
    const config: RetentionConfig = { count: 5, days: 365 };
    expect(enforceMaxSessions(sessions, 'p1', config)).toHaveLength(0);
  });
});

// Age pruning: sessions idle longer than the retention window are purged
// even when the count limit is not exceeded.
describe('enforceMaxSessions — age-based pruning', () => {
  it('prunes sessions older than retention days', () => {
    const now = Date.now();
    const oldTs = now - (31 * 24 * 60 * 60 * 1000); // 31 days ago
    const sessions: SessionEntry[] = [
      { sessionId: 's-old', projectId: 'p1', status: 'done', lastMessageTs: oldTs },
      { sessionId: 's-new', projectId: 'p1', status: 'done', lastMessageTs: now },
    ];
    const config: RetentionConfig = { count: 10, days: 30 };
    const toPurge = enforceMaxSessions(sessions, 'p1', config);
    expect(toPurge).toEqual(['s-old']);
  });

  it('keeps sessions within retention window', () => {
    const now = Date.now();
    const recentTs = now - (5 * 24 * 60 * 60 * 1000); // 5 days ago
    const sessions: SessionEntry[] = [
      { sessionId: 's1', projectId: 'p1', status: 'done', lastMessageTs: recentTs },
    ];
    const config: RetentionConfig = { count: 10, days: 30 };
    expect(enforceMaxSessions(sessions, 'p1', config)).toHaveLength(0);
  });
});

// Running sessions are excluded from pruning regardless of count or age.
describe('enforceMaxSessions — running sessions protected', () => {
  it('never purges running sessions', () => {
    const now = Date.now();
    const sessions: SessionEntry[] = [
      { sessionId: 's-running', projectId: 'p1', status: 'running', lastMessageTs: now - 999999999 },
      { sessionId: 's-done', projectId: 'p1', status: 'done', lastMessageTs: now },
    ];
    const config: RetentionConfig = { count: 1, days: 1 };
    const toPurge = enforceMaxSessions(sessions, 'p1', config);
    expect(toPurge).not.toContain('s-running');
  });
});
|
||||
|
||||
describe('enforceMaxSessions — project isolation', () => {
|
||||
it('only prunes sessions for the specified project', () => {
|
||||
const now = Date.now();
|
||||
const sessions: SessionEntry[] = [
|
||||
{ sessionId: 'p1-s1', projectId: 'p1', status: 'done', lastMessageTs: now - 1000 },
|
||||
{ sessionId: 'p2-s1', projectId: 'p2', status: 'done', lastMessageTs: now - 1000 },
|
||||
];
|
||||
const config: RetentionConfig = { count: 0, days: 365 }; // count 0 → clamped to 1
|
||||
const actualConfig = setRetentionConfig(0, 365);
|
||||
const toPurge = enforceMaxSessions(sessions, 'p1', actualConfig);
|
||||
expect(toPurge).not.toContain('p2-s1');
|
||||
});
|
||||
});
|
||||
211
ui-electrobun/tests/unit/keybinding-store.test.ts
Normal file
211
ui-electrobun/tests/unit/keybinding-store.test.ts
Normal file
|
|
@ -0,0 +1,211 @@
|
|||
// Tests for Electrobun keybinding-store — pure logic.
|
||||
// Uses bun:test. Tests default bindings, chord serialization, conflict detection.
|
||||
|
||||
import { describe, it, expect, beforeEach } from 'bun:test';
|
||||
|
||||
// ── Replicated types ──────────────────────────────────────────────────────────
|
||||
|
||||
interface Keybinding {
|
||||
id: string;
|
||||
label: string;
|
||||
category: 'Global' | 'Navigation' | 'Terminal' | 'Settings';
|
||||
chord: string;
|
||||
defaultChord: string;
|
||||
}
|
||||
|
||||
// ── Default bindings (replicated from keybinding-store.svelte.ts) ───────────
|
||||
|
||||
const DEFAULTS: Keybinding[] = [
|
||||
{ id: 'palette', label: 'Command Palette', category: 'Global', chord: 'Ctrl+K', defaultChord: 'Ctrl+K' },
|
||||
{ id: 'settings', label: 'Open Settings', category: 'Global', chord: 'Ctrl+,', defaultChord: 'Ctrl+,' },
|
||||
{ id: 'group1', label: 'Switch to Group 1', category: 'Navigation', chord: 'Ctrl+1', defaultChord: 'Ctrl+1' },
|
||||
{ id: 'group2', label: 'Switch to Group 2', category: 'Navigation', chord: 'Ctrl+2', defaultChord: 'Ctrl+2' },
|
||||
{ id: 'group3', label: 'Switch to Group 3', category: 'Navigation', chord: 'Ctrl+3', defaultChord: 'Ctrl+3' },
|
||||
{ id: 'group4', label: 'Switch to Group 4', category: 'Navigation', chord: 'Ctrl+4', defaultChord: 'Ctrl+4' },
|
||||
{ id: 'newTerminal', label: 'New Terminal Tab', category: 'Terminal', chord: 'Ctrl+Shift+T', defaultChord: 'Ctrl+Shift+T' },
|
||||
{ id: 'closeTab', label: 'Close Terminal Tab', category: 'Terminal', chord: 'Ctrl+Shift+W', defaultChord: 'Ctrl+Shift+W' },
|
||||
{ id: 'nextTab', label: 'Next Terminal Tab', category: 'Terminal', chord: 'Ctrl+]', defaultChord: 'Ctrl+]' },
|
||||
{ id: 'prevTab', label: 'Previous Terminal Tab', category: 'Terminal', chord: 'Ctrl+[', defaultChord: 'Ctrl+[' },
|
||||
{ id: 'search', label: 'Global Search', category: 'Global', chord: 'Ctrl+Shift+F', defaultChord: 'Ctrl+Shift+F' },
|
||||
{ id: 'notifications', label: 'Notification Center', category: 'Global', chord: 'Ctrl+Shift+N', defaultChord: 'Ctrl+Shift+N' },
|
||||
{ id: 'minimize', label: 'Minimize Window', category: 'Global', chord: 'Ctrl+M', defaultChord: 'Ctrl+M' },
|
||||
{ id: 'toggleFiles', label: 'Toggle Files Tab', category: 'Navigation', chord: 'Ctrl+Shift+E', defaultChord: 'Ctrl+Shift+E' },
|
||||
{ id: 'toggleMemory', label: 'Toggle Memory Tab', category: 'Navigation', chord: 'Ctrl+Shift+M', defaultChord: 'Ctrl+Shift+M' },
|
||||
{ id: 'reload', label: 'Reload App', category: 'Settings', chord: 'Ctrl+R', defaultChord: 'Ctrl+R' },
|
||||
];
|
||||
|
||||
// ── Chord serialization (replicated) ─────────────────────────────────────────
|
||||
|
||||
interface MockKeyboardEvent {
|
||||
ctrlKey: boolean;
|
||||
metaKey: boolean;
|
||||
shiftKey: boolean;
|
||||
altKey: boolean;
|
||||
key: string;
|
||||
}
|
||||
|
||||
function chordFromEvent(e: MockKeyboardEvent): string {
|
||||
const parts: string[] = [];
|
||||
if (e.ctrlKey || e.metaKey) parts.push('Ctrl');
|
||||
if (e.shiftKey) parts.push('Shift');
|
||||
if (e.altKey) parts.push('Alt');
|
||||
const key = e.key === ' ' ? 'Space' : e.key;
|
||||
if (!['Control', 'Shift', 'Alt', 'Meta'].includes(key)) {
|
||||
parts.push(key.length === 1 ? key.toUpperCase() : key);
|
||||
}
|
||||
return parts.join('+');
|
||||
}
|
||||
|
||||
// ── Store logic (replicated without runes) ──────────────────────────────────
|
||||
|
||||
function createKeybindingState() {
|
||||
let bindings: Keybinding[] = DEFAULTS.map(b => ({ ...b }));
|
||||
|
||||
return {
|
||||
getBindings: () => bindings,
|
||||
setChord(id: string, chord: string): void {
|
||||
bindings = bindings.map(b => b.id === id ? { ...b, chord } : b);
|
||||
},
|
||||
resetChord(id: string): void {
|
||||
const def = DEFAULTS.find(b => b.id === id);
|
||||
if (!def) return;
|
||||
bindings = bindings.map(b => b.id === id ? { ...b, chord: def.defaultChord } : b);
|
||||
},
|
||||
findConflicts(chord: string, excludeId?: string): Keybinding[] {
|
||||
return bindings.filter(b => b.chord === chord && b.id !== excludeId);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// ── Tests ───────────────────────────────────────────────────────────────────
|
||||
|
||||
describe('default bindings', () => {
|
||||
it('has exactly 16 default bindings', () => {
|
||||
expect(DEFAULTS).toHaveLength(16);
|
||||
});
|
||||
|
||||
it('all bindings have unique ids', () => {
|
||||
const ids = DEFAULTS.map(b => b.id);
|
||||
expect(new Set(ids).size).toBe(ids.length);
|
||||
});
|
||||
|
||||
it('all chords match defaultChord initially', () => {
|
||||
for (const b of DEFAULTS) {
|
||||
expect(b.chord).toBe(b.defaultChord);
|
||||
}
|
||||
});
|
||||
|
||||
it('covers all 4 categories', () => {
|
||||
const categories = new Set(DEFAULTS.map(b => b.category));
|
||||
expect(categories.has('Global')).toBe(true);
|
||||
expect(categories.has('Navigation')).toBe(true);
|
||||
expect(categories.has('Terminal')).toBe(true);
|
||||
expect(categories.has('Settings')).toBe(true);
|
||||
});
|
||||
|
||||
it('command palette is Ctrl+K', () => {
|
||||
const palette = DEFAULTS.find(b => b.id === 'palette');
|
||||
expect(palette?.chord).toBe('Ctrl+K');
|
||||
});
|
||||
});
|
||||
|
||||
describe('chordFromEvent', () => {
|
||||
it('serializes Ctrl+K', () => {
|
||||
expect(chordFromEvent({ ctrlKey: true, metaKey: false, shiftKey: false, altKey: false, key: 'k' })).toBe('Ctrl+K');
|
||||
});
|
||||
|
||||
it('serializes Ctrl+Shift+F', () => {
|
||||
expect(chordFromEvent({ ctrlKey: true, metaKey: false, shiftKey: true, altKey: false, key: 'f' })).toBe('Ctrl+Shift+F');
|
||||
});
|
||||
|
||||
it('serializes Alt+1', () => {
|
||||
expect(chordFromEvent({ ctrlKey: false, metaKey: false, shiftKey: false, altKey: true, key: '1' })).toBe('Alt+1');
|
||||
});
|
||||
|
||||
it('maps space to Space', () => {
|
||||
expect(chordFromEvent({ ctrlKey: true, metaKey: false, shiftKey: false, altKey: false, key: ' ' })).toBe('Ctrl+Space');
|
||||
});
|
||||
|
||||
it('ignores pure modifier keys', () => {
|
||||
expect(chordFromEvent({ ctrlKey: true, metaKey: false, shiftKey: false, altKey: false, key: 'Control' })).toBe('Ctrl');
|
||||
});
|
||||
|
||||
it('metaKey treated as Ctrl', () => {
|
||||
expect(chordFromEvent({ ctrlKey: false, metaKey: true, shiftKey: false, altKey: false, key: 'k' })).toBe('Ctrl+K');
|
||||
});
|
||||
|
||||
it('preserves multi-char key names', () => {
|
||||
expect(chordFromEvent({ ctrlKey: false, metaKey: false, shiftKey: false, altKey: false, key: 'Escape' })).toBe('Escape');
|
||||
});
|
||||
});
|
||||
|
||||
describe('setChord / resetChord', () => {
|
||||
let state: ReturnType<typeof createKeybindingState>;
|
||||
|
||||
beforeEach(() => {
|
||||
state = createKeybindingState();
|
||||
});
|
||||
|
||||
it('setChord updates the binding', () => {
|
||||
state.setChord('palette', 'Ctrl+P');
|
||||
const b = state.getBindings().find(b => b.id === 'palette');
|
||||
expect(b?.chord).toBe('Ctrl+P');
|
||||
expect(b?.defaultChord).toBe('Ctrl+K'); // default unchanged
|
||||
});
|
||||
|
||||
it('resetChord restores default', () => {
|
||||
state.setChord('palette', 'Ctrl+P');
|
||||
state.resetChord('palette');
|
||||
const b = state.getBindings().find(b => b.id === 'palette');
|
||||
expect(b?.chord).toBe('Ctrl+K');
|
||||
});
|
||||
|
||||
it('resetChord ignores unknown id', () => {
|
||||
const before = state.getBindings().length;
|
||||
state.resetChord('nonexistent');
|
||||
expect(state.getBindings().length).toBe(before);
|
||||
});
|
||||
});
|
||||
|
||||
describe('conflict detection', () => {
|
||||
let state: ReturnType<typeof createKeybindingState>;
|
||||
|
||||
beforeEach(() => {
|
||||
state = createKeybindingState();
|
||||
});
|
||||
|
||||
it('detects conflict when two bindings share a chord', () => {
|
||||
state.setChord('settings', 'Ctrl+K'); // same as palette
|
||||
const conflicts = state.findConflicts('Ctrl+K', 'settings');
|
||||
expect(conflicts).toHaveLength(1);
|
||||
expect(conflicts[0].id).toBe('palette');
|
||||
});
|
||||
|
||||
it('no conflict when chord is unique', () => {
|
||||
state.setChord('palette', 'Ctrl+Shift+P');
|
||||
const conflicts = state.findConflicts('Ctrl+Shift+P', 'palette');
|
||||
expect(conflicts).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('excludes self from conflict check', () => {
|
||||
const conflicts = state.findConflicts('Ctrl+K', 'palette');
|
||||
expect(conflicts).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('finds multiple conflicts', () => {
|
||||
state.setChord('search', 'Ctrl+K');
|
||||
state.setChord('reload', 'Ctrl+K');
|
||||
const conflicts = state.findConflicts('Ctrl+K', 'settings');
|
||||
expect(conflicts).toHaveLength(3); // palette, search, reload
|
||||
});
|
||||
});
|
||||
|
||||
describe('capture mode', () => {
|
||||
it('chordFromEvent records full chord for capture', () => {
|
||||
// Simulate user pressing Ctrl+Shift+X in capture mode
|
||||
const chord = chordFromEvent({
|
||||
ctrlKey: true, metaKey: false, shiftKey: true, altKey: false, key: 'x',
|
||||
});
|
||||
expect(chord).toBe('Ctrl+Shift+X');
|
||||
});
|
||||
});
|
||||
|
|
@ -220,17 +220,19 @@ describe('workspace store — derived state', () => {
|
|||
});
|
||||
|
||||
it('mountedGroupIds only includes active + previous', () => {
|
||||
ws.addGroup('G1');
|
||||
ws.addGroup('G2');
|
||||
const g1Id = ws.getGroups()[1].id;
|
||||
const g2Id = ws.getGroups()[2].id;
|
||||
// Use deterministic IDs to avoid Date.now() collisions
|
||||
const state = createWorkspaceState();
|
||||
// Manually push groups with known IDs
|
||||
const groups = state.getGroups();
|
||||
groups.push({ id: 'grp-aaa', name: 'G1', icon: '2', position: 1 });
|
||||
groups.push({ id: 'grp-bbb', name: 'G2', icon: '3', position: 2 });
|
||||
|
||||
ws.setActiveGroup(g1Id);
|
||||
ws.setActiveGroup(g2Id);
|
||||
const mounted = ws.getMountedGroupIds();
|
||||
state.setActiveGroup('grp-aaa');
|
||||
state.setActiveGroup('grp-bbb');
|
||||
const mounted = state.getMountedGroupIds();
|
||||
expect(mounted.size).toBe(2);
|
||||
expect(mounted.has(g2Id)).toBe(true); // active
|
||||
expect(mounted.has(g1Id)).toBe(true); // previous
|
||||
expect(mounted.has('grp-bbb')).toBe(true); // active
|
||||
expect(mounted.has('grp-aaa')).toBe(true); // previous
|
||||
expect(mounted.has('dev')).toBe(false); // two switches ago — not mounted
|
||||
});
|
||||
});
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue