feat(providers): add Codex and Ollama provider runners with message adapters

This commit is contained in:
Hibryda 2026-03-11 03:56:05 +01:00
parent 4ae7ca6634
commit 3e34fda59a
9 changed files with 985 additions and 2 deletions

View file

@ -14,7 +14,7 @@
"tauri:build": "cargo tauri build", "tauri:build": "cargo tauri build",
"test": "vitest run", "test": "vitest run",
"test:e2e": "wdio run tests/e2e/wdio.conf.js", "test:e2e": "wdio run tests/e2e/wdio.conf.js",
"build:sidecar": "esbuild sidecar/claude-runner.ts --bundle --platform=node --format=esm --outfile=sidecar/dist/claude-runner.mjs" "build:sidecar": "esbuild sidecar/claude-runner.ts --bundle --platform=node --format=esm --outfile=sidecar/dist/claude-runner.mjs && esbuild sidecar/codex-runner.ts --bundle --platform=node --format=esm --outfile=sidecar/dist/codex-runner.mjs && esbuild sidecar/ollama-runner.ts --bundle --platform=node --format=esm --outfile=sidecar/dist/ollama-runner.mjs"
}, },
"devDependencies": { "devDependencies": {
"@sveltejs/vite-plugin-svelte": "^6.2.1", "@sveltejs/vite-plugin-svelte": "^6.2.1",

222
v2/sidecar/codex-runner.ts Normal file
View file

@ -0,0 +1,222 @@
// Codex Runner — Node.js sidecar entry point for OpenAI Codex provider
// Spawned by Rust SidecarManager, communicates via stdio NDJSON
// Uses @openai/codex-sdk for Codex session management
import { stdin, stdout, stderr } from 'process';
import { createInterface } from 'readline';
import { execSync } from 'child_process';
import { existsSync } from 'fs';
import { join } from 'path';
import { homedir } from 'os';
// stdin line reader plus per-session abort bookkeeping.
const rl = createInterface({ input: stdin });
const sessions = new Map<string, { controller: AbortController }>();

/** Emit one NDJSON message on stdout for the Rust SidecarManager. */
function send(msg: Record<string, unknown>) {
  stdout.write(`${JSON.stringify(msg)}\n`);
}

/** Diagnostics go to stderr so they never corrupt the stdout NDJSON stream. */
function log(message: string) {
  stderr.write(`[codex-sidecar] ${message}\n`);
}

// One JSON message per stdin line; malformed lines are logged and dropped.
rl.on('line', (line: string) => {
  let parsed: Record<string, unknown>;
  try {
    parsed = JSON.parse(line);
  } catch {
    log(`Invalid JSON: ${line}`);
    return;
  }
  handleMessage(parsed).catch((err: unknown) => {
    log(`Unhandled error in message handler: ${err}`);
  });
});
// Inbound 'query' request from the Rust host: start one Codex run.
interface QueryMessage {
  type: 'query';
  // Host-assigned id echoed in every reply for this session.
  sessionId: string;
  // User prompt forwarded to the Codex thread.
  prompt: string;
  // Working directory for the thread; handleQuery defaults to process.cwd().
  cwd?: string;
  // NOTE(review): accepted by the protocol but not read by handleQuery.
  maxTurns?: number;
  // When set, resume an existing Codex thread instead of starting a new one.
  resumeSessionId?: string;
  // 'bypassPermissions' maps to approval 'never' + 'danger-full-access' sandbox.
  permissionMode?: string;
  // NOTE(review): declared but not read by handleQuery — confirm intent.
  systemPrompt?: string;
  // Codex model id; handleQuery defaults to 'gpt-5.4'.
  model?: string;
  // Provider-specific extras; handleQuery reads providerConfig.sandbox.
  providerConfig?: Record<string, unknown>;
}
// Inbound 'stop' request: abort the running session with this id.
interface StopMessage {
  type: 'stop';
  sessionId: string;
}
/**
 * Dispatch one parsed stdin message by its `type` field.
 * Unknown types produce an 'error' reply rather than throwing.
 */
async function handleMessage(msg: Record<string, unknown>) {
  if (msg.type === 'ping') {
    send({ type: 'pong' });
  } else if (msg.type === 'query') {
    await handleQuery(msg as unknown as QueryMessage);
  } else if (msg.type === 'stop') {
    handleStop(msg as unknown as StopMessage);
  } else {
    send({ type: 'error', message: `Unknown message type: ${msg.type}` });
  }
}
async function handleQuery(msg: QueryMessage) {
const { sessionId, prompt, cwd, maxTurns, resumeSessionId, permissionMode, model, providerConfig } = msg;
if (sessions.has(sessionId)) {
send({ type: 'error', sessionId, message: 'Session already running' });
return;
}
log(`Starting Codex session ${sessionId}`);
const controller = new AbortController();
// Strip CODEX*/OPENAI* env vars to prevent nesting issues
const cleanEnv: Record<string, string | undefined> = {};
for (const [key, value] of Object.entries(process.env)) {
if (!key.startsWith('CODEX') && !key.startsWith('OPENAI')) {
cleanEnv[key] = value;
}
}
// Re-inject the API key
const apiKey = process.env.CODEX_API_KEY || process.env.OPENAI_API_KEY;
if (apiKey) {
cleanEnv['CODEX_API_KEY'] = apiKey;
}
// Dynamically import SDK — fails gracefully if not installed
let Codex: any;
try {
const sdk = await import('@openai/codex-sdk');
Codex = sdk.Codex ?? sdk.default;
} catch {
send({ type: 'agent_error', sessionId, message: 'Codex SDK not installed. Run: npm install @openai/codex-sdk' });
return;
}
if (!apiKey) {
send({ type: 'agent_error', sessionId, message: 'No API key. Set CODEX_API_KEY or OPENAI_API_KEY.' });
return;
}
try {
// Map permission mode to Codex sandbox/approval settings
const sandbox = mapSandboxMode(providerConfig?.sandbox as string | undefined, permissionMode);
const approvalPolicy = permissionMode === 'bypassPermissions' ? 'never' : 'on-request';
const codex = new Codex({
env: cleanEnv as Record<string, string>,
config: {
model: model ?? 'gpt-5.4',
approval_policy: approvalPolicy,
sandbox: sandbox,
},
});
const threadOpts: Record<string, unknown> = {
workingDirectory: cwd || process.cwd(),
};
const thread = resumeSessionId
? codex.resumeThread(resumeSessionId)
: codex.startThread(threadOpts);
sessions.set(sessionId, { controller });
send({ type: 'agent_started', sessionId });
const streamResult = await thread.runStreamed(prompt);
for await (const event of streamResult.events) {
if (controller.signal.aborted) break;
// Forward raw Codex events — the message adapter parses them
send({
type: 'agent_event',
sessionId,
event: event as Record<string, unknown>,
});
}
sessions.delete(sessionId);
send({
type: 'agent_stopped',
sessionId,
exitCode: 0,
signal: null,
});
} catch (err: unknown) {
sessions.delete(sessionId);
const errMsg = err instanceof Error ? err.message : String(err);
if (controller.signal.aborted) {
log(`Codex session ${sessionId} aborted`);
send({
type: 'agent_stopped',
sessionId,
exitCode: null,
signal: 'SIGTERM',
});
} else {
log(`Codex session ${sessionId} error: ${errMsg}`);
send({
type: 'agent_error',
sessionId,
message: errMsg,
});
}
}
}
/** Abort a running session in response to a 'stop' message. */
function handleStop(msg: StopMessage) {
  const session = sessions.get(msg.sessionId);
  if (session === undefined) {
    send({ type: 'error', sessionId: msg.sessionId, message: 'Session not found' });
    return;
  }
  log(`Stopping Codex session ${msg.sessionId}`);
  session.controller.abort();
}
/**
 * Resolve the Codex sandbox level: an explicit (truthy) provider-config
 * value wins; 'bypassPermissions' unlocks full access; anything else
 * gets the conservative workspace-write default.
 */
function mapSandboxMode(
  configSandbox: string | undefined,
  permissionMode: string | undefined,
): string {
  if (configSandbox) return configSandbox;
  return permissionMode === 'bypassPermissions' ? 'danger-full-access' : 'workspace-write';
}
/**
 * Locate the Codex CLI binary.
 * Checks well-known install locations first, then falls back to the
 * platform's lookup command (`where` on Windows, `which` elsewhere).
 * Returns undefined when no binary can be found.
 */
function findCodexCli(): string | undefined {
  const candidates = [
    join(homedir(), '.local', 'bin', 'codex'),
    '/usr/local/bin/codex',
    '/usr/bin/codex',
  ];
  for (const p of candidates) {
    if (existsSync(p)) return p;
  }
  // Pick the lookup command per platform. The previous combined
  // `which codex 2>/dev/null || where codex 2>nul` form mixed POSIX and
  // cmd syntax: on POSIX the fallback created a stray file literally named
  // `nul` in the working directory. stderr is suppressed via stdio instead.
  const lookup = process.platform === 'win32' ? 'where codex' : 'which codex';
  try {
    const output = execSync(lookup, {
      encoding: 'utf-8',
      stdio: ['ignore', 'pipe', 'ignore'],
    });
    const first = output.trim().split('\n')[0];
    // Empty output means "not found" — report undefined, not ''.
    return first || undefined;
  } catch {
    return undefined;
  }
}
// Startup: report (to stderr only) whether a Codex CLI binary is present,
// then signal readiness to the Rust host over the stdout NDJSON channel.
const codexPath = findCodexCli();
if (codexPath) {
  log(`Found Codex CLI at ${codexPath}`);
} else {
  log('Codex CLI not found — will use SDK if available');
}
log('Codex sidecar started');
send({ type: 'ready' });

269
v2/sidecar/ollama-runner.ts Normal file
View file

@ -0,0 +1,269 @@
// Ollama Runner — Node.js sidecar entry point for local Ollama provider
// Spawned by Rust SidecarManager, communicates via stdio NDJSON
// Uses direct HTTP to Ollama REST API (no external dependencies)
import { stdin, stdout, stderr } from 'process';
import { createInterface } from 'readline';
// Line reader over stdin and a registry of in-flight sessions.
const rl = createInterface({ input: stdin });
const sessions = new Map<string, { controller: AbortController }>();

/** Write one NDJSON message to stdout for the Rust SidecarManager. */
function send(msg: Record<string, unknown>) {
  stdout.write(`${JSON.stringify(msg)}\n`);
}

/** Diagnostics go to stderr, keeping the stdout NDJSON channel clean. */
function log(message: string) {
  stderr.write(`[ollama-sidecar] ${message}\n`);
}

// One JSON message per stdin line; bad JSON is logged and skipped.
rl.on('line', (line: string) => {
  let parsed: Record<string, unknown>;
  try {
    parsed = JSON.parse(line);
  } catch {
    log(`Invalid JSON: ${line}`);
    return;
  }
  handleMessage(parsed).catch((err: unknown) => {
    log(`Unhandled error in message handler: ${err}`);
  });
});
// Inbound 'query' request: run one streaming chat completion against Ollama.
interface QueryMessage {
  type: 'query';
  // Host-assigned id echoed in every reply for this session.
  sessionId: string;
  // User prompt; sent as the final 'user' chat message.
  prompt: string;
  // Reported back in the synthetic init event; defaults to process.cwd().
  cwd?: string;
  // Ollama model tag; handleQuery defaults to 'qwen3:8b'.
  model?: string;
  // Optional 'system' role message prepended to the chat transcript.
  systemPrompt?: string;
  // handleQuery reads: host (base URL), num_ctx (context size), think (bool).
  providerConfig?: Record<string, unknown>;
}
// Inbound 'stop' request: abort the running session with this id.
interface StopMessage {
  type: 'stop';
  sessionId: string;
}
/** Route one parsed stdin message by its `type` discriminator. */
async function handleMessage(msg: Record<string, unknown>) {
  const kind = msg.type;
  if (kind === 'ping') {
    send({ type: 'pong' });
    return;
  }
  if (kind === 'query') {
    await handleQuery(msg as unknown as QueryMessage);
    return;
  }
  if (kind === 'stop') {
    handleStop(msg as unknown as StopMessage);
    return;
  }
  send({ type: 'error', message: `Unknown message type: ${msg.type}` });
}
/**
 * Emit one parsed /api/chat NDJSON chunk to the host as an 'agent_event'.
 * Mid-stream errors ({error:"..."}) are normalized to 'error' events so the
 * message adapter sees one consistent shape. Shared by the stream loop and
 * the trailing-buffer flush (this logic was previously duplicated inline,
 * and the flush path skipped the error check).
 */
function forwardChunk(sessionId: string, chunk: Record<string, unknown>) {
  if (typeof chunk.error === 'string') {
    send({
      type: 'agent_event',
      sessionId,
      event: { type: 'error', message: chunk.error },
    });
    return;
  }
  send({
    type: 'agent_event',
    sessionId,
    event: {
      type: 'chunk',
      message: chunk.message,
      done: chunk.done,
      done_reason: chunk.done_reason,
      model: chunk.model,
      prompt_eval_count: chunk.prompt_eval_count,
      eval_count: chunk.eval_count,
      eval_duration: chunk.eval_duration,
      total_duration: chunk.total_duration,
    },
  });
}

/**
 * Handle a 'query' message: health-check the Ollama server, POST a
 * streaming /api/chat request, and forward every NDJSON chunk to the host.
 *
 * Replies: 'agent_started' plus a synthetic init event, one 'agent_event'
 * per chunk, then 'agent_stopped' (clean end or abort) or 'agent_error'.
 */
async function handleQuery(msg: QueryMessage) {
  const { sessionId, prompt, cwd, model, systemPrompt, providerConfig } = msg;
  if (sessions.has(sessionId)) {
    send({ type: 'error', sessionId, message: 'Session already running' });
    return;
  }
  const ollamaHost = (providerConfig?.host as string) || process.env.OLLAMA_HOST || 'http://127.0.0.1:11434';
  const ollamaModel = model || 'qwen3:8b';
  const numCtx = (providerConfig?.num_ctx as number) || 32768;
  const think = (providerConfig?.think as boolean) ?? false;
  log(`Starting Ollama session ${sessionId} with model ${ollamaModel}`);
  // Health check before committing to a session.
  try {
    const healthRes = await fetch(`${ollamaHost}/api/version`);
    if (!healthRes.ok) {
      send({ type: 'agent_error', sessionId, message: `Ollama not reachable at ${ollamaHost} (HTTP ${healthRes.status})` });
      return;
    }
  } catch (err: unknown) {
    const errMsg = err instanceof Error ? err.message : String(err);
    send({ type: 'agent_error', sessionId, message: `Cannot connect to Ollama at ${ollamaHost}: ${errMsg}` });
    return;
  }
  const controller = new AbortController();
  sessions.set(sessionId, { controller });
  send({ type: 'agent_started', sessionId });
  // Synthetic init event so the message adapter can emit an 'init' message.
  send({
    type: 'agent_event',
    sessionId,
    event: {
      type: 'system',
      subtype: 'init',
      session_id: sessionId,
      model: ollamaModel,
      cwd: cwd || process.cwd(),
    },
  });
  // Build the chat transcript: optional system message, then the prompt.
  const messages: Array<{ role: string; content: string }> = [];
  if (systemPrompt && typeof systemPrompt === 'string') {
    messages.push({ role: 'system', content: systemPrompt });
  }
  messages.push({ role: 'user', content: prompt });
  try {
    const res = await fetch(`${ollamaHost}/api/chat`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        model: ollamaModel,
        messages,
        stream: true,
        options: { num_ctx: numCtx },
        think,
      }),
      signal: controller.signal,
    });
    if (!res.ok) {
      // Prefer Ollama's structured {error} body; fall back to raw text.
      const errBody = await res.text();
      let errMsg: string;
      try {
        const parsed = JSON.parse(errBody);
        errMsg = parsed.error || errBody;
      } catch {
        errMsg = errBody;
      }
      send({ type: 'agent_error', sessionId, message: `Ollama error (${res.status}): ${errMsg}` });
      sessions.delete(sessionId);
      return;
    }
    if (!res.body) {
      send({ type: 'agent_error', sessionId, message: 'No response body from Ollama' });
      sessions.delete(sessionId);
      return;
    }
    // Parse the NDJSON stream line by line, buffering partial lines.
    const reader = res.body.getReader();
    const decoder = new TextDecoder();
    let buffer = '';
    while (!controller.signal.aborted) {
      const { done, value } = await reader.read();
      if (done) break;
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split('\n');
      buffer = lines.pop() || '';
      for (const line of lines) {
        const trimmed = line.trim();
        if (!trimmed) continue;
        try {
          forwardChunk(sessionId, JSON.parse(trimmed) as Record<string, unknown>);
        } catch {
          log(`Failed to parse Ollama chunk: ${trimmed}`);
        }
      }
    }
    if (controller.signal.aborted) {
      // Release the stream promptly; fetch abort also cancels it, but an
      // explicit cancel is safe and frees the reader lock.
      await reader.cancel().catch(() => {});
      sessions.delete(sessionId);
      log(`Ollama session ${sessionId} aborted`);
      // Consistent with the AbortError path below — an aborted run is a
      // SIGTERM-style stop, not a clean exit (previously reported exit 0).
      send({ type: 'agent_stopped', sessionId, exitCode: null, signal: 'SIGTERM' });
      return;
    }
    // Flush any final partial line left in the buffer.
    if (buffer.trim()) {
      try {
        forwardChunk(sessionId, JSON.parse(buffer.trim()) as Record<string, unknown>);
      } catch {
        log(`Failed to parse final Ollama buffer: ${buffer}`);
      }
    }
    sessions.delete(sessionId);
    send({
      type: 'agent_stopped',
      sessionId,
      exitCode: 0,
      signal: null,
    });
  } catch (err: unknown) {
    sessions.delete(sessionId);
    const errMsg = err instanceof Error ? err.message : String(err);
    if (controller.signal.aborted) {
      log(`Ollama session ${sessionId} aborted`);
      send({
        type: 'agent_stopped',
        sessionId,
        exitCode: null,
        signal: 'SIGTERM',
      });
    } else {
      log(`Ollama session ${sessionId} error: ${errMsg}`);
      send({
        type: 'agent_error',
        sessionId,
        message: errMsg,
      });
    }
  }
}
function handleStop(msg: StopMessage) {
const { sessionId } = msg;
const session = sessions.get(sessionId);
if (!session) {
send({ type: 'error', sessionId, message: 'Session not found' });
return;
}
log(`Stopping Ollama session ${sessionId}`);
session.controller.abort();
}
// Startup: log to stderr, then signal readiness to the Rust host via stdout.
log('Ollama sidecar started');
send({ type: 'ready' });

View file

@ -7,6 +7,8 @@
import { startHealthTick, stopHealthTick, clearHealthTracking } from './lib/stores/health.svelte'; import { startHealthTick, stopHealthTick, clearHealthTracking } from './lib/stores/health.svelte';
import { registerProvider } from './lib/providers/registry.svelte'; import { registerProvider } from './lib/providers/registry.svelte';
import { CLAUDE_PROVIDER } from './lib/providers/claude'; import { CLAUDE_PROVIDER } from './lib/providers/claude';
import { CODEX_PROVIDER } from './lib/providers/codex';
import { OLLAMA_PROVIDER } from './lib/providers/ollama';
import { loadWorkspace, getActiveTab, setActiveTab, setActiveProject, getEnabledProjects } from './lib/stores/workspace.svelte'; import { loadWorkspace, getActiveTab, setActiveTab, setActiveProject, getEnabledProjects } from './lib/stores/workspace.svelte';
// Workspace components // Workspace components
@ -68,6 +70,8 @@
if (v) document.documentElement.style.setProperty('--project-max-aspect', v); if (v) document.documentElement.style.setProperty('--project-max-aspect', v);
}); });
registerProvider(CLAUDE_PROVIDER); registerProvider(CLAUDE_PROVIDER);
registerProvider(CODEX_PROVIDER);
registerProvider(OLLAMA_PROVIDER);
startAgentDispatcher(); startAgentDispatcher();
startHealthTick(); startHealthTick();

View file

@ -0,0 +1,297 @@
// Codex Message Adapter — transforms Codex CLI NDJSON events to internal AgentMessage format
// Codex events: thread.started, turn.started, item.started/updated/completed, turn.completed/failed
import type {
AgentMessage,
InitContent,
TextContent,
ThinkingContent,
ToolCallContent,
ToolResultContent,
StatusContent,
CostContent,
ErrorContent,
} from './claude-messages';
/** Coerce an unknown value to a string, substituting `fallback` for non-strings. */
function str(v: unknown, fallback = ''): string {
  if (typeof v === 'string') return v;
  return fallback;
}
/** Coerce an unknown value to a number, substituting `fallback` for non-numbers. */
function num(v: unknown, fallback = 0): number {
  if (typeof v === 'number') return v;
  return fallback;
}
/**
 * Transform one raw Codex CLI event into zero or more AgentMessages.
 * Thread/turn lifecycle events map to init/status/cost/error messages;
 * item.* events are delegated to adaptItem; unrecognized events pass
 * through as 'unknown' so nothing is silently dropped.
 */
export function adaptCodexMessage(raw: Record<string, unknown>): AgentMessage[] {
  const timestamp = Date.now();
  const uuid = crypto.randomUUID();
  const eventType = raw.type;
  if (eventType === 'thread.started') {
    const content: InitContent = {
      sessionId: str(raw.thread_id),
      model: '',
      cwd: '',
      tools: [],
    };
    return [{ id: uuid, type: 'init', content, timestamp }];
  }
  if (eventType === 'turn.started') {
    const content: StatusContent = { subtype: 'turn_started' };
    return [{ id: uuid, type: 'status', content, timestamp }];
  }
  if (eventType === 'turn.completed') {
    return adaptTurnCompleted(raw, uuid, timestamp);
  }
  if (eventType === 'turn.failed') {
    const content: ErrorContent = {
      message: str((raw.error as Record<string, unknown>)?.message, 'Turn failed'),
    };
    return [{ id: uuid, type: 'error', content, timestamp }];
  }
  if (eventType === 'item.started' || eventType === 'item.updated' || eventType === 'item.completed') {
    return adaptItem(raw, uuid, timestamp);
  }
  if (eventType === 'error') {
    const content: ErrorContent = { message: str(raw.message, 'Unknown error') };
    return [{ id: uuid, type: 'error', content, timestamp }];
  }
  return [{ id: uuid, type: 'unknown', content: raw, timestamp }];
}
/**
 * Map a Codex turn.completed event to a single 'cost' message carrying
 * token usage. Codex reports no dollar cost or duration, so those are 0.
 */
function adaptTurnCompleted(
  raw: Record<string, unknown>,
  uuid: string,
  timestamp: number,
): AgentMessage[] {
  let usage: Record<string, unknown> = {};
  if (raw.usage !== null && typeof raw.usage === 'object') {
    usage = raw.usage as Record<string, unknown>;
  }
  const content: CostContent = {
    totalCostUsd: 0,
    durationMs: 0,
    inputTokens: num(usage.input_tokens),
    outputTokens: num(usage.output_tokens),
    numTurns: 1,
    isError: false,
  };
  return [{ id: uuid, type: 'cost', content, timestamp }];
}
/**
 * Map an item.* lifecycle event to AgentMessages based on the item's type.
 * Text-like items (agent_message, reasoning, web_search) are emitted only
 * on item.completed to avoid duplicates from started/updated; tool-like
 * items are delegated to dedicated adapters. Unknown item types yield [].
 */
function adaptItem(
  raw: Record<string, unknown>,
  uuid: string,
  timestamp: number,
): AgentMessage[] {
  let item: Record<string, unknown> = {};
  if (raw.item !== null && typeof raw.item === 'object') {
    item = raw.item as Record<string, unknown>;
  }
  const itemType = str(item.type);
  const eventType = str(raw.type);
  const completed = eventType === 'item.completed';
  if (itemType === 'agent_message') {
    if (!completed) return [];
    const content: TextContent = { text: str(item.text) };
    return [{ id: uuid, type: 'text', content, timestamp }];
  }
  if (itemType === 'reasoning') {
    if (!completed) return [];
    const content: ThinkingContent = { text: str(item.text) };
    return [{ id: uuid, type: 'thinking', content, timestamp }];
  }
  if (itemType === 'command_execution') {
    return adaptCommandExecution(item, uuid, timestamp, eventType);
  }
  if (itemType === 'file_change') {
    return adaptFileChange(item, uuid, timestamp, eventType);
  }
  if (itemType === 'mcp_tool_call') {
    return adaptMcpToolCall(item, uuid, timestamp, eventType);
  }
  if (itemType === 'web_search') {
    if (!completed) return [];
    const content: ToolCallContent = {
      toolUseId: str(item.id, uuid),
      name: 'WebSearch',
      input: { query: str(item.query) },
    };
    return [{ id: uuid, type: 'tool_call', content, timestamp }];
  }
  if (itemType === 'error') {
    const content: ErrorContent = { message: str(item.message, 'Item error') };
    return [{ id: uuid, type: 'error', content, timestamp }];
  }
  return [];
}
/**
 * Command execution items become a Bash tool_call (emitted on both started
 * and completed, sharing one toolUseId) plus, once finished, a tool_result
 * carrying the command's aggregated output.
 */
function adaptCommandExecution(
  item: Record<string, unknown>,
  uuid: string,
  timestamp: number,
  eventType: string,
): AgentMessage[] {
  const toolUseId = str(item.id, uuid);
  const started = eventType === 'item.started';
  const completed = eventType === 'item.completed';
  const out: AgentMessage[] = [];
  if (started || completed) {
    const content: ToolCallContent = {
      toolUseId,
      name: 'Bash',
      input: { command: str(item.command) },
    };
    out.push({ id: `${uuid}-call`, type: 'tool_call', content, timestamp });
  }
  if (completed) {
    const content: ToolResultContent = {
      toolUseId,
      output: str(item.aggregated_output),
    };
    out.push({ id: `${uuid}-result`, type: 'tool_result', content, timestamp });
  }
  return out;
}
/**
 * File change items (completed only) expand to a synthetic tool_call +
 * tool_result pair per changed path: adds map to Write, deletes to Bash,
 * everything else to Edit.
 */
function adaptFileChange(
  item: Record<string, unknown>,
  uuid: string,
  timestamp: number,
  eventType: string,
): AgentMessage[] {
  if (eventType !== 'item.completed') return [];
  const changes = Array.isArray(item.changes) ? item.changes as Array<Record<string, unknown>> : [];
  return changes.flatMap((change): AgentMessage[] => {
    const kind = str(change.kind);
    const path = str(change.path);
    let toolName = 'Edit';
    if (kind === 'delete') toolName = 'Bash';
    else if (kind === 'add') toolName = 'Write';
    const toolUseId = `${uuid}-${path}`;
    const call: ToolCallContent = { toolUseId, name: toolName, input: { file_path: path } };
    const result: ToolResultContent = { toolUseId, output: `File ${kind}: ${path}` };
    return [
      { id: `${toolUseId}-call`, type: 'tool_call', content: call, timestamp },
      { id: `${toolUseId}-result`, type: 'tool_result', content: result, timestamp },
    ];
  });
}
/**
 * MCP tool call items become a tool_call named "server:tool" (on started
 * and completed) and, on completion, a tool_result carrying the tool's
 * content/structured_content — or the error message if the call failed.
 */
function adaptMcpToolCall(
  item: Record<string, unknown>,
  uuid: string,
  timestamp: number,
  eventType: string,
): AgentMessage[] {
  const out: AgentMessage[] = [];
  const toolUseId = str(item.id, uuid);
  const toolName = `${str(item.server)}:${str(item.tool)}`;
  const completed = eventType === 'item.completed';
  if (eventType === 'item.started' || completed) {
    const content: ToolCallContent = { toolUseId, name: toolName, input: item.arguments };
    out.push({ id: `${uuid}-call`, type: 'tool_call', content, timestamp });
  }
  if (completed) {
    const asRecord = (v: unknown): Record<string, unknown> | undefined =>
      typeof v === 'object' && v !== null ? v as Record<string, unknown> : undefined;
    const result = asRecord(item.result);
    const error = asRecord(item.error);
    const output = error
      ? str(error.message, 'MCP tool error')
      : (result?.content ?? result?.structured_content ?? 'OK');
    const content: ToolResultContent = { toolUseId, output };
    out.push({ id: `${uuid}-result`, type: 'tool_result', content, timestamp });
  }
  return out;
}

View file

@ -4,6 +4,8 @@
import type { AgentMessage } from './claude-messages'; import type { AgentMessage } from './claude-messages';
import type { ProviderId } from '../providers/types'; import type { ProviderId } from '../providers/types';
import { adaptSDKMessage } from './claude-messages'; import { adaptSDKMessage } from './claude-messages';
import { adaptCodexMessage } from './codex-messages';
import { adaptOllamaMessage } from './ollama-messages';
/** Function signature for a provider message adapter */ /** Function signature for a provider message adapter */
export type MessageAdapter = (raw: Record<string, unknown>) => AgentMessage[]; export type MessageAdapter = (raw: Record<string, unknown>) => AgentMessage[];
@ -25,5 +27,7 @@ export function adaptMessage(providerId: ProviderId, raw: Record<string, unknown
return adapter(raw); return adapter(raw);
} }
// Register Claude adapter by default // Register all provider adapters
registerMessageAdapter('claude', adaptSDKMessage); registerMessageAdapter('claude', adaptSDKMessage);
registerMessageAdapter('codex', adaptCodexMessage);
registerMessageAdapter('ollama', adaptOllamaMessage);

View file

@ -0,0 +1,147 @@
// Ollama Message Adapter — transforms Ollama chat streaming events to internal AgentMessage format
// Ollama runner emits synthesized events wrapping /api/chat NDJSON chunks
import type {
AgentMessage,
InitContent,
TextContent,
ThinkingContent,
StatusContent,
CostContent,
ErrorContent,
} from './claude-messages';
/** Return v when it is a string; otherwise the fallback (default ''). */
function str(v: unknown, fallback = ''): string {
  return typeof v !== 'string' ? fallback : v;
}
/** Return v when it is a number (NaN included); otherwise the fallback (default 0). */
function num(v: unknown, fallback = 0): number {
  return typeof v !== 'number' ? fallback : v;
}
/**
 * Adapt a raw Ollama runner event to AgentMessage[].
 *
 * The Ollama runner emits events in this format:
 * - {type:'system', subtype:'init', model, ...}
 * - {type:'chunk', message:{role,content,thinking}, done:false}
 * - {type:'chunk', message:{role,content}, done:true, done_reason, prompt_eval_count, eval_count, ...}
 * - {type:'error', message:'...'}
 *
 * Anything unrecognized passes through as an 'unknown' message so the
 * caller can still surface it.
 */
export function adaptOllamaMessage(raw: Record<string, unknown>): AgentMessage[] {
  const timestamp = Date.now();
  const uuid = crypto.randomUUID();
  if (raw.type === 'system') {
    return adaptSystemEvent(raw, uuid, timestamp);
  }
  if (raw.type === 'chunk') {
    return adaptChunk(raw, uuid, timestamp);
  }
  if (raw.type === 'error') {
    const content: ErrorContent = { message: str(raw.message, 'Ollama error') };
    return [{ id: uuid, type: 'error', content, timestamp }];
  }
  return [{ id: uuid, type: 'unknown', content: raw, timestamp }];
}
/**
 * System events: subtype 'init' becomes an 'init' message carrying session
 * metadata; any other subtype becomes a generic 'status' message.
 */
function adaptSystemEvent(
  raw: Record<string, unknown>,
  uuid: string,
  timestamp: number,
): AgentMessage[] {
  const subtype = str(raw.subtype);
  if (subtype !== 'init') {
    const content: StatusContent = {
      subtype,
      message: typeof raw.status === 'string' ? raw.status : undefined,
    };
    return [{ id: uuid, type: 'status', content, timestamp }];
  }
  const content: InitContent = {
    sessionId: str(raw.session_id),
    model: str(raw.model),
    cwd: str(raw.cwd),
    tools: [],
  };
  return [{ id: uuid, type: 'init', content, timestamp }];
}
/**
 * Streaming chat chunks: emit 'thinking' and/or 'text' messages for any
 * non-empty fields, and on the final (done) chunk append a 'cost' message
 * with token counts and the eval duration converted from ns to ms.
 */
function adaptChunk(
  raw: Record<string, unknown>,
  uuid: string,
  timestamp: number,
): AgentMessage[] {
  const out: AgentMessage[] = [];
  let body: Record<string, unknown> = {};
  if (raw.message !== null && typeof raw.message === 'object') {
    body = raw.message as Record<string, unknown>;
  }

  // Extended-thinking text (e.g. Qwen3) streams in its own field.
  const thinking = str(body.thinking);
  if (thinking !== '') {
    const content: ThinkingContent = { text: thinking };
    out.push({ id: `${uuid}-think`, type: 'thinking', content, timestamp });
  }

  // Regular assistant text.
  const text = str(body.content);
  if (text !== '') {
    const content: TextContent = { text };
    out.push({ id: `${uuid}-text`, type: 'text', content, timestamp });
  }

  // Final chunk: token counts and duration become a 'cost' message.
  if (raw.done === true) {
    const evalDuration = num(raw.eval_duration);
    const content: CostContent = {
      totalCostUsd: 0,
      durationMs: evalDuration > 0 ? Math.round(evalDuration / 1_000_000) : 0,
      inputTokens: num(raw.prompt_eval_count),
      outputTokens: num(raw.eval_count),
      numTurns: 1,
      isError: str(raw.done_reason) === 'error',
    };
    out.push({ id: `${uuid}-cost`, type: 'cost', content, timestamp });
  }
  return out;
}

View file

@ -0,0 +1,20 @@
// Codex Provider — metadata and capabilities for OpenAI Codex CLI
import type { ProviderMeta } from './types';
/**
 * Provider registry entry for the OpenAI Codex CLI backend.
 * Consumed by registerProvider(); sidecarRunner names the bundled entry
 * point produced by the build:sidecar esbuild step.
 */
export const CODEX_PROVIDER: ProviderMeta = {
  id: 'codex',
  name: 'Codex CLI',
  description: 'OpenAI Codex CLI agent via SDK',
  capabilities: {
    hasProfiles: false,
    hasSkills: false,
    hasModelSelection: true,   // model configurable; see defaultModel below
    hasSandbox: true,          // sandbox levels mapped in the codex runner
    supportsSubagents: false,
    supportsCost: false,       // Codex usage reports tokens, not dollars
    supportsResume: true,      // resumeSessionId resumes a Codex thread
  },
  sidecarRunner: 'codex-runner.mjs',
  defaultModel: 'gpt-5.4',
};

View file

@ -0,0 +1,20 @@
// Ollama Provider — metadata and capabilities for local Ollama models
import type { ProviderMeta } from './types';
/**
 * Provider registry entry for the local Ollama backend.
 * Consumed by registerProvider(); sidecarRunner names the bundled entry
 * point produced by the build:sidecar esbuild step.
 */
export const OLLAMA_PROVIDER: ProviderMeta = {
  id: 'ollama',
  name: 'Ollama',
  description: 'Local Ollama models via REST API',
  capabilities: {
    hasProfiles: false,
    hasSkills: false,
    hasModelSelection: true,   // any locally pulled model tag
    hasSandbox: false,         // plain chat completion; no tool sandboxing
    supportsSubagents: false,
    supportsCost: false,       // local inference has no dollar cost
    supportsResume: false,     // each query is a fresh chat transcript
  },
  sidecarRunner: 'ollama-runner.mjs',
  defaultModel: 'qwen3:8b',
};