Remove mock data from harness and add agent credential healthchecks
All checks were successful
CI / lint-and-test (push) Successful in 25s
Deploy Production / deploy (push) Successful in 59s
CI / build (push) Successful in 1m11s

Strip all seed/mock data (fake tasks, models, usage entries, agent configs)
so the dashboard starts clean and populates from real API state. Add
/api/agents/health endpoint that validates each agent's provider credentials
and CLI availability.
This commit is contained in:
Julia McGhee
2026-03-21 19:42:53 +00:00
parent 9a40240bd2
commit df1111da15
4 changed files with 140 additions and 176 deletions

View File

@@ -77,61 +77,6 @@ export function deleteAgentConfig(id: string): boolean {
return configs.delete(id);
}
// ─── SEED DATA ──────────────────────────────────────────────
// Default agent roster: one entry per runtime × model pairing.
// Ids are stable so they can be referenced from tasks and usage logs.
const SEED_CONFIGS: AgentConfig[] = [
  { id: "agent-claude-opus",     name: "Claude Code · Opus 4",        runtime: "claude-code", modelId: "claude-opus-4-20250514",   provider: "anthropic" },
  { id: "agent-claude-sonnet",   name: "Claude Code · Sonnet 4",      runtime: "claude-code", modelId: "claude-sonnet-4-20250514", provider: "anthropic" },
  { id: "agent-codex-o3",        name: "Codex · o3",                  runtime: "codex",       modelId: "o3",                       provider: "openai" },
  { id: "agent-codex-o4mini",    name: "Codex · o4-mini",             runtime: "codex",       modelId: "o4-mini",                  provider: "openai" },
  { id: "agent-opencode-sonnet", name: "OpenCode · Sonnet 4",         runtime: "opencode",    modelId: "claude-sonnet-4-20250514", provider: "anthropic" },
  { id: "agent-opencode-gemini", name: "OpenCode · Gemini 2.5 Pro",   runtime: "opencode",    modelId: "gemini-2.5-pro",           provider: "google" },
];
function seedAgents() {
if (configs.size > 0) return;
for (const c of SEED_CONFIGS) {
configs.set(c.id, c);
}
}
seedAgents();
// ─── CLI BUILDER ────────────────────────────────────────────
// Builds the shell command to invoke an agent headlessly.

View File

@@ -114,41 +114,3 @@ export function getUsageSummary(): ModelUsageSummary[] {
return Array.from(grouped.values()).sort((a, b) => b.totalCost - a.totalCost);
}
// ─── SEED DATA ──────────────────────────────────────────────
// Pre-populate with well-known models and pricing.
// Costs are USD per 1k tokens; contextWindow is in tokens.
// NOTE: fixed inconsistent comma spacing (several entries had no space
// after commas, breaking the column alignment); values are unchanged.
const SEED_MODELS: Omit<CuratedModel, "enabled">[] = [
  { id: "claude-opus-4-20250514",   name: "Claude Opus 4",     provider: "anthropic", contextWindow: 200000,  costPer1kInput: 0.015,   costPer1kOutput: 0.075 },
  { id: "claude-sonnet-4-20250514", name: "Claude Sonnet 4",   provider: "anthropic", contextWindow: 200000,  costPer1kInput: 0.003,   costPer1kOutput: 0.015 },
  { id: "claude-haiku-4-20250514",  name: "Claude Haiku 4",    provider: "anthropic", contextWindow: 200000,  costPer1kInput: 0.0008,  costPer1kOutput: 0.004 },
  { id: "gpt-4o",                   name: "GPT-4o",            provider: "openai",    contextWindow: 128000,  costPer1kInput: 0.0025,  costPer1kOutput: 0.01 },
  { id: "gpt-4o-mini",              name: "GPT-4o Mini",       provider: "openai",    contextWindow: 128000,  costPer1kInput: 0.00015, costPer1kOutput: 0.0006 },
  { id: "o3",                       name: "o3",                provider: "openai",    contextWindow: 200000,  costPer1kInput: 0.01,    costPer1kOutput: 0.04 },
  { id: "o4-mini",                  name: "o4 Mini",           provider: "openai",    contextWindow: 200000,  costPer1kInput: 0.0011,  costPer1kOutput: 0.0044 },
  { id: "gemini-2.5-pro",           name: "Gemini 2.5 Pro",    provider: "google",    contextWindow: 1048576, costPer1kInput: 0.00125, costPer1kOutput: 0.01 },
  { id: "gemini-2.5-flash",         name: "Gemini 2.5 Flash",  provider: "google",    contextWindow: 1048576, costPer1kInput: 0.00015, costPer1kOutput: 0.0006 },
];
// Sample usage history, grouped by task. Timestamps are supplied at seed
// time by seedData(), so entries omit them here.
const SEED_USAGE: Omit<ModelUsageEntry, "timestamp">[] = [
  // task-002: four Sonnet iterations on the moderation tier.
  { modelId: "claude-sonnet-4-20250514", provider: "anthropic", taskId: "task-002", taskSlug: "haiku-moderation-tier2",     iteration: 1, inputTokens: 48200, outputTokens: 12400, durationMs: 34000 },
  { modelId: "claude-sonnet-4-20250514", provider: "anthropic", taskId: "task-002", taskSlug: "haiku-moderation-tier2",     iteration: 2, inputTokens: 52100, outputTokens: 15800, durationMs: 41000 },
  { modelId: "claude-sonnet-4-20250514", provider: "anthropic", taskId: "task-002", taskSlug: "haiku-moderation-tier2",     iteration: 3, inputTokens: 61300, outputTokens: 18200, durationMs: 45000 },
  { modelId: "claude-sonnet-4-20250514", provider: "anthropic", taskId: "task-002", taskSlug: "haiku-moderation-tier2",     iteration: 4, inputTokens: 55000, outputTokens: 14600, durationMs: 38000 },
  // task-001: three Opus iterations plus one GPT-4o pass on the migration.
  { modelId: "claude-opus-4-20250514",   provider: "anthropic", taskId: "task-001", taskSlug: "pubsub-pipeline-migration",  iteration: 1, inputTokens: 85400, outputTokens: 28900, durationMs: 92000 },
  { modelId: "claude-opus-4-20250514",   provider: "anthropic", taskId: "task-001", taskSlug: "pubsub-pipeline-migration",  iteration: 2, inputTokens: 91200, outputTokens: 31400, durationMs: 98000 },
  { modelId: "claude-opus-4-20250514",   provider: "anthropic", taskId: "task-001", taskSlug: "pubsub-pipeline-migration",  iteration: 3, inputTokens: 78600, outputTokens: 22100, durationMs: 85000 },
  { modelId: "gpt-4o",                   provider: "openai",    taskId: "task-001", taskSlug: "pubsub-pipeline-migration",  iteration: 1, inputTokens: 42000, outputTokens: 9800,  durationMs: 28000 },
];
// Install the default model catalog (all enabled) and a backdated usage
// history. Idempotent: a non-empty catalog means we've already seeded.
export function seedData() {
  if (curatedModels.size > 0) return; // already seeded

  for (const model of SEED_MODELS) {
    curatedModels.set(model.id, { ...model, enabled: true });
  }

  // Backdate entries 30 minutes apart so the most recent lands just
  // before "now" and ordering matches list position.
  const THIRTY_MINUTES_MS = 30 * 60 * 1000;
  const now = Date.now();
  SEED_USAGE.forEach((entry, index) => {
    usageLog.push({
      ...entry,
      timestamp: now - (SEED_USAGE.length - index) * THIRTY_MINUTES_MS,
    });
  });
}
seedData();