Migrate harness from in-memory stores to CloudNativePG
Some checks failed
CI / lint-and-test (push) Successful in 22s
Deploy Production / deploy (push) Failing after 21s
CI / build (push) Failing after 1m51s

Replace all in-memory Map-backed stores (credentials, models, agents,
tasks, iterations, usage) with Drizzle ORM queries against the
homelab-pg PostgreSQL cluster. All store functions are now async.

- Add 6 harness_* tables to @homelab/db schema
- Generate and apply initial Drizzle migration
- Add lazy DB connection proxy to avoid build-time errors
- Wire DATABASE_URL from sealed secret into harness deployment
- Update all API routes, orchestrator, executor, and boot to await
  async store operations
This commit is contained in:
Julia McGhee
2026-03-21 20:17:00 +00:00
parent df351439d6
commit 3fe75a8e04
28 changed files with 1245 additions and 304 deletions

View File

@@ -28,6 +28,11 @@ spec:
value: /secrets/claude
- name: OPENCODE_CONFIG_DIR
value: /secrets/opencode
- name: DATABASE_URL
valueFrom:
secretKeyRef:
name: harness-db-credentials
key: database-url
volumeMounts:
- name: workspace
mountPath: /data/harness

View File

@@ -0,0 +1,13 @@
---
apiVersion: bitnami.com/v1alpha1
kind: SealedSecret
metadata:
name: harness-db-credentials
namespace: apps
spec:
encryptedData:
database-url: AgA3P5VHDwMKFvoF8YWox4KkFSVhHfSFFA1iT9vJEz4Iw8V4U2tkYJ9VRzCXo8mcRmCxm1/9o5gFZ2BX9BsSLW8kdcJi0YEQ7ieHHBO4NElpkYPbuYCc3MJnpd7YycOciLjcBVzuR4gbNl1PCiFtDY82+Kb0DLxclWXrPRuLxcT2hyMwHzYAfxDHdbgvcDEQ4pK+hZ6R2b7Hror29TvXDIMvdj015ertU8XmsSj/CbDUv8AY/CpyQIymIvZrTmdQx2smBN6HfTNvk/YxWukM0p6pUAWoJ+ylqq6Q+dNDKZpv417T8GHqyd/4bfQOVbkUVClcA3Sil1BNUxm8S/AA8iJIFS+6i2MyN0bRPrfV7AGTT17CdlRmyu35wRX+QLO774MRDzXnJu6aTEyoupFaTxG5qGaGU9Sd9bo+nvi72pClMMsmjTZJNruqKjhilq05jmqJbrCq5x+zJm/jXWQL9jL/K2jVsoOctNSJFu/vIqGOhArspKKMaEQykNbL4Hdmdlzf2cxCB4zrNN8/EfYMalS+mkr9/9heX87n5HU63ndOH6k8jWS49osxi2Upe17STdC5jAHvJnZpxmwpC7x/fo3YaEDT11Q3/Oz5oZ+YU7N7EP3tl57OLYhsYMRcAt+baNjxuvh1YB8r3lThtwjJu5Tj8MSEAWxRAP4TAvI/VQdnHKh3S8jhWyV4rH26dJlUXiRTCLfsjeC1LNwdV4mcdvZQoWROHNuUchp0Q0g6qaor2lJTLKiLcrTtb9+dHvKdYW2ImnBmDgXfxOXc5eSQaISv0BFlrnPAqkjNSUOqi+Ts1LKc/0Fm1RvzW61z4A==
template:
metadata:
name: harness-db-credentials
namespace: apps

View File

@@ -5,3 +5,4 @@ resources:
- service.yaml
- harness-claude-credentials-sealed.yaml
- harness-opencode-credentials-sealed.yaml
- harness-db-credentials-sealed.yaml

View File

@@ -10,7 +10,10 @@
"test": "echo \"no tests yet\""
},
"dependencies": {
"@homelab/db": "workspace:^",
"drizzle-orm": "^0.36.0",
"next": "^15.1.0",
"postgres": "^3.4.0",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"yaml": "^2.7.0"

View File

@@ -2,14 +2,6 @@ import { NextResponse } from "next/server";
import { getAllAgentConfigs, AGENT_RUNTIMES, AgentConfig } from "@/lib/agents";
import { getRawCredentialsByProvider, Provider } from "@/lib/credentials";
const PROVIDER_ENV_VARS: Record<string, string> = {
anthropic: "ANTHROPIC_API_KEY",
openai: "OPENAI_API_KEY",
google: "GOOGLE_API_KEY",
openrouter: "OPENROUTER_API_KEY",
"opencode-zen": "OPENCODE_ZEN_API_KEY",
};
const PROVIDER_VALIDATION: Record<string, (token: string, baseUrl?: string) => Promise<boolean>> = {
async anthropic(token) {
const res = await fetch("https://api.anthropic.com/v1/models", {
@@ -44,8 +36,8 @@ export interface AgentHealthStatus {
provider: string;
modelId: string;
credentialConfigured: boolean;
credentialValid: boolean | null; // null = not checked (no credential)
cliInstalled: boolean | null; // null = not checked
credentialValid: boolean | null;
cliInstalled: boolean | null;
error?: string;
}
@@ -72,22 +64,17 @@ async function checkAgent(config: AgentConfig): Promise<AgentHealthStatus> {
cliInstalled: null,
};
// Check CLI
try {
status.cliInstalled = await checkCliInstalled(runtime.cliCommand);
} catch {
status.cliInstalled = false;
}
// Check credential exists
const creds = getRawCredentialsByProvider(config.provider as Provider);
const creds = await getRawCredentialsByProvider(config.provider as Provider);
status.credentialConfigured = creds.length > 0;
if (!status.credentialConfigured) {
return status;
}
if (!status.credentialConfigured) return status;
// Validate credential against provider API
const validator = PROVIDER_VALIDATION[config.provider];
if (validator) {
try {
@@ -96,16 +83,13 @@ async function checkAgent(config: AgentConfig): Promise<AgentHealthStatus> {
status.credentialValid = false;
status.error = err instanceof Error ? err.message : "Validation failed";
}
} else {
// No validator for this provider (e.g. opencode-zen) — just confirm credential exists
status.credentialValid = null;
}
return status;
}
export async function GET() {
const configs = getAllAgentConfigs();
const configs = await getAllAgentConfigs();
if (configs.length === 0) {
return NextResponse.json({

View File

@@ -9,7 +9,7 @@ import {
export async function GET() {
return NextResponse.json({
configs: getAllAgentConfigs(),
configs: await getAllAgentConfigs(),
runtimes: Object.values(AGENT_RUNTIMES),
});
}
@@ -41,12 +41,12 @@ export async function POST(request: NextRequest) {
env: body.env,
};
return NextResponse.json(upsertAgentConfig(config), { status: 201 });
return NextResponse.json(await upsertAgentConfig(config), { status: 201 });
}
export async function DELETE(request: NextRequest) {
const id = request.nextUrl.searchParams.get("id");
if (!id) return NextResponse.json({ error: "id required" }, { status: 400 });
deleteAgentConfig(id);
await deleteAgentConfig(id);
return NextResponse.json({ ok: true });
}

View File

@@ -11,20 +11,20 @@ import {
export async function GET(request: NextRequest) {
const enabledOnly = request.nextUrl.searchParams.get("enabled") === "true";
return NextResponse.json(enabledOnly ? getEnabledModels() : getCuratedModels());
return NextResponse.json(enabledOnly ? await getEnabledModels() : await getCuratedModels());
}
export async function POST(request: NextRequest) {
const body = await request.json();
if (body.action === "toggle" && body.id) {
const result = toggleModelEnabled(body.id);
const result = await toggleModelEnabled(body.id);
if (!result) return NextResponse.json({ error: "not found" }, { status: 404 });
return NextResponse.json(result);
}
if (body.action === "update-cost" && body.id) {
const result = updateModelCost(body.id, body.costPer1kInput, body.costPer1kOutput);
const result = await updateModelCost(body.id, body.costPer1kInput, body.costPer1kOutput);
if (!result) return NextResponse.json({ error: "not found" }, { status: 404 });
return NextResponse.json(result);
}
@@ -43,12 +43,12 @@ export async function POST(request: NextRequest) {
costPer1kOutput: body.costPer1kOutput,
};
return NextResponse.json(upsertCuratedModel(model), { status: 201 });
return NextResponse.json(await upsertCuratedModel(model), { status: 201 });
}
export async function DELETE(request: NextRequest) {
const id = request.nextUrl.searchParams.get("id");
if (!id) return NextResponse.json({ error: "id required" }, { status: 400 });
removeCuratedModel(id);
await removeCuratedModel(id);
return NextResponse.json({ ok: true });
}

View File

@@ -2,8 +2,6 @@ import { NextResponse } from "next/server";
import { getUsageSummary, getUsageLog } from "@/lib/model-store";
export async function GET() {
return NextResponse.json({
summary: getUsageSummary(),
log: getUsageLog(),
});
const [summary, log] = await Promise.all([getUsageSummary(), getUsageLog()]);
return NextResponse.json({ summary, log });
}

View File

@@ -14,9 +14,9 @@ const VALID_PROVIDERS = [...GIT_PROVIDERS, ...AI_PROVIDERS];
export async function GET(request: NextRequest) {
const kind = request.nextUrl.searchParams.get("kind");
if (kind === "git" || kind === "ai") {
return NextResponse.json(getCredentialsByKind(kind));
return NextResponse.json(await getCredentialsByKind(kind));
}
return NextResponse.json(getAllCredentials());
return NextResponse.json(await getAllCredentials());
}
export async function POST(request: NextRequest) {
@@ -44,7 +44,7 @@ export async function POST(request: NextRequest) {
baseUrl: body.baseUrl,
};
const saved = upsertCredential(cred);
const saved = await upsertCredential(cred);
return NextResponse.json(saved, { status: 201 });
}
@@ -54,7 +54,7 @@ export async function DELETE(request: NextRequest) {
return NextResponse.json({ error: "id is required" }, { status: 400 });
}
const deleted = deleteCredential(id);
const deleted = await deleteCredential(id);
if (!deleted) {
return NextResponse.json({ error: "not found" }, { status: 404 });
}

View File

@@ -6,7 +6,7 @@ export async function GET(
{ params }: { params: Promise<{ id: string }> },
) {
const { id } = await params;
const task = getTask(id);
const task = await getTask(id);
if (!task) {
return NextResponse.json({ error: "Task not found" }, { status: 404 });
}
@@ -19,7 +19,7 @@ export async function PATCH(
) {
const { id } = await params;
const body = await request.json();
const updated = updateTask(id, body);
const updated = await updateTask(id, body);
if (!updated) {
return NextResponse.json({ error: "Task not found" }, { status: 404 });
}

View File

@@ -7,7 +7,7 @@ export async function POST(
{ params }: { params: Promise<{ id: string }> },
) {
const { id } = await params;
const task = getTask(id);
const task = await getTask(id);
if (!task) {
return NextResponse.json({ error: "Task not found" }, { status: 404 });
@@ -20,7 +20,6 @@ export async function POST(
);
}
// Ensure orchestrator is running — it will pick up this task
startOrchestrator();
return NextResponse.json({ ok: true, message: "Orchestrator started, task will be picked up" });

View File

@@ -7,7 +7,7 @@ export async function POST(
{ params }: { params: Promise<{ id: string }> },
) {
const { id } = await params;
const task = getTask(id);
const task = await getTask(id);
if (!task) {
return NextResponse.json({ error: "Task not found" }, { status: 404 });

View File

@@ -4,7 +4,7 @@ import { getAgentConfig } from "@/lib/agents";
import { Task, TaskSpec } from "@/lib/types";
export async function GET() {
return NextResponse.json(getAllTasks());
return NextResponse.json(await getAllTasks());
}
export async function POST(request: NextRequest) {
@@ -18,7 +18,7 @@ export async function POST(request: NextRequest) {
return NextResponse.json({ error: "agentId is required" }, { status: 400 });
}
const agentConfig = getAgentConfig(spec.agentId);
const agentConfig = await getAgentConfig(spec.agentId);
if (!agentConfig) {
return NextResponse.json(
{ error: `Agent config not found: ${spec.agentId}` },
@@ -40,6 +40,6 @@ export async function POST(request: NextRequest) {
spec,
};
const created = createTask(task);
const created = await createTask(task);
return NextResponse.json(created, { status: 201 });
}

View File

@@ -1,4 +1,6 @@
// Agent runtime definitions and configuration
import { eq } from "drizzle-orm";
import { db } from "./db";
import { agentConfigs as agentTable } from "@homelab/db";
export type AgentRuntime = "claude-code" | "codex" | "opencode";
@@ -6,11 +8,11 @@ export interface AgentRuntimeInfo {
id: AgentRuntime;
name: string;
description: string;
defaultProviders: string[]; // which AI providers this runtime supports
cliCommand: string; // base CLI command
headlessFlag: string; // flag to run headless
modelFlag: string; // flag to specify model
promptFlag: string; // flag to pass the prompt/task
defaultProviders: string[];
cliCommand: string;
headlessFlag: string;
modelFlag: string;
promptFlag: string;
}
export const AGENT_RUNTIMES: Record<AgentRuntime, AgentRuntimeInfo> = {
@@ -32,7 +34,7 @@ export const AGENT_RUNTIMES: Record<AgentRuntime, AgentRuntimeInfo> = {
cliCommand: "codex",
headlessFlag: "--quiet",
modelFlag: "--model",
promptFlag: "", // prompt is positional
promptFlag: "",
},
"opencode": {
id: "opencode",
@@ -40,9 +42,9 @@ export const AGENT_RUNTIMES: Record<AgentRuntime, AgentRuntimeInfo> = {
description: "Open-source multi-provider coding agent. Supports Anthropic, OpenAI, Google, OpenRouter.",
defaultProviders: ["anthropic", "openai", "google", "openrouter", "opencode-zen"],
cliCommand: "opencode",
headlessFlag: "run", // subcommand, not a flag
headlessFlag: "run",
modelFlag: "--model",
promptFlag: "", // prompt is positional (like codex)
promptFlag: "",
},
};
@@ -55,34 +57,61 @@ export interface AgentConfig {
modelId: string;
provider: string;
maxTokens?: number;
env?: Record<string, string>; // additional env vars for the agent process
env?: Record<string, string>;
}
// Shared via globalThis to survive Next.js module re-bundling.
const g = globalThis as unknown as { __harnessAgentConfigs?: Map<string, AgentConfig> };
g.__harnessAgentConfigs ??= new Map();
const configs = g.__harnessAgentConfigs;
export function getAllAgentConfigs(): AgentConfig[] {
return Array.from(configs.values());
/** Map an agent_configs table row to the AgentConfig shape, turning SQL NULLs into undefined. */
function rowToConfig(row: typeof agentTable.$inferSelect): AgentConfig {
  const config: AgentConfig = {
    id: row.id,
    name: row.name,
    runtime: row.runtime as AgentRuntime,
    modelId: row.modelId,
    provider: row.provider,
    maxTokens: row.maxTokens == null ? undefined : row.maxTokens,
    env: row.env == null ? undefined : row.env,
  };
  return config;
}
export function getAgentConfig(id: string): AgentConfig | undefined {
return configs.get(id);
/** Fetch every stored agent configuration. */
export async function getAllAgentConfigs(): Promise<AgentConfig[]> {
  const configs: AgentConfig[] = [];
  for (const row of await db.select().from(agentTable)) {
    configs.push(rowToConfig(row));
  }
  return configs;
}
export function upsertAgentConfig(config: AgentConfig): AgentConfig {
configs.set(config.id, config);
/** Fetch a single agent configuration by id, or undefined when it does not exist. */
export async function getAgentConfig(id: string): Promise<AgentConfig | undefined> {
  const rows = await db.select().from(agentTable).where(eq(agentTable.id, id));
  if (rows.length === 0) return undefined;
  return rowToConfig(rows[0]);
}
/**
 * Insert or update an agent configuration, keyed by id.
 * Restart-safe: ON CONFLICT updates the existing row in place.
 */
export async function upsertAgentConfig(config: AgentConfig): Promise<AgentConfig> {
  const fields = {
    name: config.name,
    runtime: config.runtime,
    modelId: config.modelId,
    provider: config.provider,
    maxTokens: config.maxTokens,
    env: config.env,
  };
  await db
    .insert(agentTable)
    .values({ id: config.id, ...fields })
    .onConflictDoUpdate({
      target: agentTable.id,
      set: { ...fields, updatedAt: new Date() },
    });
  return config;
}
export function deleteAgentConfig(id: string): boolean {
return configs.delete(id);
/**
 * Delete an agent configuration by id; true when a row was removed.
 *
 * Affected-row count: the postgres-js driver used by lib/db.ts reports it
 * as `count` (postgres.js naming); `rowCount` is the node-postgres name.
 * The previous `rowCount`-only cast reads undefined under postgres-js,
 * making this function always return false — read both names instead.
 */
export async function deleteAgentConfig(id: string): Promise<boolean> {
  const result = await db.delete(agentTable).where(eq(agentTable.id, id));
  const affected = result as unknown as { count?: number; rowCount?: number };
  return (affected.count ?? affected.rowCount ?? 0) > 0;
}
// ─── CLI BUILDER ────────────────────────────────────────────
// Builds the shell command to invoke an agent headlessly.
export function buildAgentCommand(config: AgentConfig, prompt: string, workDir: string): string[] {
const runtime = AGENT_RUNTIMES[config.runtime];
@@ -94,7 +123,6 @@ export function buildAgentCommand(config: AgentConfig, prompt: string, workDir:
if (runtime.promptFlag) {
args.push(runtime.promptFlag, prompt);
} else {
// positional prompt (codex)
args.push(prompt);
}

View File

@@ -7,11 +7,7 @@ import { upsertCuratedModel, getCuratedModels } from "./model-store";
import { upsertAgentConfig, getAllAgentConfigs, type AgentRuntime } from "./agents";
import { fetchAllModels } from "./model-providers";
const gb = globalThis as unknown as { __harnessBooted?: boolean };
// Well-known models with pricing (used as fallback when API discovery returns
// models without pricing info, and to enable cost tracking from the start).
// Well-known models with pricing
const KNOWN_MODELS: Record<string, { name: string; provider: string; contextWindow: number; costPer1kInput: number; costPer1kOutput: number }> = {
"claude-opus-4-20250514": { name: "Claude Opus 4", provider: "anthropic", contextWindow: 200000, costPer1kInput: 0.015, costPer1kOutput: 0.075 },
"claude-sonnet-4-20250514": { name: "Claude Sonnet 4", provider: "anthropic", contextWindow: 200000, costPer1kInput: 0.003, costPer1kOutput: 0.015 },
@@ -25,7 +21,6 @@ const KNOWN_MODELS: Record<string, { name: string; provider: string; contextWind
};
// Default agents to create per provider when credentials are available.
// Maps provider → [{ runtime, models[] }].
const DEFAULT_AGENTS: Record<string, { runtime: AgentRuntime; models: string[] }[]> = {
anthropic: [
{ runtime: "claude-code", models: ["claude-sonnet-4-20250514", "claude-opus-4-20250514"] },
@@ -50,7 +45,10 @@ const RUNTIME_LABELS: Record<AgentRuntime, string> = {
// ─── CREDENTIAL LOADING ─────────────────────────────────────
function loadCredentialsFromEnv() {
// Read credential files and env vars, upsert into the database.
// Uses ON CONFLICT DO UPDATE so restarts are idempotent.
async function loadCredentialsFromEnv() {
const envMap: [string, Provider, string][] = [
["ANTHROPIC_API_KEY", "anthropic", "Anthropic (env)"],
["OPENAI_API_KEY", "openai", "OpenAI (env)"],
@@ -65,13 +63,13 @@ function loadCredentialsFromEnv() {
for (const [envVar, provider, label] of envMap) {
const token = process.env[envVar];
if (!token) continue;
// Don't overwrite if already loaded from file
if (getRawCredentialsByProvider(provider).length > 0) continue;
upsertCredential({ id: `env-${provider}`, provider, label, token });
const existing = await getRawCredentialsByProvider(provider);
if (existing.length > 0) continue;
await upsertCredential({ id: `env-${provider}`, provider, label, token });
}
}
function loadClaudeCredentials() {
async function loadClaudeCredentials() {
const configDir = process.env.CLAUDE_CONFIG_DIR;
if (!configDir) return;
@@ -80,10 +78,8 @@ function loadClaudeCredentials() {
try {
const raw = JSON.parse(readFileSync(credPath, "utf-8"));
// Claude Code OAuth credentials → extract access token for Anthropic API
if (raw.claudeAiOauth?.accessToken) {
upsertCredential({
await upsertCredential({
id: "file-anthropic",
provider: "anthropic",
label: `Claude ${raw.claudeAiOauth.subscriptionType || "API"} (mounted)`,
@@ -95,7 +91,7 @@ function loadClaudeCredentials() {
}
}
function loadOpenCodeCredentials() {
async function loadOpenCodeCredentials() {
const configDir = process.env.OPENCODE_CONFIG_DIR;
if (!configDir) return;
@@ -104,8 +100,6 @@ function loadOpenCodeCredentials() {
try {
const raw = JSON.parse(readFileSync(authPath, "utf-8"));
// OpenCode auth.json: { "provider": { "type": "api", "key": "..." } }
const providerMap: Record<string, Provider> = {
anthropic: "anthropic",
openai: "openai",
@@ -119,9 +113,9 @@ function loadOpenCodeCredentials() {
if (!provider || typeof entry !== "object" || !entry) continue;
const token = (entry as Record<string, unknown>).key;
if (typeof token !== "string" || !token) continue;
// Don't overwrite credentials already loaded from Claude config
if (getRawCredentialsByProvider(provider).length > 0) continue;
upsertCredential({
const existing = await getRawCredentialsByProvider(provider);
if (existing.length > 0) continue;
await upsertCredential({
id: `file-${provider}`,
provider,
label: `${key} (mounted)`,
@@ -136,13 +130,11 @@ function loadOpenCodeCredentials() {
// ─── MODEL + AGENT AUTO-DISCOVERY ───────────────────────────
async function discoverModelsAndAgents() {
// Fetch live models from all providers with credentials
const liveModels = await fetchAllModels();
// Upsert discovered models into curated store, enriched with known pricing
for (const m of liveModels) {
const known = KNOWN_MODELS[m.id];
upsertCuratedModel({
await upsertCuratedModel({
id: m.id,
name: known?.name || m.name,
provider: m.provider,
@@ -153,25 +145,29 @@ async function discoverModelsAndAgents() {
});
}
// Also add well-known models that have credentials but weren't returned by API
// (e.g. newer models not yet in /v1/models listing)
// Add well-known models with credentials that weren't in the API listing
const allModels = await getCuratedModels();
const existingIds = new Set(allModels.map(m => m.id));
for (const [id, info] of Object.entries(KNOWN_MODELS)) {
if (getCuratedModels().some(m => m.id === id)) continue;
if (getRawCredentialsByProvider(info.provider as Provider).length === 0) continue;
upsertCuratedModel({ id, ...info, enabled: true });
if (existingIds.has(id)) continue;
const creds = await getRawCredentialsByProvider(info.provider as Provider);
if (creds.length === 0) continue;
await upsertCuratedModel({ id, ...info, enabled: true });
}
// Create default agent configs if none exist yet
if (getAllAgentConfigs().length > 0) return;
// Create default agent configs if none exist
const existingAgents = await getAllAgentConfigs();
if (existingAgents.length > 0) return;
for (const [provider, runtimes] of Object.entries(DEFAULT_AGENTS)) {
if (getRawCredentialsByProvider(provider as Provider).length === 0) continue;
const creds = await getRawCredentialsByProvider(provider as Provider);
if (creds.length === 0) continue;
for (const { runtime, models } of runtimes) {
for (const modelId of models) {
const known = KNOWN_MODELS[modelId];
const name = `${RUNTIME_LABELS[runtime]} · ${known?.name || modelId}`;
const id = `auto-${runtime}-${modelId}`.replace(/[^a-z0-9-]/g, "-");
upsertAgentConfig({ id, name, runtime, modelId, provider });
await upsertAgentConfig({ id, name, runtime, modelId, provider });
}
}
}
@@ -180,18 +176,15 @@ async function discoverModelsAndAgents() {
// ─── BOOT ───────────────────────────────────────────────────
export async function boot() {
if (gb.__harnessBooted) return;
gb.__harnessBooted = true;
// 1. Load credentials from mounted secrets (files take priority)
loadClaudeCredentials();
loadOpenCodeCredentials();
await loadClaudeCredentials();
await loadOpenCodeCredentials();
// 2. Fill gaps from env vars
loadCredentialsFromEnv();
// 3. Discover models and create agents (async, best-effort)
await loadCredentialsFromEnv();
// 3. Discover models and create agents (best-effort)
try {
await discoverModelsAndAgents();
} catch {
// non-fatal — models/agents will be empty until manually configured
} catch (err) {
console.error("[boot] Model/agent discovery failed:", err);
}
}

View File

@@ -1,3 +1,7 @@
import { eq } from "drizzle-orm";
import { db } from "./db";
import { credentials as credentialsTable } from "@homelab/db";
export type Provider =
| "github" | "gitlab"
| "anthropic" | "openai" | "openrouter" | "google" | "opencode-zen";
@@ -10,50 +14,73 @@ export interface Credential {
provider: Provider;
label: string;
token: string;
baseUrl?: string; // for self-hosted GitLab or custom endpoints
}
// In-memory store shared via globalThis to survive Next.js module re-bundling.
const g = globalThis as unknown as { __harnessCredentials?: Map<string, Credential> };
g.__harnessCredentials ??= new Map();
const credentials = g.__harnessCredentials;
export function getAllCredentials(): Credential[] {
return Array.from(credentials.values()).map(c => ({
...c,
token: maskToken(c.token),
}));
}
export function getCredentialsByKind(kind: "git" | "ai"): Credential[] {
const providers = kind === "git" ? GIT_PROVIDERS : AI_PROVIDERS;
return Array.from(credentials.values())
.filter(c => providers.includes(c.provider))
.map(c => ({ ...c, token: maskToken(c.token) }));
}
export function getCredential(id: string): Credential | undefined {
return credentials.get(id);
}
export function getCredentialsByProvider(provider: Provider): Credential[] {
return Array.from(credentials.values()).filter(c => c.provider === provider);
}
export function getRawCredentialsByProvider(provider: Provider): Credential[] {
return Array.from(credentials.values()).filter(c => c.provider === provider);
}
export function upsertCredential(cred: Credential): Credential {
credentials.set(cred.id, cred);
return { ...cred, token: maskToken(cred.token) };
}
export function deleteCredential(id: string): boolean {
return credentials.delete(id);
baseUrl?: string;
}
/** Redact a secret for display: keep the first and last 4 chars, hide the rest. */
function maskToken(token: string): string {
  const tooShort = token.length <= 8;
  return tooShort ? "••••••••" : `${token.slice(0, 4)}••••${token.slice(-4)}`;
}
/** Map a credentials table row to the Credential shape, turning SQL NULLs into undefined. */
function rowToCredential(row: typeof credentialsTable.$inferSelect): Credential {
  const { id, label, token } = row;
  return {
    id,
    provider: row.provider as Provider,
    label,
    token,
    baseUrl: row.baseUrl == null ? undefined : row.baseUrl,
  };
}
/** List every stored credential with its token redacted for display. */
export async function getAllCredentials(): Promise<Credential[]> {
  const masked: Credential[] = [];
  for (const row of await db.select().from(credentialsTable)) {
    const cred = rowToCredential(row);
    masked.push({ ...cred, token: maskToken(cred.token) });
  }
  return masked;
}
export async function getCredentialsByKind(kind: "git" | "ai"): Promise<Credential[]> {
const providers = kind === "git" ? GIT_PROVIDERS : AI_PROVIDERS;
const rows = await db.select().from(credentialsTable);
return rows
.filter(r => providers.includes(r.provider as Provider))
.map(r => ({ ...rowToCredential(r), token: maskToken(r.token) }));
}
/** Look up a single credential by id (token NOT redacted), or undefined. */
export async function getCredential(id: string): Promise<Credential | undefined> {
  const rows = await db.select().from(credentialsTable).where(eq(credentialsTable.id, id));
  if (rows.length === 0) return undefined;
  return rowToCredential(rows[0]);
}
/** Credentials for one provider with tokens redacted (safe for API responses). */
export async function getCredentialsByProvider(provider: Provider): Promise<Credential[]> {
  const rows = await db
    .select()
    .from(credentialsTable)
    .where(eq(credentialsTable.provider, provider));
  return rows.map(row => {
    const cred = rowToCredential(row);
    return { ...cred, token: maskToken(cred.token) };
  });
}
/** Credentials for one provider with PLAINTEXT tokens — internal use only (agent env injection, API validation). */
export async function getRawCredentialsByProvider(provider: Provider): Promise<Credential[]> {
  const rows = await db
    .select()
    .from(credentialsTable)
    .where(eq(credentialsTable.provider, provider));
  return rows.map(r => rowToCredential(r));
}
/**
 * Insert or update a credential, keyed by id, then return it with the
 * token redacted so callers can echo it back to the UI safely.
 */
export async function upsertCredential(cred: Credential): Promise<Credential> {
  const fields = {
    provider: cred.provider,
    label: cred.label,
    token: cred.token,
    baseUrl: cred.baseUrl,
  };
  await db
    .insert(credentialsTable)
    .values({ id: cred.id, ...fields })
    .onConflictDoUpdate({
      target: credentialsTable.id,
      set: { ...fields, updatedAt: new Date() },
    });
  return { ...cred, token: maskToken(cred.token) };
}
/**
 * Delete a credential by id; true when a row was actually removed.
 *
 * Affected-row count: the postgres-js driver used by lib/db.ts reports it
 * as `count` (postgres.js naming); `rowCount` is the node-postgres name.
 * The previous `rowCount`-only cast reads undefined under postgres-js, so
 * deletes always reported false and the API returned 404 after success —
 * read both names so either driver works.
 */
export async function deleteCredential(id: string): Promise<boolean> {
  const result = await db.delete(credentialsTable).where(eq(credentialsTable.id, id));
  const affected = result as unknown as { count?: number; rowCount?: number };
  return (affected.count ?? affected.rowCount ?? 0) > 0;
}

View File

@@ -0,0 +1,25 @@
import { drizzle } from "drizzle-orm/postgres-js";
import postgres from "postgres";
import * as schema from "@homelab/db";

type DB = ReturnType<typeof drizzle>;

// Cache on globalThis so Next.js dev-mode module re-evaluation reuses one pool.
const g = globalThis as unknown as { __harnessDb?: DB };

/**
 * Create (once) and return the Drizzle client. Throws when DATABASE_URL is
 * unset — callers only hit this at query time, never at import/build time.
 */
function getDb(): DB {
  if (!g.__harnessDb) {
    const connectionString = process.env.DATABASE_URL;
    if (!connectionString) {
      throw new Error("DATABASE_URL is required");
    }
    const client = postgres(connectionString);
    g.__harnessDb = drizzle(client, { schema });
  }
  return g.__harnessDb;
}

// Lazy proxy — the connection is established on first query, not at import
// time, so `next build` can evaluate this module without a database.
// Methods are bound to the real client before being returned: handing back
// an unbound function would make `this` the Proxy inside drizzle internals,
// which throws for any class method that touches native #private fields
// (those cannot be accessed through a Proxy receiver).
export const db = new Proxy({} as DB, {
  get(_target, prop) {
    const real = getDb() as unknown as Record<string | symbol, unknown>;
    const value = real[prop];
    return typeof value === "function" ? value.bind(real) : value;
  },
});

View File

@@ -5,7 +5,6 @@ import { ExecutionResult } from "./types";
const DEFAULT_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes
// Maps AI providers to their env var names
const PROVIDER_ENV_VARS: Record<string, string> = {
anthropic: "ANTHROPIC_API_KEY",
openai: "OPENAI_API_KEY",
@@ -14,7 +13,6 @@ const PROVIDER_ENV_VARS: Record<string, string> = {
"opencode-zen": "OPENCODE_ZEN_API_KEY",
};
// Best-effort token extraction regexes per runtime
const TOKEN_PATTERNS: Record<string, { input: RegExp; output: RegExp }> = {
"claude-code": {
input: /input[_\s]tokens?[:\s]+(\d[\d,]*)/i,
@@ -43,7 +41,7 @@ export async function executeAgent(opts: {
timeoutMs?: number;
signal?: AbortSignal;
}): Promise<ExecutionResult> {
const config = getAgentConfig(opts.agentId);
const config = await getAgentConfig(opts.agentId);
if (!config) {
throw new Error(`Agent config not found: ${opts.agentId}`);
}
@@ -52,11 +50,8 @@ export async function executeAgent(opts: {
const command = args[0];
const commandArgs = args.slice(1);
// Build environment with credentials
const env: NodeJS.ProcessEnv = { ...process.env };
// Set API keys — OpenCode is multi-provider so inject all available keys;
// other runtimes only need their configured provider's key.
const providersToInject =
config.runtime === "opencode"
? Object.keys(PROVIDER_ENV_VARS)
@@ -65,20 +60,18 @@ export async function executeAgent(opts: {
for (const provider of providersToInject) {
const envVar = PROVIDER_ENV_VARS[provider];
if (!envVar) continue;
const creds = getRawCredentialsByProvider(provider as Provider);
const creds = await getRawCredentialsByProvider(provider as Provider);
if (creds.length > 0) {
env[envVar] = creds[0].token;
}
}
// Set GitHub token for git operations within agent
const ghCreds = getRawCredentialsByProvider("github" as Provider);
const ghCreds = await getRawCredentialsByProvider("github" as Provider);
if (ghCreds.length > 0) {
env.GITHUB_TOKEN = ghCreds[0].token;
env.GH_TOKEN = ghCreds[0].token;
}
// Add any custom env from agent config
if (config.env) {
Object.assign(env, config.env);
}
@@ -105,14 +98,12 @@ export async function executeAgent(opts: {
stderr += chunk.toString();
});
// Timeout
const timer = setTimeout(() => {
killed = true;
child.kill("SIGTERM");
setTimeout(() => child.kill("SIGKILL"), 5000);
}, timeout);
// Cancellation via AbortSignal
const onAbort = () => {
killed = true;
child.kill("SIGTERM");

View File

@@ -19,7 +19,7 @@ export async function fetchAllModels(): Promise<ModelInfo[]> {
}
async function fetchAnthropicModels(): Promise<ModelInfo[]> {
const creds = getRawCredentialsByProvider("anthropic");
const creds = await getRawCredentialsByProvider("anthropic");
if (creds.length === 0) return [];
for (const cred of creds) {
@@ -48,7 +48,7 @@ async function fetchAnthropicModels(): Promise<ModelInfo[]> {
}
async function fetchOpenAIModels(): Promise<ModelInfo[]> {
const creds = getRawCredentialsByProvider("openai");
const creds = await getRawCredentialsByProvider("openai");
if (creds.length === 0) return [];
for (const cred of creds) {
@@ -79,7 +79,7 @@ async function fetchOpenAIModels(): Promise<ModelInfo[]> {
}
async function fetchOpenRouterModels(): Promise<ModelInfo[]> {
const creds = getRawCredentialsByProvider("openrouter");
const creds = await getRawCredentialsByProvider("openrouter");
if (creds.length === 0) return [];
for (const cred of creds) {
@@ -106,7 +106,7 @@ async function fetchOpenRouterModels(): Promise<ModelInfo[]> {
}
async function fetchGoogleModels(): Promise<ModelInfo[]> {
const creds = getRawCredentialsByProvider("google");
const creds = await getRawCredentialsByProvider("google");
if (creds.length === 0) return [];
for (const cred of creds) {

View File

@@ -1,4 +1,6 @@
// Curated model list and usage tracking
import { eq, sql } from "drizzle-orm";
import { db } from "./db";
import { curatedModels as modelsTable, modelUsage as usageTable } from "@homelab/db";
export interface CuratedModel {
id: string;
@@ -6,8 +8,8 @@ export interface CuratedModel {
provider: string;
enabled: boolean;
contextWindow?: number;
costPer1kInput?: number; // USD per 1k input tokens
costPer1kOutput?: number; // USD per 1k output tokens
costPer1kInput?: number;
costPer1kOutput?: number;
}
export interface ModelUsageEntry {
@@ -32,69 +34,122 @@ export interface ModelUsageSummary {
totalDurationMs: number;
}
// In-memory stores shared via globalThis to survive Next.js module re-bundling.
const g = globalThis as unknown as { __harnessCuratedModels?: Map<string, CuratedModel>; __harnessUsageLog?: ModelUsageEntry[] };
g.__harnessCuratedModels ??= new Map();
g.__harnessUsageLog ??= [];
const curatedModels = g.__harnessCuratedModels;
const usageLog = g.__harnessUsageLog;
/** Map a curated_models table row to the CuratedModel shape, turning SQL NULLs into undefined. */
function rowToModel(row: typeof modelsTable.$inferSelect): CuratedModel {
  const { id, name, provider, enabled } = row;
  return {
    id,
    name,
    provider,
    enabled,
    contextWindow: row.contextWindow == null ? undefined : row.contextWindow,
    costPer1kInput: row.costPer1kInput == null ? undefined : row.costPer1kInput,
    costPer1kOutput: row.costPer1kOutput == null ? undefined : row.costPer1kOutput,
  };
}
// ─── CURATED MODELS ─────────────────────────────────────────
export function getCuratedModels(): CuratedModel[] {
return Array.from(curatedModels.values());
/** Fetch the full curated model list. */
export async function getCuratedModels(): Promise<CuratedModel[]> {
  const models: CuratedModel[] = [];
  for (const row of await db.select().from(modelsTable)) {
    models.push(rowToModel(row));
  }
  return models;
}
export function getEnabledModels(): CuratedModel[] {
return Array.from(curatedModels.values()).filter(m => m.enabled);
/** Curated models whose enabled flag is set. */
export async function getEnabledModels(): Promise<CuratedModel[]> {
  const enabledRows = await db
    .select()
    .from(modelsTable)
    .where(eq(modelsTable.enabled, true));
  return enabledRows.map((row) => rowToModel(row));
}
export function upsertCuratedModel(model: CuratedModel): CuratedModel {
curatedModels.set(model.id, model);
/**
 * Insert a curated model, or — when a row with the same id already exists —
 * overwrite every mutable column via Postgres ON CONFLICT keyed on the PK.
 * Returns the input object unchanged (the DB row is assumed to match it;
 * created_at/updated_at defaults live in the database).
 */
export async function upsertCuratedModel(model: CuratedModel): Promise<CuratedModel> {
  await db.insert(modelsTable).values({
    id: model.id,
    name: model.name,
    provider: model.provider,
    enabled: model.enabled,
    contextWindow: model.contextWindow,
    costPer1kInput: model.costPer1kInput,
    costPer1kOutput: model.costPer1kOutput,
  }).onConflictDoUpdate({
    target: modelsTable.id,
    set: {
      name: model.name,
      provider: model.provider,
      enabled: model.enabled,
      contextWindow: model.contextWindow,
      costPer1kInput: model.costPer1kInput,
      costPer1kOutput: model.costPer1kOutput,
      updatedAt: new Date(), // audit column bumped only on the update path
    },
  });
  return model;
}
export function removeCuratedModel(id: string): boolean {
return curatedModels.delete(id);
/**
 * Delete a curated model by id.
 * Returns true when a row was actually removed.
 *
 * Uses DELETE ... RETURNING so the result is typed by Drizzle, instead of
 * casting the driver's result through `as unknown as { rowCount }`, which
 * silently breaks if the driver's result shape ever changes.
 */
export async function removeCuratedModel(id: string): Promise<boolean> {
  const deleted = await db
    .delete(modelsTable)
    .where(eq(modelsTable.id, id))
    .returning({ id: modelsTable.id });
  return deleted.length > 0;
}
export function toggleModelEnabled(id: string): CuratedModel | undefined {
const model = curatedModels.get(id);
if (!model) return undefined;
model.enabled = !model.enabled;
curatedModels.set(id, model);
return model;
export async function toggleModelEnabled(id: string): Promise<CuratedModel | undefined> {
const [row] = await db.select().from(modelsTable).where(eq(modelsTable.id, id));
if (!row) return undefined;
const newEnabled = !row.enabled;
await db.update(modelsTable).set({ enabled: newEnabled, updatedAt: new Date() }).where(eq(modelsTable.id, id));
return rowToModel({ ...row, enabled: newEnabled });
}
export function updateModelCost(id: string, costPer1kInput: number, costPer1kOutput: number): CuratedModel | undefined {
const model = curatedModels.get(id);
if (!model) return undefined;
model.costPer1kInput = costPer1kInput;
model.costPer1kOutput = costPer1kOutput;
curatedModels.set(id, model);
return model;
/**
 * Set the per-1k-token costs (USD) for a model.
 * Returns the updated model, or undefined when no row with that id exists.
 *
 * Single UPDATE ... RETURNING: avoids the extra SELECT round trip of the
 * read-then-write version, and returns what was actually persisted rather
 * than an optimistic in-memory merge of the pre-update row.
 */
export async function updateModelCost(id: string, costPer1kInput: number, costPer1kOutput: number): Promise<CuratedModel | undefined> {
  const [row] = await db
    .update(modelsTable)
    .set({ costPer1kInput, costPer1kOutput, updatedAt: new Date() })
    .where(eq(modelsTable.id, id))
    .returning();
  return row ? rowToModel(row) : undefined;
}
// ─── USAGE TRACKING ─────────────────────────────────────────
export function recordUsage(entry: ModelUsageEntry): void {
usageLog.push(entry);
/**
 * Append one usage entry to the harness_model_usage log.
 * Called once per agent iteration by the orchestrator; this module never
 * updates or deletes usage rows.
 */
export async function recordUsage(entry: ModelUsageEntry): Promise<void> {
  await db.insert(usageTable).values({
    modelId: entry.modelId,
    provider: entry.provider,
    taskId: entry.taskId,
    taskSlug: entry.taskSlug,
    iteration: entry.iteration,
    inputTokens: entry.inputTokens,
    outputTokens: entry.outputTokens,
    durationMs: entry.durationMs,
    timestamp: entry.timestamp, // presumably epoch ms (Date.now()) — confirm against callers
  });
}
export function getUsageLog(): ModelUsageEntry[] {
return [...usageLog];
/**
 * The full usage log as plain ModelUsageEntry objects.
 *
 * Ordered by the serial primary key so callers see entries in append
 * (chronological) order — the previous in-memory array preserved insertion
 * order, and a bare SELECT without ORDER BY does not guarantee any order.
 */
export async function getUsageLog(): Promise<ModelUsageEntry[]> {
  const rows = await db.select().from(usageTable).orderBy(usageTable.id);
  return rows.map(r => ({
    modelId: r.modelId,
    provider: r.provider,
    taskId: r.taskId,
    taskSlug: r.taskSlug,
    iteration: r.iteration,
    inputTokens: r.inputTokens,
    outputTokens: r.outputTokens,
    durationMs: r.durationMs,
    timestamp: r.timestamp,
  }));
}
export function getUsageSummary(): ModelUsageSummary[] {
export async function getUsageSummary(): Promise<ModelUsageSummary[]> {
// Fetch usage + model cost data, aggregate in JS to match prior behavior
const [usage, models] = await Promise.all([
db.select().from(usageTable),
db.select().from(modelsTable),
]);
const modelMap = new Map(models.map(m => [m.id, m]));
const grouped = new Map<string, ModelUsageSummary>();
for (const entry of usageLog) {
for (const entry of usage) {
const key = `${entry.provider}:${entry.modelId}`;
const existing = grouped.get(key);
const model = curatedModels.get(entry.modelId);
const model = modelMap.get(entry.modelId);
const inputCost = model?.costPer1kInput ? (entry.inputTokens / 1000) * model.costPer1kInput : 0;
const outputCost = model?.costPer1kOutput ? (entry.outputTokens / 1000) * model.costPer1kOutput : 0;
const existing = grouped.get(key);
if (existing) {
existing.totalInputTokens += entry.inputTokens;
existing.totalOutputTokens += entry.outputTokens;
@@ -116,4 +171,3 @@ export function getUsageSummary(): ModelUsageSummary[] {
return Array.from(grouped.values()).sort((a, b) => b.totalCost - a.totalCost);
}

View File

@@ -43,15 +43,13 @@ export function startOrchestrator(): void {
if (running) return;
running = true;
// Mark any crashed running tasks as failed on startup
recoverCrashedTasks();
pollTimer = setInterval(() => {
if (currentTaskId) return; // already processing a task
if (currentTaskId) return;
poll();
}, POLL_INTERVAL_MS);
// Immediate first poll
poll();
}
@@ -69,18 +67,20 @@ export function cancelTask(taskId: string): boolean {
return true;
}
function recoverCrashedTasks(): void {
const runningTasks = getRunningTasks();
async function recoverCrashedTasks(): Promise<void> {
const runningTasks = await getRunningTasks();
for (const task of runningTasks) {
// Mark running iterations as failed
const updatedIterations = task.iterations.map((iter) =>
iter.status === "running"
? { ...iter, status: "failed" as const, diagnosis: "Interrupted — server restarted", completedAt: Date.now() }
: iter,
);
updateTask(task.id, {
for (const iter of task.iterations) {
if (iter.status === "running") {
await updateIteration(task.id, iter.n, {
status: "failed",
diagnosis: "Interrupted — server restarted",
completedAt: Date.now(),
});
}
}
await updateTask(task.id, {
status: "failed",
iterations: updatedIterations,
completedAt: Date.now(),
});
}
@@ -89,7 +89,7 @@ function recoverCrashedTasks(): void {
async function poll(): Promise<void> {
if (!running || currentTaskId) return;
const task = getFirstPendingTask();
const task = await getFirstPendingTask();
if (!task) return;
currentTaskId = task.id;
@@ -99,7 +99,7 @@ async function poll(): Promise<void> {
await runTask(task);
} catch (err) {
console.error(`[orchestrator] Task ${task.id} failed with error:`, err);
updateTask(task.id, {
await updateTask(task.id, {
status: "failed",
completedAt: Date.now(),
});
@@ -110,20 +110,19 @@ async function poll(): Promise<void> {
}
async function runTask(task: Task): Promise<void> {
const agentConfig = getAgentConfig(task.spec.agentId);
const agentConfig = await getAgentConfig(task.spec.agentId);
if (!agentConfig) {
updateTask(task.id, {
await updateTask(task.id, {
status: "failed",
completedAt: Date.now(),
});
return;
}
// Determine git credentials and repo URL
const gitCreds = getRawCredentialsByProvider("github");
const gitCreds = await getRawCredentialsByProvider("github");
const gitToken = gitCreds[0]?.token;
if (!gitToken) {
updateTask(task.id, {
await updateTask(task.id, {
status: "failed",
completedAt: Date.now(),
});
@@ -132,18 +131,17 @@ async function runTask(task: Task): Promise<void> {
const repoUrl = buildAuthenticatedCloneUrl(task.project, "github", gitToken);
updateTask(task.id, {
await updateTask(task.id, {
status: "running",
startedAt: Date.now(),
});
// Ensure bare clone
let bareClone: string;
try {
bareClone = await ensureBareClone(repoUrl, task.slug);
} catch (err) {
console.error(`[orchestrator] Failed to clone repo for task ${task.id}:`, err);
updateTask(task.id, {
await updateTask(task.id, {
status: "failed",
completedAt: Date.now(),
});
@@ -155,7 +153,7 @@ async function runTask(task: Task): Promise<void> {
for (let n = 1; n <= task.maxIterations; n++) {
if (currentAbort?.signal.aborted) {
updateTask(task.id, {
await updateTask(task.id, {
status: "failed",
completedAt: Date.now(),
});
@@ -163,10 +161,7 @@ async function runTask(task: Task): Promise<void> {
}
const result = await runIteration(task, n, bareClone, branchName);
if (!result) {
// Iteration was cancelled or errored fatally
return;
}
if (!result) return;
if (result.allPassed) {
converged = true;
@@ -175,9 +170,9 @@ async function runTask(task: Task): Promise<void> {
}
if (converged) {
// Push and create PR
try {
const lastIterN = getTask(task.id)!.iteration;
const finalTask = await getTask(task.id);
const lastIterN = finalTask!.iteration;
const workDir = iterationDir(task.id, lastIterN);
await commitAll(workDir, `harness: ${task.goal}`);
@@ -191,30 +186,31 @@ async function runTask(task: Task): Promise<void> {
token: gitToken,
});
updateTask(task.id, {
await updateTask(task.id, {
status: "completed",
completedAt: Date.now(),
pr: { number: pr.number, title: `[harness] ${task.goal}`, status: "open" },
});
} catch (err) {
console.error(`[orchestrator] Failed to create PR for task ${task.id}:`, err);
updateTask(task.id, {
await updateTask(task.id, {
status: "completed",
completedAt: Date.now(),
});
}
} else {
updateTask(task.id, {
await updateTask(task.id, {
status: "failed",
completedAt: Date.now(),
});
}
// Cleanup worktrees
const finalTask = getTask(task.id)!;
const finalTask = await getTask(task.id);
if (finalTask) {
for (const iter of finalTask.iterations) {
await removeWorktree(bareClone, iterationDir(task.id, iter.n));
}
}
}
async function runIteration(
@@ -229,17 +225,16 @@ async function runIteration(
diagnosis: null,
startedAt: Date.now(),
};
appendIteration(task.id, iteration);
await appendIteration(task.id, iteration);
const workDir = iterationDir(task.id, n);
try {
// Create worktree — first iteration gets a new branch, subsequent reuse it
const branchForWorktree = n === 1 ? branchName : `${branchName}-iter-${n}`;
await createWorktree(bareClone, workDir, branchForWorktree, "HEAD");
} catch (err) {
console.error(`[orchestrator] Failed to create worktree for iteration ${n}:`, err);
updateIteration(task.id, n, {
await updateIteration(task.id, n, {
status: "failed",
diagnosis: `Failed to create worktree: ${err}`,
completedAt: Date.now(),
@@ -247,8 +242,7 @@ async function runIteration(
return null;
}
// Build prompt with prior iterations
const currentTask = getTask(task.id)!;
const currentTask = (await getTask(task.id))!;
const priorIterations = currentTask.iterations.filter((i) => i.n < n);
const prompt = await buildPrompt({
task: currentTask,
@@ -256,7 +250,6 @@ async function runIteration(
priorIterations,
});
// Execute agent
const execResult = await executeAgent({
agentId: task.spec.agentId,
prompt,
@@ -265,16 +258,15 @@ async function runIteration(
});
if (execResult.killed && currentAbort?.signal.aborted) {
updateIteration(task.id, n, {
await updateIteration(task.id, n, {
status: "failed",
diagnosis: "Cancelled by user",
completedAt: Date.now(),
});
updateTask(task.id, { status: "failed", completedAt: Date.now() });
await updateTask(task.id, { status: "failed", completedAt: Date.now() });
return null;
}
// Evaluate
const evalResult = await evaluate({
task: currentTask,
iterationNumber: n,
@@ -283,10 +275,9 @@ async function runIteration(
workDir,
});
// Record usage
const agentConfig = getAgentConfig(task.spec.agentId);
const agentConfig = await getAgentConfig(task.spec.agentId);
if (agentConfig) {
recordUsage({
await recordUsage({
modelId: agentConfig.modelId,
provider: agentConfig.provider,
taskId: task.id,
@@ -299,18 +290,16 @@ async function runIteration(
});
}
// Update iteration
updateIteration(task.id, n, {
await updateIteration(task.id, n, {
status: evalResult.allPassed ? "passed" : "failed",
diagnosis: evalResult.diagnosis,
agentOutput: execResult.stdout.slice(-8000), // keep last 8k chars
agentOutput: execResult.stdout.slice(-8000),
evals: evalResult.evals,
diffStats: evalResult.diffStats,
completedAt: Date.now(),
});
// Update task-level evals
updateTask(task.id, { evals: evalResult.evals });
await updateTask(task.id, { evals: evalResult.evals });
return { allPassed: evalResult.allPassed };
}

View File

@@ -1,4 +1,4 @@
import { getCredentialsByProvider } from "./credentials";
import { getRawCredentialsByProvider } from "./credentials";
export interface RepoResult {
provider: "github" | "gitlab";
@@ -21,7 +21,7 @@ export async function searchRepos(query: string): Promise<RepoResult[]> {
}
async function searchGitHub(query: string): Promise<RepoResult[]> {
const creds = getCredentialsByProvider("github");
const creds = await getRawCredentialsByProvider("github");
if (creds.length === 0) return [];
const results: RepoResult[] = [];
@@ -61,7 +61,7 @@ async function searchGitHub(query: string): Promise<RepoResult[]> {
}
async function searchGitLab(query: string): Promise<RepoResult[]> {
const creds = getCredentialsByProvider("gitlab");
const creds = await getRawCredentialsByProvider("gitlab");
if (creds.length === 0) return [];
const results: RepoResult[] = [];

View File

@@ -1,62 +1,157 @@
import { Task } from "./types";
import { eq, and } from "drizzle-orm";
import { db } from "./db";
import { tasks as tasksTable, iterations as iterationsTable } from "@homelab/db";
import { Task, Iteration } from "./types";
// In-memory task store shared via globalThis to survive Next.js module re-bundling.
const g = globalThis as unknown as { __harnessTasks?: Map<string, Task> };
g.__harnessTasks ??= new Map();
const tasks = g.__harnessTasks;
export function getAllTasks(): Task[] {
return Array.from(tasks.values());
/**
 * Assemble a Task domain object from its harness_tasks row plus that task's
 * harness_iterations rows (sorted ascending by iteration number n).
 *
 * jsonb columns (evals, pr, spec) are cast with `as` to their domain types —
 * the database stores them untyped, so shape correctness relies entirely on
 * the writers in this module.
 */
function rowToTask(
  row: typeof tasksTable.$inferSelect,
  iters: (typeof iterationsTable.$inferSelect)[],
): Task {
  return {
    id: row.id,
    slug: row.slug,
    goal: row.goal,
    status: row.status as Task["status"],
    iteration: row.iteration,
    maxIterations: row.maxIterations,
    startedAt: row.startedAt ?? null, // NULL -> null here ...
    completedAt: row.completedAt ?? undefined, // ... but NULL -> undefined here; asymmetry presumably mirrors the Task type — confirm
    project: row.project,
    evals: (row.evals ?? {}) as Record<string, import("./types").Eval>,
    pr: row.pr as Task["pr"] | undefined,
    spec: row.spec as Task["spec"],
    iterations: iters
      .sort((a, b) => a.n - b.n)
      .map(i => ({
        n: i.n,
        status: i.status as Iteration["status"],
        diagnosis: i.diagnosis,
        agentOutput: i.agentOutput ?? undefined,
        evals: i.evals as Iteration["evals"],
        diffStats: i.diffStats ?? undefined,
        startedAt: i.startedAt ?? undefined,
        completedAt: i.completedAt ?? undefined,
      })),
  };
}
export function getTask(id: string): Task | undefined {
return tasks.get(id);
/**
 * Every task, fully hydrated with its iterations.
 * Tasks and iterations are fetched in two parallel queries and joined in
 * memory, so the cost stays at two round trips regardless of task count.
 */
export async function getAllTasks(): Promise<Task[]> {
  const [taskRows, iterRows] = await Promise.all([
    db.select().from(tasksTable),
    db.select().from(iterationsTable),
  ]);
  // Bucket iterations by owning task id.
  const byTask = new Map<string, (typeof iterationsTable.$inferSelect)[]>();
  for (const iterRow of iterRows) {
    const bucket = byTask.get(iterRow.taskId);
    if (bucket) {
      bucket.push(iterRow);
    } else {
      byTask.set(iterRow.taskId, [iterRow]);
    }
  }
  return taskRows.map(taskRow => rowToTask(taskRow, byTask.get(taskRow.id) ?? []));
}
export function createTask(task: Task): Task {
tasks.set(task.id, task);
/** Load one task (with its iterations) by id, or undefined when absent. */
export async function getTask(id: string): Promise<Task | undefined> {
  const taskRows = await db.select().from(tasksTable).where(eq(tasksTable.id, id));
  const taskRow = taskRows[0];
  if (taskRow === undefined) return undefined;
  const iterRows = await db
    .select()
    .from(iterationsTable)
    .where(eq(iterationsTable.taskId, id));
  return rowToTask(taskRow, iterRows);
}
/**
 * Persist a brand-new task row and echo the input back.
 * Iterations live in their own table and are not written here — a freshly
 * created task is expected to have none. created_at/updated_at come from
 * database defaults.
 */
export async function createTask(task: Task): Promise<Task> {
  await db.insert(tasksTable).values({
    id: task.id,
    slug: task.slug,
    goal: task.goal,
    status: task.status,
    iteration: task.iteration,
    maxIterations: task.maxIterations,
    startedAt: task.startedAt,
    completedAt: task.completedAt,
    project: task.project,
    evals: task.evals,
    pr: task.pr,
    spec: task.spec,
  });
  return task;
}
export function updateTask(id: string, updates: Partial<Task>): Task | undefined {
const existing = tasks.get(id);
export async function updateTask(id: string, updates: Partial<Task>): Promise<Task | undefined> {
const existing = await getTask(id);
if (!existing) return undefined;
const updated = { ...existing, ...updates };
tasks.set(id, updated);
return updated;
const dbUpdates: Record<string, unknown> = { updatedAt: new Date() };
if (updates.slug !== undefined) dbUpdates.slug = updates.slug;
if (updates.goal !== undefined) dbUpdates.goal = updates.goal;
if (updates.status !== undefined) dbUpdates.status = updates.status;
if (updates.iteration !== undefined) dbUpdates.iteration = updates.iteration;
if (updates.maxIterations !== undefined) dbUpdates.maxIterations = updates.maxIterations;
if (updates.startedAt !== undefined) dbUpdates.startedAt = updates.startedAt;
if (updates.completedAt !== undefined) dbUpdates.completedAt = updates.completedAt;
if (updates.project !== undefined) dbUpdates.project = updates.project;
if (updates.evals !== undefined) dbUpdates.evals = updates.evals;
if (updates.pr !== undefined) dbUpdates.pr = updates.pr;
await db.update(tasksTable).set(dbUpdates).where(eq(tasksTable.id, id));
return getTask(id);
}
export function deleteTask(id: string): boolean {
return tasks.delete(id);
/**
 * Delete a task and all of its iterations.
 * Returns true when a task row was actually removed.
 *
 * Child iterations are deleted first because the schema declares no FK
 * cascade. DELETE ... RETURNING gives a typed result instead of casting the
 * driver's rowCount through `as unknown as { rowCount }`.
 */
export async function deleteTask(id: string): Promise<boolean> {
  await db.delete(iterationsTable).where(eq(iterationsTable.taskId, id));
  const deleted = await db
    .delete(tasksTable)
    .where(eq(tasksTable.id, id))
    .returning({ id: tasksTable.id });
  return deleted.length > 0;
}
export function appendIteration(id: string, iteration: import("./types").Iteration): Task | undefined {
const existing = tasks.get(id);
export async function appendIteration(id: string, iteration: Iteration): Promise<Task | undefined> {
const existing = await getTask(id);
if (!existing) return undefined;
existing.iterations = [...existing.iterations, iteration];
existing.iteration = iteration.n;
tasks.set(id, existing);
return existing;
await db.insert(iterationsTable).values({
taskId: id,
n: iteration.n,
status: iteration.status,
diagnosis: iteration.diagnosis,
agentOutput: iteration.agentOutput,
evals: iteration.evals as Record<string, unknown>,
diffStats: iteration.diffStats,
startedAt: iteration.startedAt,
completedAt: iteration.completedAt,
});
await db.update(tasksTable).set({ iteration: iteration.n, updatedAt: new Date() }).where(eq(tasksTable.id, id));
return getTask(id);
}
export function updateIteration(
export async function updateIteration(
id: string,
iterationN: number,
updates: Partial<import("./types").Iteration>,
): Task | undefined {
const existing = tasks.get(id);
updates: Partial<Iteration>,
): Promise<Task | undefined> {
const existing = await getTask(id);
if (!existing) return undefined;
existing.iterations = existing.iterations.map((iter) =>
iter.n === iterationN ? { ...iter, ...updates } : iter,
);
tasks.set(id, existing);
return existing;
const dbUpdates: Partial<typeof iterationsTable.$inferInsert> = {};
if (updates.status !== undefined) dbUpdates.status = updates.status;
if (updates.diagnosis !== undefined) dbUpdates.diagnosis = updates.diagnosis;
if (updates.agentOutput !== undefined) dbUpdates.agentOutput = updates.agentOutput;
if (updates.evals !== undefined) dbUpdates.evals = updates.evals as Record<string, unknown>;
if (updates.diffStats !== undefined) dbUpdates.diffStats = updates.diffStats;
if (updates.startedAt !== undefined) dbUpdates.startedAt = updates.startedAt;
if (updates.completedAt !== undefined) dbUpdates.completedAt = updates.completedAt;
await db.update(iterationsTable)
.set(dbUpdates)
.where(and(eq(iterationsTable.taskId, id), eq(iterationsTable.n, iterationN)));
return getTask(id);
}
export function getFirstPendingTask(): Task | undefined {
return Array.from(tasks.values()).find((t) => t.status === "pending");
/**
 * The oldest pending task, fully hydrated, or undefined when the queue is
 * empty.
 *
 * ORDER BY created_at restores the FIFO behavior of the old in-memory Map
 * (insertion order): a bare LIMIT 1 without ORDER BY lets Postgres return
 * rows in an unspecified order, so the "first" task was nondeterministic.
 */
export async function getFirstPendingTask(): Promise<Task | undefined> {
  const [row] = await db
    .select()
    .from(tasksTable)
    .where(eq(tasksTable.status, "pending"))
    .orderBy(tasksTable.createdAt)
    .limit(1);
  if (!row) return undefined;
  return getTask(row.id);
}
export function getRunningTasks(): Task[] {
return Array.from(tasks.values()).filter((t) => t.status === "running");
/**
 * All tasks currently in status "running", fully hydrated with iterations.
 * Used by crash recovery on startup.
 *
 * NOTE(review): this issues getTask (two queries) per running row — an N+1
 * pattern. The orchestrator appears to run at most one task at a time, so
 * this is cheap today; revisit if tasks ever run concurrently.
 */
export async function getRunningTasks(): Promise<Task[]> {
  const rows = await db.select().from(tasksTable).where(eq(tasksTable.status, "running"));
  const results = await Promise.all(rows.map(r => getTask(r.id)));
  return results.filter((t): t is Task => t !== undefined);
}

View File

@@ -0,0 +1,85 @@
CREATE TABLE IF NOT EXISTS "harness_agent_configs" (
"id" text PRIMARY KEY NOT NULL,
"name" text NOT NULL,
"runtime" text NOT NULL,
"model_id" text NOT NULL,
"provider" text NOT NULL,
"max_tokens" integer,
"env" jsonb,
"created_at" timestamp DEFAULT now() NOT NULL,
"updated_at" timestamp DEFAULT now() NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "harness_credentials" (
"id" text PRIMARY KEY NOT NULL,
"provider" text NOT NULL,
"label" text NOT NULL,
"token" text NOT NULL,
"base_url" text,
"created_at" timestamp DEFAULT now() NOT NULL,
"updated_at" timestamp DEFAULT now() NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "harness_curated_models" (
"id" text PRIMARY KEY NOT NULL,
"name" text NOT NULL,
"provider" text NOT NULL,
"enabled" boolean DEFAULT true NOT NULL,
"context_window" integer,
"cost_per_1k_input" real,
"cost_per_1k_output" real,
"created_at" timestamp DEFAULT now() NOT NULL,
"updated_at" timestamp DEFAULT now() NOT NULL
);
--> statement-breakpoint
-- One row per agent iteration of a harness task.
-- started_at / completed_at hold epoch milliseconds (bigint), matching the
-- Date.now() values the application writes.
-- NOTE(review): application code addresses rows by (task_id, n), but there is
-- no UNIQUE constraint or index on that pair and no foreign key to
-- harness_tasks — confirm duplicates/orphans cannot occur before relying on it.
CREATE TABLE IF NOT EXISTS "harness_iterations" (
	"id" serial PRIMARY KEY NOT NULL,
	"task_id" text NOT NULL,
	"n" integer NOT NULL,
	"status" text DEFAULT 'pending' NOT NULL,
	"diagnosis" text,
	"agent_output" text,
	"evals" jsonb,
	"diff_stats" text,
	"started_at" bigint,
	"completed_at" bigint
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "harness_model_usage" (
"id" serial PRIMARY KEY NOT NULL,
"model_id" text NOT NULL,
"provider" text NOT NULL,
"task_id" text NOT NULL,
"task_slug" text NOT NULL,
"iteration" integer NOT NULL,
"input_tokens" integer NOT NULL,
"output_tokens" integer NOT NULL,
"duration_ms" integer NOT NULL,
"timestamp" bigint NOT NULL
);
--> statement-breakpoint
-- Harness task queue and state. evals, pr and spec are untyped jsonb whose
-- shape is owned by the TypeScript task store.
-- NOTE(review): "project" defaults to '' here, but the generated snapshot in
-- this migration set records the default as '—' — the two must agree (one was
-- presumably hand-edited); reconcile before this migration ships.
CREATE TABLE IF NOT EXISTS "harness_tasks" (
	"id" text PRIMARY KEY NOT NULL,
	"slug" text NOT NULL,
	"goal" text NOT NULL,
	"status" text DEFAULT 'pending' NOT NULL,
	"iteration" integer DEFAULT 0 NOT NULL,
	"max_iterations" integer DEFAULT 6 NOT NULL,
	"started_at" bigint,
	"completed_at" bigint,
	"project" text DEFAULT '' NOT NULL,
	"evals" jsonb DEFAULT '{}'::jsonb NOT NULL,
	"pr" jsonb,
	"spec" jsonb NOT NULL,
	"created_at" timestamp DEFAULT now() NOT NULL,
	"updated_at" timestamp DEFAULT now() NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "users" (
"id" serial PRIMARY KEY NOT NULL,
"email" text NOT NULL,
"name" text,
"created_at" timestamp DEFAULT now() NOT NULL,
"updated_at" timestamp DEFAULT now() NOT NULL,
CONSTRAINT "users_email_unique" UNIQUE("email")
);

View File

@@ -0,0 +1,519 @@
{
"id": "629b632c-cf1f-46ff-bb2f-e47d2343ddbd",
"prevId": "00000000-0000-0000-0000-000000000000",
"version": "7",
"dialect": "postgresql",
"tables": {
"public.harness_agent_configs": {
"name": "harness_agent_configs",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": true
},
"runtime": {
"name": "runtime",
"type": "text",
"primaryKey": false,
"notNull": true
},
"model_id": {
"name": "model_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"provider": {
"name": "provider",
"type": "text",
"primaryKey": false,
"notNull": true
},
"max_tokens": {
"name": "max_tokens",
"type": "integer",
"primaryKey": false,
"notNull": false
},
"env": {
"name": "env",
"type": "jsonb",
"primaryKey": false,
"notNull": false
},
"created_at": {
"name": "created_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"updated_at": {
"name": "updated_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true,
"default": "now()"
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.harness_credentials": {
"name": "harness_credentials",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true
},
"provider": {
"name": "provider",
"type": "text",
"primaryKey": false,
"notNull": true
},
"label": {
"name": "label",
"type": "text",
"primaryKey": false,
"notNull": true
},
"token": {
"name": "token",
"type": "text",
"primaryKey": false,
"notNull": true
},
"base_url": {
"name": "base_url",
"type": "text",
"primaryKey": false,
"notNull": false
},
"created_at": {
"name": "created_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"updated_at": {
"name": "updated_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true,
"default": "now()"
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.harness_curated_models": {
"name": "harness_curated_models",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": true
},
"provider": {
"name": "provider",
"type": "text",
"primaryKey": false,
"notNull": true
},
"enabled": {
"name": "enabled",
"type": "boolean",
"primaryKey": false,
"notNull": true,
"default": true
},
"context_window": {
"name": "context_window",
"type": "integer",
"primaryKey": false,
"notNull": false
},
"cost_per_1k_input": {
"name": "cost_per_1k_input",
"type": "real",
"primaryKey": false,
"notNull": false
},
"cost_per_1k_output": {
"name": "cost_per_1k_output",
"type": "real",
"primaryKey": false,
"notNull": false
},
"created_at": {
"name": "created_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"updated_at": {
"name": "updated_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true,
"default": "now()"
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.harness_iterations": {
"name": "harness_iterations",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "serial",
"primaryKey": true,
"notNull": true
},
"task_id": {
"name": "task_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"n": {
"name": "n",
"type": "integer",
"primaryKey": false,
"notNull": true
},
"status": {
"name": "status",
"type": "text",
"primaryKey": false,
"notNull": true,
"default": "'pending'"
},
"diagnosis": {
"name": "diagnosis",
"type": "text",
"primaryKey": false,
"notNull": false
},
"agent_output": {
"name": "agent_output",
"type": "text",
"primaryKey": false,
"notNull": false
},
"evals": {
"name": "evals",
"type": "jsonb",
"primaryKey": false,
"notNull": false
},
"diff_stats": {
"name": "diff_stats",
"type": "text",
"primaryKey": false,
"notNull": false
},
"started_at": {
"name": "started_at",
"type": "bigint",
"primaryKey": false,
"notNull": false
},
"completed_at": {
"name": "completed_at",
"type": "bigint",
"primaryKey": false,
"notNull": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.harness_model_usage": {
"name": "harness_model_usage",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "serial",
"primaryKey": true,
"notNull": true
},
"model_id": {
"name": "model_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"provider": {
"name": "provider",
"type": "text",
"primaryKey": false,
"notNull": true
},
"task_id": {
"name": "task_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"task_slug": {
"name": "task_slug",
"type": "text",
"primaryKey": false,
"notNull": true
},
"iteration": {
"name": "iteration",
"type": "integer",
"primaryKey": false,
"notNull": true
},
"input_tokens": {
"name": "input_tokens",
"type": "integer",
"primaryKey": false,
"notNull": true
},
"output_tokens": {
"name": "output_tokens",
"type": "integer",
"primaryKey": false,
"notNull": true
},
"duration_ms": {
"name": "duration_ms",
"type": "integer",
"primaryKey": false,
"notNull": true
},
"timestamp": {
"name": "timestamp",
"type": "bigint",
"primaryKey": false,
"notNull": true
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.harness_tasks": {
"name": "harness_tasks",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true
},
"slug": {
"name": "slug",
"type": "text",
"primaryKey": false,
"notNull": true
},
"goal": {
"name": "goal",
"type": "text",
"primaryKey": false,
"notNull": true
},
"status": {
"name": "status",
"type": "text",
"primaryKey": false,
"notNull": true,
"default": "'pending'"
},
"iteration": {
"name": "iteration",
"type": "integer",
"primaryKey": false,
"notNull": true,
"default": 0
},
"max_iterations": {
"name": "max_iterations",
"type": "integer",
"primaryKey": false,
"notNull": true,
"default": 6
},
"started_at": {
"name": "started_at",
"type": "bigint",
"primaryKey": false,
"notNull": false
},
"completed_at": {
"name": "completed_at",
"type": "bigint",
"primaryKey": false,
"notNull": false
},
"project": {
"name": "project",
"type": "text",
"primaryKey": false,
"notNull": true,
"default": "'—'"
},
"evals": {
"name": "evals",
"type": "jsonb",
"primaryKey": false,
"notNull": true,
"default": "'{}'::jsonb"
},
"pr": {
"name": "pr",
"type": "jsonb",
"primaryKey": false,
"notNull": false
},
"spec": {
"name": "spec",
"type": "jsonb",
"primaryKey": false,
"notNull": true
},
"created_at": {
"name": "created_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"updated_at": {
"name": "updated_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true,
"default": "now()"
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.users": {
"name": "users",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "serial",
"primaryKey": true,
"notNull": true
},
"email": {
"name": "email",
"type": "text",
"primaryKey": false,
"notNull": true
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": false
},
"created_at": {
"name": "created_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"updated_at": {
"name": "updated_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true,
"default": "now()"
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {
"users_email_unique": {
"name": "users_email_unique",
"nullsNotDistinct": false,
"columns": [
"email"
]
}
},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
}
},
"enums": {},
"schemas": {},
"sequences": {},
"roles": {},
"policies": {},
"views": {},
"_meta": {
"columns": {},
"schemas": {},
"tables": {}
}
}

View File

@@ -0,0 +1,13 @@
{
"version": "7",
"dialect": "postgresql",
"entries": [
{
"idx": 0,
"version": "7",
"when": 1774124029586,
"tag": "0000_sparkling_gressill",
"breakpoints": true
}
]
}

View File

@@ -1,4 +1,16 @@
import { pgTable, serial, text, timestamp } from "drizzle-orm/pg-core";
import {
pgTable,
serial,
text,
timestamp,
boolean,
integer,
real,
jsonb,
bigint,
} from "drizzle-orm/pg-core";
// ─── EXISTING ───────────────────────────────────────────────
export const users = pgTable("users", {
id: serial("id").primaryKey(),
@@ -7,3 +19,101 @@ export const users = pgTable("users", {
createdAt: timestamp("created_at").defaultNow().notNull(),
updatedAt: timestamp("updated_at").defaultNow().notNull(),
});
// ─── HARNESS: CREDENTIALS ──────────────────────────────────
// Provider credentials (git hosts, model APIs); one row per token.
// NOTE(review): token is stored in plaintext in Postgres — confirm the
// cluster's encryption-at-rest and access controls make this acceptable.
export const credentials = pgTable("harness_credentials", {
  id: text("id").primaryKey(),
  provider: text("provider").notNull(), // e.g. "github" / "gitlab" / "google" — confirm full set against callers
  label: text("label").notNull(),
  token: text("token").notNull(),
  baseUrl: text("base_url"), // optional override for self-hosted providers — TODO confirm
  createdAt: timestamp("created_at").defaultNow().notNull(),
  updatedAt: timestamp("updated_at").defaultNow().notNull(),
});
// ─── HARNESS: CURATED MODELS ───────────────────────────────
// Allow-list of models exposed to the harness, with optional context-size and
// pricing metadata. Rows are soft-toggled via `enabled` rather than deleted.
export const curatedModels = pgTable("harness_curated_models", {
  id: text("id").primaryKey(), // application-assigned model identifier string
  name: text("name").notNull(), // display name
  provider: text("provider").notNull(),
  enabled: boolean("enabled").default(true).notNull(), // new models default to visible
  contextWindow: integer("context_window"), // max context tokens; null when unknown
  costPer1kInput: real("cost_per_1k_input"), // presumably USD per 1k input tokens — TODO confirm unit
  costPer1kOutput: real("cost_per_1k_output"), // same unit as costPer1kInput
  createdAt: timestamp("created_at").defaultNow().notNull(),
  updatedAt: timestamp("updated_at").defaultNow().notNull(),
});
// ─── HARNESS: MODEL USAGE ──────────────────────────────────
// Append-only usage log: one row per model invocation within a task iteration,
// recording token counts and wall-clock duration for cost/latency accounting.
export const modelUsage = pgTable("harness_model_usage", {
  id: serial("id").primaryKey(),
  modelId: text("model_id").notNull(),
  provider: text("provider").notNull(),
  // Denormalized task reference (no FK constraint declared) — usage rows are
  // independent of task row lifetime.
  taskId: text("task_id").notNull(),
  taskSlug: text("task_slug").notNull(),
  iteration: integer("iteration").notNull(), // which iteration of the task produced this usage
  inputTokens: integer("input_tokens").notNull(),
  outputTokens: integer("output_tokens").notNull(),
  durationMs: integer("duration_ms").notNull(),
  // Stored as a Postgres bigint but surfaced as a JS number (mode: "number");
  // NOTE(review): assumes epoch milliseconds — confirm against the writer.
  timestamp: bigint("timestamp", { mode: "number" }).notNull(),
});
// ─── HARNESS: AGENT CONFIGS ────────────────────────────────
// Saved agent run configurations: which runtime to launch and which
// model/provider it should use.
export const agentConfigs = pgTable("harness_agent_configs", {
  id: text("id").primaryKey(), // application-assigned identifier string
  name: text("name").notNull(), // display name
  runtime: text("runtime").notNull(), // "claude-code" | "codex" | "opencode"
  modelId: text("model_id").notNull(),
  provider: text("provider").notNull(),
  maxTokens: integer("max_tokens"), // optional output cap; null = runtime default
  // Extra environment variables for the agent process; nullable when none.
  env: jsonb("env").$type<Record<string, string>>(),
  createdAt: timestamp("created_at").defaultNow().notNull(),
  updatedAt: timestamp("updated_at").defaultNow().notNull(),
});
// ─── HARNESS: TASKS ────────────────────────────────────────
// Top-level harness tasks. Mutable progress fields (status, iteration, pr,
// evals) live alongside the immutable originating `spec` snapshot, so a task
// row is self-contained for display and resumption.
export const tasks = pgTable("harness_tasks", {
  id: text("id").primaryKey(), // application-assigned identifier string
  slug: text("slug").notNull(), // human-readable short name; uniqueness not enforced here
  goal: text("goal").notNull(), // natural-language objective given to the agent
  status: text("status").notNull().default("pending"), // pending | running | completed | failed
  iteration: integer("iteration").notNull().default(0), // current iteration counter
  maxIterations: integer("max_iterations").notNull().default(6), // orchestrator stop bound
  // Epoch timestamps as JS numbers (presumably milliseconds — confirm writer);
  // null until the corresponding transition happens.
  startedAt: bigint("started_at", { mode: "number" }),
  completedAt: bigint("completed_at", { mode: "number" }),
  project: text("project").notNull().default("—"), // em-dash sentinel = "no project"
  // Latest evaluation results keyed by criterion; starts empty, never null.
  evals: jsonb("evals").$type<Record<string, unknown>>().default({}).notNull(),
  // Pull-request linkage once the agent opens one; null before that.
  pr: jsonb("pr").$type<{ number: number; title: string; status: string }>(),
  // Full originating task specification, denormalized as a single JSON blob
  // (duplicates slug/goal/project/maxIterations above for convenience).
  spec: jsonb("spec").$type<{
    slug: string;
    goal: string;
    project: string;
    agentId: string;
    maxIterations: number;
    criteria: { label: string; target: string }[];
    constraints: string[];
    knowledgeRefs: string[];
  }>().notNull(),
  createdAt: timestamp("created_at").defaultNow().notNull(),
  updatedAt: timestamp("updated_at").defaultNow().notNull(),
});
// ─── HARNESS: ITERATIONS ───────────────────────────────────
// Per-iteration records for a task: agent output, diagnosis, and eval results
// for iteration number `n`.
// NOTE(review): no FK to harness_tasks and no unique constraint on
// (task_id, n) — uniqueness is presumably enforced by the orchestrator; verify.
export const iterations = pgTable("harness_iterations", {
  id: serial("id").primaryKey(),
  taskId: text("task_id").notNull(), // denormalized reference to harness_tasks.id
  n: integer("n").notNull(), // iteration index within the task
  status: text("status").notNull().default("pending"), // pending | running | passed | failed
  diagnosis: text("diagnosis"), // failure analysis text; null until produced
  agentOutput: text("agent_output"), // raw agent transcript/output; null until run
  evals: jsonb("evals").$type<Record<string, unknown>>(), // eval results for this iteration
  diffStats: text("diff_stats"), // summary of the code diff produced, if any
  // Epoch timestamps as JS numbers (presumably milliseconds — confirm writer).
  startedAt: bigint("started_at", { mode: "number" }),
  completedAt: bigint("completed_at", { mode: "number" }),
});

9
pnpm-lock.yaml generated
View File

@@ -72,9 +72,18 @@ importers:
apps/harness:
dependencies:
'@homelab/db':
specifier: workspace:^
version: link:../../packages/db
drizzle-orm:
specifier: ^0.36.0
version: 0.36.4(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(@types/react@19.2.14)(postgres@3.4.8)(react@19.2.4)
next:
specifier: ^15.1.0
version: 15.5.14(@opentelemetry/api@1.9.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
postgres:
specifier: ^3.4.0
version: 3.4.8
react:
specifier: ^19.0.0
version: 19.2.4