Architectural snapshot of the lakehouse codebase at the point where the
full matrix-driven agent loop with Mem0 versioning + deletion was
validated end-to-end.
WHAT THIS REPO IS
A clean single-commit snapshot of the lakehouse code. Heavy test data
(.parquet datasets, vector indexes) excluded — see REPLICATION.md for
regen path. Full lakehouse history at git.agentview.dev/profit/lakehouse.
WHAT WAS PROVEN
- Vector retrieval across a multi-corpus matrix (chicago_permits + entity
briefs + sec_tickers + distilled procedural + llm_team runs)
- Observer hand-review (cloud + heuristic fallback) gating each candidate
- Local-model agent loop (qwen3.5:latest) with tool use + scratchpad
- Playbook seal on success → next-iter retrieval surfaces it as preamble
- Mem0 versioning + deletion in pathway_memory:
* UPSERT: ADD on new workflow, UPDATE bumps replay_count on identical
* REVISE: chains versions, parent.superseded_at + superseded_by stamped
* RETIRE: marks specific trace retired with reason, excluded from retrieval
* HISTORY: walks chain root→tip, cycle-safe
KEY DIRECTORIES
- crates/vectord/src/pathway_memory.rs — Mem0 ops live here
- crates/vectord/src/playbook_memory.rs — original Mem0 reference
- tests/agent_test/ — local-model agent harness + PRD + session archives
- scripts/dump_raw_corpus.sh — MinIO bucket dump (raw test corpus)
- scripts/vectorize_raw_corpus.ts — corpus → vector indexes
- scripts/analyze_chicago_contracts.ts — real inference pipeline
- scripts/seal_agent_playbook.ts — Mem0 upsert from agent traces
Replication: see REPLICATION.md for Debian 13 clean install + cloud-only
adaptation (no local Ollama).
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
// Daily cost tracker. Resets at UTC midnight by keying the file by date.
// Hard ceilings are the policy surface — the bot refuses to call the
// cloud once the budget is exhausted for the day.
import { readFile, writeFile, mkdir } from "node:fs/promises";
|
|
import { join } from "node:path";
|
|
import type { CostState } from "./types.ts";
|
|
|
|
// Directory holding the daily cost ledger files (one JSON file per UTC date).
const COST_DIR = "/home/profit/lakehouse/data/_bot";

// Hard policy ceiling on cloud calls per UTC day.
export const DAILY_CALLS_BUDGET = 20;

// Token ceiling derived from the call budget (8k tokens assumed per call).
export const DAILY_TOKENS_BUDGET = 8000 * DAILY_CALLS_BUDGET; // 160k tokens/day ceiling
|
|
|
|
function todayUtc(): string {
|
|
return new Date().toISOString().slice(0, 10);
|
|
}
|
|
|
|
function costFile(date: string): string {
|
|
return join(COST_DIR, `cost-${date}.json`);
|
|
}
|
|
|
|
export async function readCost(): Promise<CostState> {
|
|
const date = todayUtc();
|
|
try {
|
|
const raw = await readFile(costFile(date), "utf8");
|
|
return JSON.parse(raw) as CostState;
|
|
} catch {
|
|
return { date, calls: 0, tokens: 0 };
|
|
}
|
|
}
|
|
|
|
export async function recordCost(calls: number, tokens: number): Promise<CostState> {
|
|
await mkdir(COST_DIR, { recursive: true });
|
|
const current = await readCost();
|
|
const updated: CostState = {
|
|
date: current.date,
|
|
calls: current.calls + calls,
|
|
tokens: current.tokens + tokens,
|
|
};
|
|
await writeFile(costFile(updated.date), JSON.stringify(updated, null, 2));
|
|
return updated;
|
|
}
|
|
|
|
export function budgetCheck(c: CostState): { ok: boolean; reason: string } {
|
|
if (c.calls >= DAILY_CALLS_BUDGET) {
|
|
return { ok: false, reason: `daily call budget exhausted (${c.calls}/${DAILY_CALLS_BUDGET})` };
|
|
}
|
|
if (c.tokens >= DAILY_TOKENS_BUDGET) {
|
|
return { ok: false, reason: `daily token budget exhausted (${c.tokens}/${DAILY_TOKENS_BUDGET})` };
|
|
}
|
|
return { ok: true, reason: `${c.calls}/${DAILY_CALLS_BUDGET} calls, ${c.tokens}/${DAILY_TOKENS_BUDGET} tokens used today` };
|
|
}
|