#!/usr/bin/env bun
/**
 * Build the `scrum_findings_v1` corpus — Option B from 2026-04-26
 * corpus pass. Self-feeding: each accepted scrum review's
 * `suggestions_preview` becomes a document, indexed under doc_id
 * `review:<file_slug>:<ts>` so multiple review iterations coexist.
 *
 * Re-run this whenever scrum_reviews.jsonl grows; the index_name stays
 * stable and the gateway will re-register metadata.
 */
import { readFileSync } from "node:fs";
import { resolve } from "node:path";

const ROOT = resolve(import.meta.dir, "..");
const GATEWAY = process.env.LH_GATEWAY ?? "http://localhost:3100";
const INDEX_NAME = process.env.LH_CORPUS_NAME ?? "scrum_findings_v1";
const SOURCE_LABEL = "scrum_findings";
const CHUNK_SIZE = Number(process.env.LH_CHUNK_SIZE ?? 1500);
const OVERLAP = Number(process.env.LH_OVERLAP ?? 150);
const MIN_PREVIEW_BYTES = 200; // skip stub rows

interface Doc { id: string; text: string }

/** Turn a source path into a short, filesystem-safe id segment. */
function slugFile(path: string): string {
  return path.replace(/^crates\//, "").replace(/[^a-z0-9]+/gi, "_").slice(0, 40);
}

/** Compact an ISO timestamp, e.g. "2026-04-24T11:06:56" → "20260424110656". */
function compactTs(iso: string): string {
  return iso.replace(/[-:T]/g, "").slice(0, 14);
}

/** Read scrum_reviews.jsonl and turn each usable review into one Doc. */
function buildDocs(): Doc[] {
  const lines = readFileSync(resolve(ROOT, "data/_kb/scrum_reviews.jsonl"), "utf8")
    .split("\n")
    .filter(Boolean);
  const docs: Doc[] = [];
  const idCounts = new Map<string, number>();
  for (const line of lines) {
    let row: any;
    try { row = JSON.parse(line); } catch { continue; }
    const file = row.file ?? "";
    const preview = row.suggestions_preview ?? "";
    if (!file || preview.length < MIN_PREVIEW_BYTES) continue;
    const ts = compactTs(row.reviewed_at ?? "");
    const baseId = `review:${slugFile(file)}:${ts || "no_ts"}`;
    // Multiple reviews with the same ts (rare but possible) get a counter suffix.
    const count = (idCounts.get(baseId) ?? 0) + 1;
    idCounts.set(baseId, count);
    const id = count === 1 ? baseId : `${baseId}_${count}`;
    const header = `File: ${file}\nReviewed: ${row.reviewed_at ?? "?"}\nModel: ${row.accepted_model ?? "?"}\nVerdict: ${row.verdict ?? "?"}\nFindings: ${row.findings_count ?? "?"}\n\n`;
    docs.push({ id, text: header + preview });
  }
  return docs;
}

async function main() {
  const dryRun = process.argv.includes("--dry-run") || process.argv.includes("--print");
  const printOnly = process.argv.includes("--print");
  const docs = buildDocs();
  const totalBytes = docs.reduce((s, d) => s + d.text.length, 0);
  console.log(`[corpus-B] ${docs.length} reviews · ${totalBytes} bytes · target chunk_size=${CHUNK_SIZE}`);
  console.log(`[corpus-B] file coverage: ${new Set(docs.map(d => d.id.split(":")[1])).size} unique files`);
  if (printOnly) {
    docs.slice(0, 3).forEach(d =>
      console.log(`  ${d.id} (${d.text.length}b) ${d.text.slice(0, 80).replace(/\n/g, " ")}…`));
    return;
  }
  if (dryRun) return;

  const r = await fetch(`${GATEWAY}/vectors/index`, {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: JSON.stringify({
      index_name: INDEX_NAME,
      source: SOURCE_LABEL,
      documents: docs,
      chunk_size: CHUNK_SIZE,
      overlap: OVERLAP,
    }),
    signal: AbortSignal.timeout(60_000),
  });
  if (!r.ok) {
    console.error(`[corpus-B] HTTP ${r.status}: ${await r.text()}`);
    process.exit(1);
  }
  const j: any = await r.json();
  console.log(`[corpus-B] job ${j.job_id} · ${j.documents} docs → ${j.chunks} chunks queued`);
}

main().catch(e => { console.error(e); process.exit(1); });
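
// Usage sketch, based on the flags and env vars this script already reads.
// The script path below is an assumption (adjust to wherever this file lives):
//
//   bun scripts/build_scrum_corpus.ts --print     # preview the first 3 doc ids, no indexing
//   bun scripts/build_scrum_corpus.ts --dry-run   # report doc/byte counts only
//   LH_GATEWAY=http://localhost:3100 LH_CHUNK_SIZE=1000 bun scripts/build_scrum_corpus.ts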