diff --git a/auditor/fixtures/hybrid_38_40_45.ts b/auditor/fixtures/hybrid_38_40_45.ts
index 8e3ed16..f8472b9 100644
--- a/auditor/fixtures/hybrid_38_40_45.ts
+++ b/auditor/fixtures/hybrid_38_40_45.ts
@@ -102,6 +102,15 @@ export async function runHybridFixture(): Promise {
   // ========================================================================
   // Layer 1 — Phase 38: POST /v1/chat returns valid OpenAI shape
   // ========================================================================
+  // Captured HERE, immediately before the chat layer runs, so layer 2's
+  // Langfuse-trace filter uses the actual moment the chat call was
+  // attempted — not the fixture start time. Earlier draft had a
+  // meaningless ternary returning result.ran_at on both branches; the
+  // LLM-Team codereview (2026-04-22) caught this and flagged it as a
+  // false-negative window on traces created between fixture-start and
+  // chat-fetch.
+  const chat_request_sent_ms = Date.now();
+
   const l1 = await measureLayer("phase38_chat", "38", async () => {
     const r = await fetch(`${GATEWAY}/v1/chat`, {
       method: "POST",
@@ -150,11 +159,13 @@ export async function runHybridFixture(): Promise {
     if (!r.ok) throw new Error(`langfuse ${r.status}: ${await r.text()}`);
     const j: any = await r.json();
     const items = Array.isArray(j.data) ? j.data : [];
-    // Find a trace newer than our l1 start timestamp.
-    const ourStart = Date.parse(l1.evidence.match(/tokens=/) ? result.ran_at : result.ran_at);
-    const recent = items.filter((t: any) => Date.parse(t.timestamp) >= ourStart);
+    // Filter on the chat-request timestamp captured above. A Langfuse
+    // trace must be newer than the moment we fired /v1/chat to plausibly
+    // belong to our request. Using fixture start time (result.ran_at)
+    // was wrong and could false-negative on slow fixtures.
+    const recent = items.filter((t: any) => Date.parse(t.timestamp) >= chat_request_sent_ms);
     if (recent.length === 0) {
-      throw new Error(`no v1.chat:ollama trace since ${new Date(ourStart).toISOString()} (${items.length} older traces visible, Langfuse reachable — tracing is not firing)`);
+      throw new Error(`no v1.chat:ollama trace since ${new Date(chat_request_sent_ms).toISOString()} (${items.length} older traces visible, Langfuse reachable — tracing is not firing)`);
     }
     const trace = recent[0];
     return {