root c7e3124208 §3.8 second slice: real modes wired (matrix.relevance/downgrade/search,
distillation.score, drift.scorer)

Lands the workflow.Mode adapters for the §3.4 components + the
distillation scorer + drift quantifier. Workflows can now compose
real measurement capabilities; the substrate's parallel
capabilities become composable Lego bricks (per the prior commit's
closing insight).

Modes registered (in observerd's registerBuiltinModes):

  Pure-function wrappers (no I/O):
    - matrix.relevance    → matrix.FilterChunks
    - matrix.downgrade    → matrix.MaybeDowngrade
    - distillation.score  → distillation.ScoreRecord
    - drift.scorer        → drift.ComputeScorerDrift

  HTTP-backed:
    - matrix.search       → POST matrixd /matrix/search
                             (registered only when matrixd_url is set)

  Fixture (kept from §3.8 first slice):
    - fixture.echo, fixture.upper

internal/workflow/modes.go:
  Each mode follows the same glue pattern: marshal generic input
  through a typed struct (free schema validation + clear error
  messages), call the underlying capability, return a generic
  output map. Roundtrip-via-JSON gives us schema validation
  without writing custom field-by-field coercion.

internal/workflow/modes_test.go (10 tests, all PASS):
  - matrix.relevance filters adjacency pollution (Connector kept,
    catalogd::Registry dropped — same headline as the relevance
    smoke, run through the workflow mode)
  - matrix.downgrade flips lakehouse→isolation on strong model;
    keeps lakehouse on weak (qwen3.5:latest); errors on missing
    fields
  - distillation.score rates scrum_review attempt_1 as accepted;
    rejects empty record
  - drift.scorer reports zero drift on matched inputs; errors on
    empty inputs slice
  - matrix.search HTTP flow round-trips through httptest fake
    matrixd; non-OK status surfaces a clear error

scripts/workflow_smoke.sh (5 assertions PASS, was 4):
  New assertion #5: real-mode chain
    matrix.downgrade (lakehouse + grok-4.1-fast → isolation)
    → distillation.score (scrum_review attempt_1 → accepted)
  Proves §3.4 components compose through the workflow runner with
  no fixture intermediation. Both nodes ran successfully, runner
  recorded provenance, status=succeeded.

  Mode listing assertion now expects 7 modes (5 real + 2 fixture)
  instead of just the fixtures.

17-smoke regression all green. SPEC §3.8 acceptance gate G3.8.D
("Mode catalog dispatches matrix.search invocation to the matrixd
backend without going through HTTP") still pending — current path
goes through HTTP for matrix.search, which is the cleaner service-
mesh shape but slower than direct in-process. In-process dispatch
when matrixd is co-resident is a future optimization.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-04-29 20:39:26 -05:00

215 lines
7.1 KiB
Go

package workflow
// modes.go — adapters that wrap §3.4 capabilities + §3.5 drift +
// distillation scorer as workflow.Mode functions. Each mode follows
// the same glue pattern: marshal the generic input map through a
// typed struct (so workflow YAML schemas are self-documenting and
// validation errors are clear), call the underlying capability,
// return a generic output map.
//
// Pure modes (no I/O): MatrixRelevance, MatrixDowngrade,
// DistillationScore, DriftScorer.
//
// HTTP modes: MatrixSearch + PlaybookRecord — observerd talks to
// matrixd over HTTP since the search/record paths need vectord
// access. Constructed via factory funcs that take the matrixd base
// URL + an http.Client.
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"git.agentview.dev/profit/golangLAKEHOUSE/internal/distillation"
"git.agentview.dev/profit/golangLAKEHOUSE/internal/drift"
"git.agentview.dev/profit/golangLAKEHOUSE/internal/matrix"
)
// ─── Pure-function wrappers ─────────────────────────────────────
// MatrixRelevance wraps matrix.FilterChunks. Input shape:
//
//	{
//	  "focus": {"Path":"...", "Content":"...", ...},
//	  "chunks": [{"source":"...", "doc_id":"...", "text":"...", "score":0.8}, ...],
//	  "threshold": 0.3  # optional; default = matrix.DefaultRelevanceThreshold
//	}
//
// Output: {"kept":[...], "dropped":[...], "threshold":N, "total_in":N}.
func MatrixRelevance(_ Context, input map[string]any) (map[string]any, error) {
	var req struct {
		Focus     matrix.FocusFile        `json:"focus"`
		Chunks    []matrix.CandidateChunk `json:"chunks"`
		Threshold float64                 `json:"threshold"`
	}
	if err := remarshalInput(input, &req); err != nil {
		return nil, fmt.Errorf("matrix.relevance: %w", err)
	}
	// Fall back to the default only when the caller omitted the key
	// entirely. Checking req.Threshold == 0 would also clobber an
	// explicit threshold of 0 (i.e. "keep everything"), which is a
	// legitimate request.
	threshold := req.Threshold
	if _, present := input["threshold"]; !present {
		threshold = matrix.DefaultRelevanceThreshold
	}
	res := matrix.FilterChunks(req.Focus, req.Chunks, threshold)
	return map[string]any{
		"kept":      res.Kept,
		"dropped":   res.Dropped,
		"threshold": res.Threshold,
		"total_in":  res.TotalIn,
	}, nil
}
// MatrixDowngrade wraps matrix.MaybeDowngrade. Input shape:
//
//	{
//	  "mode": "codereview_lakehouse",
//	  "model": "x-ai/grok-4.1-fast",
//	  "forced_mode": false,        # optional
//	  "force_full_override": false # optional
//	}
//
// Output: matrix.DowngradeDecision JSON.
func MatrixDowngrade(_ Context, input map[string]any) (map[string]any, error) {
	var params struct {
		Mode              string `json:"mode"`
		Model             string `json:"model"`
		ForcedMode        bool   `json:"forced_mode"`
		ForceFullOverride bool   `json:"force_full_override"`
	}
	if err := remarshalInput(input, &params); err != nil {
		return nil, fmt.Errorf("matrix.downgrade: %w", err)
	}
	// Both identifiers are mandatory; the downgrade table is keyed on them.
	if params.Mode == "" || params.Model == "" {
		return nil, fmt.Errorf("matrix.downgrade: mode and model are required")
	}
	decision := matrix.MaybeDowngrade(matrix.DowngradeInput{
		Mode:              params.Mode,
		Model:             params.Model,
		ForcedMode:        params.ForcedMode,
		ForceFullOverride: params.ForceFullOverride,
	})
	out := map[string]any{
		"mode":            decision.Mode,
		"downgraded_from": decision.DowngradedFrom,
		"reason":          decision.Reason,
	}
	return out, nil
}
// DistillationScore wraps distillation.ScoreRecord — re-runs the
// scorer over a single EvidenceRecord, so a workflow node can grade
// a freshly-produced evidence row.
//
// Input: a JSON EvidenceRecord under the key "record":
//
//	{"record": {"run_id":"...", "task_id":"...", ...}}
//
// Output: ScoreOutput-ish map with category, reasons, sub_scores.
func DistillationScore(_ Context, input map[string]any) (map[string]any, error) {
	var payload struct {
		Record distillation.EvidenceRecord `json:"record"`
	}
	if err := remarshalInput(input, &payload); err != nil {
		return nil, fmt.Errorf("distillation.score: %w", err)
	}
	// An empty run_id means the record key was missing or empty — reject
	// rather than score a zero-value record.
	if payload.Record.RunID == "" {
		return nil, fmt.Errorf("distillation.score: record.run_id required")
	}
	scored := distillation.ScoreRecord(payload.Record)
	result := map[string]any{
		"category":   string(scored.Category),
		"reasons":    scored.Reasons,
		"sub_scores": scored.SubScores,
	}
	return result, nil
}
// DriftScorer wraps drift.ComputeScorerDrift. Input shape:
//
//	{
//	  "inputs": [
//	    {"record": {...EvidenceRecord...}, "persisted_category": "accepted"},
//	    ...
//	  ],
//	  "include_entries": false  # optional, default false
//	}
//
// Output: ScorerDriftReport JSON.
func DriftScorer(_ Context, input map[string]any) (map[string]any, error) {
	var params struct {
		Inputs         []drift.ScorerDriftInput `json:"inputs"`
		IncludeEntries bool                     `json:"include_entries"`
	}
	if err := remarshalInput(input, &params); err != nil {
		return nil, fmt.Errorf("drift.scorer: %w", err)
	}
	if len(params.Inputs) == 0 {
		return nil, fmt.Errorf("drift.scorer: inputs must be non-empty")
	}
	report := drift.ComputeScorerDrift(params.Inputs, params.IncludeEntries)
	// The report is a typed struct; round-trip it through JSON to hand
	// the runner the generic map shape every mode output uses.
	raw, err := json.Marshal(report)
	if err != nil {
		return nil, err
	}
	out := map[string]any{}
	if err := json.Unmarshal(raw, &out); err != nil {
		return nil, err
	}
	return out, nil
}
// ─── HTTP-backed modes ──────────────────────────────────────────
// MatrixSearch returns a workflow.Mode bound to a matrixd base URL
// and HTTP client. The mode marshals the generic input map to JSON
// and POSTs it to <matrixdURL>/matrix/search (the code previously
// documented /v1/matrix/search, which did not match the path used).
//
// Input shape mirrors matrix.SearchRequest (see retrieve.go).
// Output is the matrix.SearchResponse JSON decoded into a map.
func MatrixSearch(matrixdURL string, hc *http.Client) Mode {
	return func(ctx Context, input map[string]any) (map[string]any, error) {
		bs, err := json.Marshal(input)
		if err != nil {
			return nil, fmt.Errorf("matrix.search: marshal: %w", err)
		}
		req, err := http.NewRequestWithContext(ctx.Ctx, http.MethodPost,
			matrixdURL+"/matrix/search", bytes.NewReader(bs))
		if err != nil {
			// Prefixed like every other error path in this closure so the
			// workflow runner's provenance always names the failing mode.
			return nil, fmt.Errorf("matrix.search: build request: %w", err)
		}
		req.Header.Set("Content-Type", "application/json")
		resp, err := hc.Do(req)
		if err != nil {
			return nil, fmt.Errorf("matrix.search: %w", err)
		}
		defer resp.Body.Close()
		if resp.StatusCode != http.StatusOK {
			// Include the upstream body so a matrixd-side failure surfaces
			// with its own diagnostic, not just a bare status code.
			body, _ := io.ReadAll(resp.Body)
			return nil, fmt.Errorf("matrix.search: status %d: %s", resp.StatusCode, body)
		}
		var out map[string]any
		if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
			return nil, fmt.Errorf("matrix.search: decode: %w", err)
		}
		return out, nil
	}
}
// ─── Helpers ─────────────────────────────────────────────────────
// remarshalInput round-trips a generic input map through JSON into the
// typed target struct. Same trick as the matrixd handlers — the struct
// tags act as a free schema, so malformed fields surface as clear
// unmarshal errors instead of hand-written field-by-field coercion.
func remarshalInput(input map[string]any, target any) error {
	encoded, marshalErr := json.Marshal(input)
	if marshalErr != nil {
		return marshalErr
	}
	return json.NewDecoder(bytes.NewReader(encoded)).Decode(target)
}
// Blank reference that keeps the "context" import legal: MatrixSearch
// reaches the request context only through the ctx.Ctx field, so no
// other code in this file names the context package directly. Without
// this, the file would fail to compile with "imported and not used".
var _ = context.Background