Claude (review-harness setup) f3ee4722a8 Phase A + B (MVP) — local review harness
Implements the MVP cutline from the planning artifact:
- Phase A: skeleton + CLI dispatch + provider interface + stub model doctor
- Phase B: scanner + git probe + 12 static analyzers + reporters + pipeline
- Phase B fixtures: clean-repo, insecure-repo, degraded-repo

12 static analyzers per PROMPT.md "Suggested Static Checks For MVP":
hardcoded_paths, shell_execution, raw_sql_interpolation, broad_cors,
secret_patterns, large_files, todo_comments, missing_tests,
env_file_committed, unsafe_file_io, exposed_mutation_endpoint,
hardcoded_local_ip.

Acceptance gates passing:
- B1 (intake produces accurate counts) ✓
- B2 (insecure fixture fires ≥8 distinct check_ids — actually 11/12) ✓
- B3 (clean fixture produces 0 confirmed findings — no false positives) ✓
- B4 (scrum mode produces all 6 required markdown + JSON reports) ✓
- B5 (receipts.json marks degraded phases honestly) ✓
- F  (self-review on this repo runs without crashing) ✓ — exit 66 (degraded
  because Phase C LLM review is hardcoded skipped)

Phases C (LLM review), D (validation cross-check), E (memory + diff +
rules subcommands) deferred per the cutline. The MVP delivers the
evidence-first path; LLM is purely additive.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-04-30 00:56:02 -05:00

153 lines
4.8 KiB
Go

// Package reporters writes the human-readable + machine-readable
// outputs the pipeline produces. JSON shapes mirror docs/REPORT_SCHEMA.md
// and PROMPT.md verbatim — this package is the contract between the
// harness and any downstream consumer (CI gate, observer, MCP tool).
package reporters
import (
"crypto/sha256"
"encoding/hex"
"encoding/json"
"os"
"path/filepath"
"time"
"local-review-harness/internal/analyzers"
"local-review-harness/internal/git"
"local-review-harness/internal/scanner"
)
// RepoIntake mirrors REVIEW_PIPELINE.md Phase 0 schema.
// Assembled by BuildIntake from the scanner result plus the git probe;
// this package never writes it to disk — the pipeline owns file I/O.
type RepoIntake struct {
// Repository root path as reported by the scanner.
RepoPath string `json:"repo_path"`
// Branch/commit/status come straight from the git probe; empty-ish
// values are expected when HasGit is false.
CurrentBranch string `json:"current_branch"`
LatestCommit string `json:"latest_commit"`
GitStatus string `json:"git_status"`
// HasGit reports whether the scanned directory is a git repository.
HasGit bool `json:"has_git"`
// FileCount is the number of files the scanner discovered.
FileCount int `json:"file_count"`
// Keyed by language name; values are presumably per-language file
// counts — confirm against the scanner package.
LanguageBreakdown map[string]int `json:"language_breakdown"`
// LargestFiles is copied element-wise from the scanner result.
LargestFiles []LargestFile `json:"largest_files"`
DependencyManifests []string `json:"dependency_manifests"`
TestManifests []string `json:"test_manifests"`
// GeneratedAt is a UTC RFC3339Nano timestamp set at build time.
GeneratedAt string `json:"generated_at"`
}
// LargestFile is one entry of RepoIntake.LargestFiles: a path plus its
// size in bytes and (optionally) its line count.
type LargestFile struct {
Path string `json:"path"`
// Size is the file size in bytes.
Size int64 `json:"size"`
// Lines is omitted from JSON when zero.
Lines int `json:"lines,omitempty"`
}
// StaticFindings is the wrapper shape with summary counts.
// It pairs the raw analyzer findings with the roll-up produced by
// SummarizeFindings.
type StaticFindings struct {
// Timestamp of generation; presumably RFC3339Nano like the intake —
// confirm with the pipeline that populates it.
GeneratedAt string `json:"generated_at"`
Findings []analyzers.Finding `json:"findings"`
Summary FindingsSummary `json:"summary"`
}
// FindingsSummary is the canonical roll-up of a findings slice,
// computed by SummarizeFindings. Counts are grouped three ways:
// by status, by severity, and by source/check identifier.
type FindingsSummary struct {
// Total is len(findings) regardless of status or severity.
Total int `json:"total"`
// Status counts (confirmed/suspected/rejected). A finding with an
// unrecognized status contributes only to Total.
Confirmed int `json:"confirmed"`
Suspected int `json:"suspected"`
Rejected int `json:"rejected"`
// Severity counts; same caveat as statuses for unrecognized values.
Critical int `json:"critical"`
High int `json:"high"`
Medium int `json:"medium"`
Low int `json:"low"`
// BySource counts findings per Finding.Source value.
BySource map[string]int `json:"by_source"`
// ByCheck counts findings per non-empty Finding.CheckID.
ByCheck map[string]int `json:"by_check"`
}
// Receipt mirrors REPORT_SCHEMA.md "Receipt Schema". One per run.
type Receipt struct {
// RunID identifies this run; format is defined by the caller.
RunID string `json:"run_id"`
RepoPath string `json:"repo_path"`
// Started/finished timestamps; format not enforced here — presumably
// RFC3339Nano like RepoIntake.GeneratedAt — confirm with the pipeline.
StartedAt string `json:"started_at"`
FinishedAt string `json:"finished_at"`
// Phases lists one receipt per pipeline phase, in execution order.
Phases []PhaseReceipt `json:"phases"`
// Summary is the run-wide roll-up (see SummarizeFindings).
Summary FindingsSummary `json:"summary"`
}
// PhaseReceipt records the outcome of a single pipeline phase inside a
// Receipt: its status, content hashes for cross-linking, the files it
// produced, and any errors encountered.
type PhaseReceipt struct {
Name string `json:"name"`
Status string `json:"status"` // ok|degraded|failed|skipped
// Input/output hashes; presumably the truncated sha256 returned by
// WriteJSON — confirm with the pipeline that fills these in.
InputHash string `json:"input_hash,omitempty"`
OutputHash string `json:"output_hash,omitempty"`
// OutputFiles lists paths written during this phase.
OutputFiles []string `json:"output_files,omitempty"`
// Errors holds human-readable error messages; empty on success.
Errors []string `json:"errors,omitempty"`
}
// BuildIntake assembles the Phase 0 intake structure from the scanner
// result and the git probe. It performs no file I/O — serializing and
// writing the intake is the pipeline's responsibility.
func BuildIntake(scan *scanner.Result, gi git.Info) RepoIntake {
	// Copy the scanner's largest-file records into the report shape.
	top := make([]LargestFile, 0, len(scan.LargestFiles))
	for _, lf := range scan.LargestFiles {
		top = append(top, LargestFile{
			Path:  lf.Path,
			Size:  lf.Size,
			Lines: lf.Lines,
		})
	}

	intake := RepoIntake{
		RepoPath:            scan.RepoPath,
		CurrentBranch:       gi.CurrentBranch,
		LatestCommit:        gi.LatestCommit,
		GitStatus:           gi.Status,
		HasGit:              gi.HasGit,
		FileCount:           len(scan.Files),
		LanguageBreakdown:   scan.LanguageBreakdown,
		LargestFiles:        top,
		DependencyManifests: scan.DependencyManifests,
		TestManifests:       scan.TestManifests,
		// Stamp generation time in UTC with nanosecond precision.
		GeneratedAt: time.Now().UTC().Format(time.RFC3339Nano),
	}
	return intake
}
// SummarizeFindings computes the canonical roll-up over a findings
// slice: total, per-status and per-severity counts, plus maps keyed by
// source and by check ID. Used by both the per-phase JSON and the
// receipt summary.
func SummarizeFindings(findings []analyzers.Finding) FindingsSummary {
	summary := FindingsSummary{
		Total:    len(findings),
		BySource: make(map[string]int),
		ByCheck:  make(map[string]int),
	}

	for i := range findings {
		f := &findings[i]

		// Tally recognized statuses; anything else only counts toward Total.
		switch f.Status {
		case analyzers.StatusConfirmed:
			summary.Confirmed++
		case analyzers.StatusSuspected:
			summary.Suspected++
		case analyzers.StatusRejected:
			summary.Rejected++
		}

		// Tally recognized severities, same policy as statuses.
		switch f.Severity {
		case analyzers.SeverityCritical:
			summary.Critical++
		case analyzers.SeverityHigh:
			summary.High++
		case analyzers.SeverityMedium:
			summary.Medium++
		case analyzers.SeverityLow:
			summary.Low++
		}

		summary.BySource[string(f.Source)]++
		// Findings without a check ID are excluded from the by-check map.
		if id := f.CheckID; id != "" {
			summary.ByCheck[id]++
		}
	}
	return summary
}
// WriteJSON marshals + writes; sha256 returned for receipt cross-link.
func WriteJSON(path string, v any) (sha string, err error) {
if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
return "", err
}
bs, err := json.MarshalIndent(v, "", " ")
if err != nil {
return "", err
}
bs = append(bs, '\n')
if err := os.WriteFile(path, bs, 0o644); err != nil {
return "", err
}
h := sha256.Sum256(bs)
return hex.EncodeToString(h[:])[:16], nil
}