#!/bin/bash
#
# External Memory Layer CLI for Claude Code
# ==========================================
# Token-efficient storage and retrieval of large outputs, transcripts, and context.
#
# Commands:
#   log                     Store content in memory (auto-chunks large content)
#   log --file              Store file contents
#   log --stdin             Read from stdin
#   fetch                   Retrieve memory entry
#   fetch --summary-only    Get just the summary
#   fetch --chunk N         Get specific chunk
#   list [--type TYPE]      List memory entries
#   search                  Search memory by content/tags
#   summarize               Generate/show summary
#   refs --checkpoint       Get memory refs for checkpoint
#   prune                   Clean old entries
#   stats                   Show memory statistics
#
# Examples:
#   echo "Large output..." | memory log --stdin --tag "test"
#   memory fetch mem-20260123-123456-abcd --summary-only
#   memory list --type output --limit 10
#   memory search "error"
#
# Token Guidelines:
#   - Content < 500 tokens: stored inline
#   - Content 500-4000 tokens: stored in file with summary
#   - Content > 4000 tokens: auto-chunked with parent summary
#
# Integration:
#   - Link to checkpoints: --checkpoint
#   - Link to directories: --directory
#   - Summaries auto-generated for efficient retrieval
#
# Documentation: /opt/agent-governance/docs/MEMORY_LAYER.md

set -euo pipefail

# Backing implementation; this script is a thin dispatcher around it.
readonly MEMORY_SCRIPT="/opt/agent-governance/memory/memory.py"

# Show help if no args or --help/-h. Quoted 'EOF' delimiter keeps the help
# text literal (no $-expansion inside the heredoc).
if [[ $# -eq 0 ]] || [[ "${1:-}" == "--help" ]] || [[ "${1:-}" == "-h" ]]; then
  cat << 'EOF'
External Memory Layer
=====================

Commands:
  log                    Store content in memory
  log --file             Store file contents
  log --stdin            Read from stdin
  fetch                  Retrieve memory entry
  fetch -s               Summary only
  fetch --chunk N        Get specific chunk
  list [--type TYPE]     List entries
  search                 Search memory
  summarize              Generate summary
  refs --checkpoint      Memory refs for checkpoint
  prune                  Clean old entries
  stats                  Show statistics

Options for 'log':
  --file, -f        Read from file
  --stdin           Read from stdin
  --type, -t        Type: transcript, output, context
  --tag             Add tag (repeatable)
  --checkpoint      Link to checkpoint
  --directory, -d   Link to directory
  --no-chunk        Don't auto-chunk large content
  --json            Output JSON

Token Thresholds:
  < 500 tokens      → stored inline
  500-4000 tokens   → stored in file + summary
  > 4000 tokens     → auto-chunked + summary

Examples:
  # Store large test output
  pytest tests/ 2>&1 | memory log --stdin --tag "pytest" --tag "tests"

  # Fetch just the summary
  memory fetch mem-20260123-123456-abcd -s

  # Get chunk 2 of a large entry
  memory fetch mem-20260123-123456-abcd --chunk 2

  # List recent outputs
  memory list --type output --limit 5

  # Search for errors
  memory search "failed" --limit 10

Documentation: /opt/agent-governance/docs/MEMORY_LAYER.md
EOF
  exit 0
fi

# Verify memory script exists before handing off.
# Fix: diagnostics go to stderr, not stdout.
if [[ ! -f "${MEMORY_SCRIPT}" ]]; then
  printf 'Error: memory.py not found at %s\n' "${MEMORY_SCRIPT}" >&2
  exit 1
fi

# Replace this process with the Python implementation, forwarding all
# arguments verbatim ("$@" preserves word boundaries).
exec python3 "${MEMORY_SCRIPT}" "$@"