lakehouse/Cargo.toml
root 26fc98c885 Phase 7: Vector index + RAG pipeline
- vectord crate: chunk → embed → store → search → RAG
- chunker: configurable chunk size + overlap, sentence-boundary aware splitting
- store: embeddings as Parquet (binary blob f32 vectors), portable format
- search: brute-force cosine similarity (works up to ~100K vectors)
- rag: full pipeline — embed question → search index → retrieve context → LLM answer
- Endpoints: POST /vectors/index, /vectors/search, /vectors/rag
- Gateway wired with vectord service
- Tested: 200 candidate resumes indexed in 5.4s, semantic search + RAG working
- 20 unit tests passing (chunker, search, ingestd, shared)
- AI gives honest "no match found" when context doesn't support an answer

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-27 08:12:28 -05:00

46 lines
1.1 KiB
TOML

# Cargo workspace root: all member crates are built together and share a
# single Cargo.lock and target directory.
[workspace]
# Feature resolver v2: per-target / per-build-kind feature unification.
resolver = "2"
members = [
"crates/shared",
"crates/proto",
"crates/storaged",
"crates/catalogd",
"crates/queryd",
"crates/aibridge",
"crates/ingestd",
"crates/vectord",   # vector index + RAG pipeline (added in Phase 7, per commit message)
"crates/gateway",
"crates/ui",
]
# Shared dependency versions: member crates inherit these via
# `<dep>.workspace = true`, keeping the whole workspace on one version of
# each library.
[workspace.dependencies]
# Async runtime + HTTP server stack.
tokio = { version = "1", features = ["full"] }
axum = "0.8"
# Serialization.
serde = { version = "1", features = ["derive"] }
serde_json = "1"
# Structured logging / tracing.
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
thiserror = "2"
uuid = { version = "1", features = ["v4", "serde"] }
chrono = { version = "0.4", features = ["serde"] }
tower-http = { version = "0.6", features = ["cors", "trace"] }
# Object storage + columnar/query engine.
# NOTE(review): arrow and parquet majors must stay in lockstep (both 55),
# and the datafusion release pins a specific arrow major — when upgrading,
# bump all three together and verify against the DataFusion release notes.
object_store = { version = "0.12", features = ["aws"] }
arrow = "55"
parquet = { version = "55", features = ["arrow", "async"] }
datafusion = "47"
bytes = "1"
futures = "0.3"
sha2 = "0.10"
url = "2"
# gRPC stack.
# NOTE(review): tonic / prost / tonic-build versions are coupled; confirm
# the compatibility matrix in the tonic README before bumping any of them.
tonic = "0.13"
prost = "0.13"
tonic-build = "0.13"
# OpenTelemetry family.
# NOTE(review): tracing-opentelemetry 0.29 is the release that tracks
# opentelemetry 0.28 — upgrade this group as a unit.
opentelemetry = "0.28"
opentelemetry_sdk = { version = "0.28", features = ["rt-tokio"] }
opentelemetry-stdout = { version = "0.28", features = ["trace"] }
tracing-opentelemetry = "0.29"
# Config + ingestion format support (TOML config, CSV, PDF text extraction,
# character-encoding detection/decoding).
toml = "0.8"
csv = "1"
lopdf = "0.35"
encoding_rs = "0.8"