# Phase 39: Provider Registry
#
# Per-provider base_url, auth scheme, and default model. The gateway's
# /v1/chat dispatcher reads this file at boot to populate its provider
# table. Secrets (API keys) come from /etc/lakehouse/secrets.toml or
# environment variables — NEVER inline a key here.
#
# Adding a new provider:
#   1. New [[provider]] block with name, base_url, auth, default_model
#   2. Matching adapter at crates/aibridge/src/providers/<name>.rs
#      implementing the ProviderAdapter trait (chat + embed + unload)
#   3. Route arm in crates/gateway/src/v1/mod.rs matching on `name`
#   4. Model-prefix routing hint in resolve_provider() if the provider
#      uses a "<name>/..." model prefix (e.g. "openrouter/...")

[[provider]]
name = "ollama"
base_url = "http://localhost:3200"
auth = "none"
default_model = "qwen3.5:latest"
# Hot-path local inference. No bearer needed — Python sidecar on
# localhost handles the Ollama API. Model names are bare
# (e.g. "qwen3.5:latest", not "ollama/qwen3.5:latest").

[[provider]]
name = "ollama_cloud"
base_url = "https://ollama.com"
auth = "bearer"
auth_env = "OLLAMA_CLOUD_KEY"
default_model = "gpt-oss:120b"
# Cloud-tier Ollama. Key resolved from OLLAMA_CLOUD_KEY env at gateway
# boot. Model-prefix routing: "cloud/<model>" auto-routes here
# (see gateway::v1::resolve_provider).

[[provider]]
name = "openrouter"
base_url = "https://openrouter.ai/api/v1"
auth = "bearer"
auth_env = "OPENROUTER_API_KEY"
auth_fallback_files = ["/home/profit/.env", "/root/llm_team_config.json"]
default_model = "openai/gpt-oss-120b:free"
# Multi-provider gateway. Covers Anthropic, Google, OpenAI, MiniMax,
# Qwen, Gemma, etc. Key resolved via crates/gateway/src/v1/openrouter.rs
# resolve_openrouter_key() — env first, then fallback files.
# Model-prefix routing: "openrouter/<provider>/<model>" auto-routes here,
# prefix stripped before upstream call.
# NOTE(review): the "<name>"/"<model>"/"<provider>" placeholders above were
# reconstructed — the originals were stripped (likely as HTML-like tags);
# confirm exact spelling against resolve_provider() in the gateway crate.
# Planned (Phase 40 long-horizon — adapters not yet shipped):
#
# [[provider]]
# name = "gemini"
# base_url = "https://generativelanguage.googleapis.com/v1beta"
# auth = "api_key_query"
# auth_env = "GEMINI_API_KEY"
# default_model = "gemini-2.0-flash"
#
# [[provider]]
# name = "claude"
# base_url = "https://api.anthropic.com/v1"
# auth = "x_api_key"
# auth_env = "ANTHROPIC_API_KEY"
# default_model = "claude-3-5-sonnet-latest"