scrum: cloud-default models — swap mistral:latest for ollama_cloud::gpt-oss:120b #1

Open
profit wants to merge 1 commit from scrum/cloud-default-models into main

View File

@@ -9956,7 +9956,7 @@ def _run_meta_pipeline(pipeline_id):
stages = pipe["stages"] or ["extract", "research", "validate", "synthesize"]
data_source = pipe["data_source"]
config = pipe["config"] or {}
-    model_sets = config.get("model_sets", [["qwen2.5:latest"], ["mistral:latest"], ["gemma2:latest"]])
+    model_sets = config.get("model_sets", [["qwen2.5:latest"], ["ollama_cloud::gpt-oss:120b"], ["gemma2:latest"]])
max_iterations = config.get("max_iterations", len(model_sets))
_meta_status[pipeline_id] = {"stage": 0, "substep": "Gathering data...", "progress": 0, "iteration": 0}
@@ -10081,7 +10081,7 @@ def create_meta_pipeline():
all_m = [m["name"] for m in resp.json().get("models", []) if m["size"] > 1e9]
models = [[m] for m in all_m[:4]]
except Exception:
-        models = [["qwen2.5:latest"], ["mistral:latest"]]
+        models = [["qwen2.5:latest"], ["ollama_cloud::gpt-oss:120b"]]
config = {"model_sets": models, "max_iterations": len(models)}
with get_db() as conn:
@@ -11832,7 +11832,7 @@ def run_refine(config):
start = time.time() * 1000
prompt = config["prompt"]
orchestrator = config.get("orchestrator", "qwen2.5:latest")
-    workers = config.get("models", ["qwen2.5:latest", "mistral:latest"])
+    workers = config.get("models", ["qwen2.5:latest", "ollama_cloud::gpt-oss:120b"])
max_stages = config.get("max_stages", 5)
yield sse({"type": "clear"})
steps = []