From 2cdd65f3d13d0c0b0260a430080750a1d6de5e47 Mon Sep 17 00:00:00 2001
From: Farouk Adeleke
Date: Sun, 30 Nov 2025 06:20:20 -0500
Subject: [PATCH] Unoptimized Clinical Impact Judge

---
 agent.json                    | 2 +-
 config.json                   | 4 ++--
 src/llm_judge/cli/run_gepa.py | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/agent.json b/agent.json
index cb0c01e..0a8d5cb 100644
--- a/agent.json
+++ b/agent.json
@@ -25,7 +25,7 @@
     ]
   },
   "lm": {
-    "model": "openroutergoogle/gemini-2.5-pro",
+    "model": "openrouter/google/gemini-2.5-pro",
     "model_type": "chat",
     "cache": true,
     "num_retries": 3,
diff --git a/config.json b/config.json
index 62206ab..4b3b089 100644
--- a/config.json
+++ b/config.json
@@ -1,6 +1,6 @@
 {
-  "task_model": "openroutergoogle/gemini-2.5-pro",
-  "reflection_model": "openrouteranthropic/claude-4-sonnet",
+  "task_model": "openrouter/google/gemini-2.5-pro",
+  "reflection_model": "openrouter/anthropic/claude-4-sonnet",
   "max_tokens": 8000,
   "temperature": 0.1,
   "test_size": 50,
diff --git a/src/llm_judge/cli/run_gepa.py b/src/llm_judge/cli/run_gepa.py
index bd958c8..30f904d 100644
--- a/src/llm_judge/cli/run_gepa.py
+++ b/src/llm_judge/cli/run_gepa.py
@@ -75,7 +75,7 @@ def main():
         print(f"Using separate reflection model: {args.reflection_model}")
 
     model_base = (
-        args.provider
+        f"{args.provider}/"
         if args.provider == "openrouter"
         or args.provider == "bedrock"
        or args.provider == "ollama_chat"