1 Commits

Author SHA1 Message Date
96d0a034cd debug 2026-01-22 02:18:41 -08:00
3 changed files with 9 additions and 9 deletions

View File

@@ -1,7 +1,7 @@
{ {
"model": null, "model": null,
"max_iters": 50, "max_iters": 50,
"lm": "openrouter/anthropic/claude-sonnet-4.5", "lm": "openrouter/openai/gpt-5.2-codex",
"sub_lm": "openrouter/openai/gpt-5-mini", "sub_lm": "openrouter/openai/gpt-5-mini",
"api_base": "https://openrouter.ai/api/v1", "api_base": "https://openrouter.ai/api/v1",
"max_tokens": 32000, "max_tokens": 32000,

View File

@@ -278,7 +278,7 @@ class ToolLoggingCallback(BaseCallback):
class RLMCodingConfig(PrecompiledConfig): class RLMCodingConfig(PrecompiledConfig):
max_iters: int = 50 max_iters: int = 50
lm: str = "openrouter/anthropic/claude-sonnet-4.5" lm: str = "openrouter/openai/gpt-5.2-codex"
sub_lm: str = "openrouter/openai/gpt-5-mini" sub_lm: str = "openrouter/openai/gpt-5-mini"
api_base: str = "https://openrouter.ai/api/v1" api_base: str = "https://openrouter.ai/api/v1"
max_tokens: int = 32000 max_tokens: int = 32000
@@ -302,16 +302,14 @@ class RLMCodingProgram(PrecompiledProgram):
"run_bash": run_bash, "run_bash": run_bash,
} }
# tool logging for introspection of multi-turn conversations
dspy.settings.configure(callbacks=[ToolLoggingCallback()])
self.lm = dspy.LM( self.lm = dspy.LM(
self.config.lm, model=self.config.lm,
api_base=self.config.api_base, api_base=self.config.api_base,
max_tokens=self.config.max_tokens, max_tokens=self.config.max_tokens,
track_usage=self.config.track_usage, track_usage=self.config.track_usage,
) )
self.sub_lm = dspy.LM( self.sub_lm = dspy.LM(
self.config.sub_lm, model=self.config.sub_lm,
api_base=self.config.api_base, api_base=self.config.api_base,
max_tokens=self.config.max_tokens, max_tokens=self.config.max_tokens,
track_usage=self.config.track_usage, track_usage=self.config.track_usage,
@@ -325,6 +323,8 @@ class RLMCodingProgram(PrecompiledProgram):
verbose=self.config.verbose, verbose=self.config.verbose,
) )
agent.set_lm(self.lm) agent.set_lm(self.lm)
print(f"Using model: {self.lm.model}")
print(f"Using sub-model: {self.sub_lm.model}")
self.agent = agent self.agent = agent
def forward(self, task: str) -> str: def forward(self, task: str) -> str:
@@ -595,5 +595,5 @@ def main():
if __name__ == "__main__": if __name__ == "__main__":
agent = RLMCodingProgram(RLMCodingConfig()) agent = RLMCodingProgram(RLMCodingConfig())
agent.push_to_hub(MODAIC_REPO_PATH, commit_message="Add MCP server support and long paste handling", tag="v0.0.4") agent.push_to_hub(MODAIC_REPO_PATH, commit_message="debug", tag="v0.0.5")
#main() #main()

View File

@@ -29,7 +29,7 @@
] ]
}, },
"lm": { "lm": {
"model": "openrouter/anthropic/claude-sonnet-4.5", "model": "openrouter/openai/gpt-5.2-codex",
"model_type": "chat", "model_type": "chat",
"cache": true, "cache": true,
"num_retries": 3, "num_retries": 3,
@@ -68,7 +68,7 @@
] ]
}, },
"lm": { "lm": {
"model": "openrouter/anthropic/claude-sonnet-4.5", "model": "openrouter/openai/gpt-5.2-codex",
"model_type": "chat", "model_type": "chat",
"cache": true, "cache": true,
"num_retries": 3, "num_retries": 3,