This commit is contained in:
2026-01-22 03:23:47 -08:00
parent 96d0a034cd
commit 901aa2ede9

View File

@@ -7,7 +7,6 @@ import json
import tempfile
from modaic import PrecompiledProgram, PrecompiledConfig
import dspy
from dspy.utils.callback import BaseCallback
# --- Modaic ---
@@ -62,6 +61,8 @@ def read_file(path: str, offset: int = 0, limit: int = None) -> str:
Returns:
File contents with line numbers
"""
print(f"{MAGENTA}⏺ Reading file: {path}{RESET}")
lines = open(path).readlines()
if limit is None:
limit = len(lines)
@@ -79,6 +80,7 @@ def write_file(path: str, content: str) -> str:
Returns:
'ok' on success
"""
print(f"{MAGENTA}⏺ Writing file: {path}{RESET}")
with open(path, "w") as f:
f.write(content)
return "ok"
@@ -96,6 +98,7 @@ def edit_file(path: str, old: str, new: str, replace_all: bool = False) -> str:
Returns:
'ok' on success, error message on failure
"""
print(f"{MAGENTA}⏺ Editing file: {path}{RESET}")
text = open(path).read()
if old not in text:
return "error: old_string not found"
@@ -118,6 +121,7 @@ def glob_files(pattern: str, path: str = ".") -> str:
Returns:
Newline-separated list of matching files
"""
print(f"{MAGENTA}⏺ Finding files with pattern: {pattern}{RESET}")
full_pattern = (path + "/" + pattern).replace("//", "/")
files = globlib.glob(full_pattern, recursive=True)
files = sorted(
@@ -138,6 +142,7 @@ def grep_files(pattern: str, path: str = ".") -> str:
Returns:
Matching lines in format 'filepath:line_num:content'
"""
print(f"{MAGENTA}⏺ Searching for pattern: {pattern}{RESET}")
regex = re.compile(pattern)
hits = []
for filepath in globlib.glob(path + "/**", recursive=True):
@@ -162,6 +167,7 @@ def run_bash(cmd: str) -> str:
Returns:
Command output (stdout and stderr combined)
"""
print(f"{MAGENTA}⏺ Running command: {cmd}{RESET}")
proc = subprocess.Popen(
cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True
)
@@ -197,11 +203,6 @@ AVAILABLE_MODELS = {
def select_model():
"""Interactive model selection or use environment variable."""
model_env = os.getenv("MODEL")
if model_env:
print(f"{GREEN}⏺ Using model from environment: {model_env}{RESET}")
return model_env
print(f"\n{BOLD}Select a model:{RESET}")
for key, (name, model_id) in AVAILABLE_MODELS.items():
print(f" {BLUE}{key}{RESET}. {name} ({DIM}{model_id}{RESET})")
@@ -246,36 +247,6 @@ class CodingAssistant(dspy.Signature):
desc="List of files that were written or modified during the task"
)
class ToolLoggingCallback(BaseCallback):
    """Callback that prints each tool invocation to the console in real time.

    Hooks into the DSPy callback system: a tool is announced when it starts,
    errors are reported when it ends, and the tool calls recorded on a
    finished ReAct prediction are echoed on module completion.
    """

    def on_tool_start(self, call_id, instance, inputs):
        """Announce a tool the moment it begins executing."""
        # Prefer the tool's declared .name; fall back to its string form.
        label = str(instance)
        if hasattr(instance, "name"):
            label = instance.name
        # Render each argument as key=<repr truncated to 50 chars>.
        rendered = []
        for key, value in inputs.items():
            rendered.append(f"{key}={repr(value)[:50]}")
        args_str = ", ".join(rendered)
        print(f" {MAGENTA}{label}({args_str}){RESET}", flush=True)

    def on_tool_end(self, call_id, outputs, exception):
        """Report a failure once a tool has finished running."""
        if exception:
            print(f" {RED}Error: {exception}{RESET}", flush=True)

    def on_module_end(self, call_id, outputs, exception):
        """Echo the tool calls carried by a finished ReAct prediction."""
        # Guard clause: only ReAct-style outputs carry a "tool_calls" entry.
        if not outputs or "tool_calls" not in outputs:
            return
        for call in outputs["tool_calls"]:
            pieces = ", ".join(
                f"{k}={repr(v)[:50]}" for k, v in call.args.items()
            )
            # The synthetic "finish" call marks ReAct completion.
            if call.name == "finish":
                print(f" {GREEN}⏺ finish{RESET}", flush=True)
            else:
                print(f" {MAGENTA}{call.name}({pieces}){RESET}", flush=True)
class RLMCodingConfig(PrecompiledConfig):
max_iters: int = 50
lm: str = "openrouter/openai/gpt-5.2-codex"
@@ -291,8 +262,9 @@ class RLMCodingProgram(PrecompiledProgram):
config: RLMCodingConfig
def __init__(self, config: RLMCodingConfig, **kwargs):
self.config = config
super().__init__(config, **kwargs)
self.config = config
self.tools = {
"read_file": read_file,
"write_file": write_file,
@@ -322,11 +294,13 @@ class RLMCodingProgram(PrecompiledProgram):
max_iterations=self.config.max_iters,
verbose=self.config.verbose,
)
agent.set_lm(self.lm)
print(f"Using model: {self.lm.model}")
print(f"Using sub-model: {self.sub_lm.model}")
self.agent = agent
print(f"Using model: {self.agent.get_lm().model}")
print(f"Using sub-model: {self.agent.sub_lm.model}")
def forward(self, task: str) -> str:
assert task, "Task cannot be empty"
return self.agent(task=task)
@@ -595,5 +569,5 @@ def main():
if __name__ == "__main__":
agent = RLMCodingProgram(RLMCodingConfig())
agent.push_to_hub(MODAIC_REPO_PATH, commit_message="debug", tag="v0.0.5")
agent.push_to_hub(MODAIC_REPO_PATH, commit_message="debug", branch="prod")
#main()