From f07effc51e477eded211841d054165799995df3b Mon Sep 17 00:00:00 2001 From: Farouk Adeleke Date: Wed, 21 Jan 2026 22:12:25 -0800 Subject: [PATCH] Add MCP server support and long paste handling --- config.json | 6 +- nanocode.py | 203 ++++++++++++++++++++++++++++++++++++++++++++----- program.json | 4 +- pyproject.toml | 2 +- 4 files changed, 190 insertions(+), 25 deletions(-) diff --git a/config.json b/config.json index 9ecf59b..389ea8a 100644 --- a/config.json +++ b/config.json @@ -1,8 +1,8 @@ { "model": null, - "max_iters": 20, - "lm": "openrouter/anthropic/claude-3.5-sonnet", - "sub_lm": "openrouter/openai/gpt-4.1", + "max_iters": 50, + "lm": "openrouter/anthropic/claude-sonnet-4.5", + "sub_lm": "openrouter/openai/gpt-5-mini", "api_base": "https://openrouter.ai/api/v1", "max_tokens": 32000, "max_output_chars": 100000, diff --git a/nanocode.py b/nanocode.py index 6e768b3..56b94fe 100644 --- a/nanocode.py +++ b/nanocode.py @@ -2,6 +2,9 @@ import os import re import glob as globlib import subprocess +import shlex +import json +import tempfile from modaic import PrecompiledProgram, PrecompiledConfig import dspy from dspy.utils.callback import BaseCallback @@ -24,6 +27,16 @@ MAGENTA = "\033[35m" # --- Display utilities --- +LONG_PASTE_THRESHOLD = int(os.environ.get("NANOCODE_LONG_PASTE_THRESHOLD", "4000")) + + +def save_long_paste(text: str) -> str: + fd, path = tempfile.mkstemp(prefix="nanocode_paste_", suffix=".txt") + with os.fdopen(fd, "w") as handle: + handle.write(text) + return path + + def separator(): """Return a horizontal separator line that fits the terminal width.""" @@ -264,9 +277,9 @@ class ToolLoggingCallback(BaseCallback): class RLMCodingConfig(PrecompiledConfig): - max_iters: int = 20 - lm: str = "openrouter/anthropic/claude-3.5-sonnet" # Default fallback - sub_lm: str = "openrouter/openai/gpt-4.1" # Default fallback + max_iters: int = 50 + lm: str = "openrouter/anthropic/claude-sonnet-4.5" + sub_lm: str = "openrouter/openai/gpt-5-mini" api_base: 
str = "https://openrouter.ai/api/v1" max_tokens: int = 32000 max_output_chars: int = 100000 @@ -291,13 +304,13 @@ class RLMCodingProgram(PrecompiledProgram): # tool logging for introspections on multi-turn conversations dspy.settings.configure(callbacks=[ToolLoggingCallback()]) - lm = dspy.LM( + self.lm = dspy.LM( self.config.lm, api_base=self.config.api_base, max_tokens=self.config.max_tokens, track_usage=self.config.track_usage, ) - sub_lm = dspy.LM( + self.sub_lm = dspy.LM( self.config.sub_lm, api_base=self.config.api_base, max_tokens=self.config.max_tokens, @@ -305,14 +318,13 @@ class RLMCodingProgram(PrecompiledProgram): ) agent = dspy.RLM( CodingAssistant, - sub_lm=sub_lm, + sub_lm=self.sub_lm, tools=self.tools, max_output_chars=self.config.max_output_chars, max_iterations=self.config.max_iters, verbose=self.config.verbose, ) - - agent.set_lm(lm) + agent.set_lm(self.lm) self.agent = agent def forward(self, task: str) -> str: @@ -321,17 +333,33 @@ class RLMCodingProgram(PrecompiledProgram): def get_tools(self): return self.tools - + def set_tool(self, name: str, tool: callable): self.tools[name] = tool + self.reload_repl_tools() def remove_tool(self, name: str): - del self.tools[name] + if name in self.tools: + del self.tools[name] + self.reload_repl_tools() + + def reload_repl_tools( + self, + ): # we need to create a new instance for tool mutations to be passed back into the REPL + new_instance = dspy.RLM( + CodingAssistant, + sub_lm=self.sub_lm, + tools=self.tools, + max_output_chars=self.config.max_output_chars, + max_iterations=self.config.max_iters, + verbose=self.config.verbose, + ) + new_instance.set_lm(self.lm) + self.agent = new_instance + def main(): - model = os.getenv("MODEL") - if model is None: - model = select_model() + model = select_model() # Add openrouter/ prefix if not already present if not model.startswith("openrouter/"): @@ -348,12 +376,35 @@ def main(): # Conversation history for context history = [] + # MCP servers registry + 
mcp_servers = {} + + def register_mcp_server(name, server): + tool_names = [] + for tool in server.tools: + tool_name = f"{name}_{tool.__name__}" + agent.set_tool(tool_name, tool) + tool_names.append(tool_name) + return tool_names + while True: try: print(separator()) user_input = input(f"{BOLD}{BLUE}❯{RESET} ").strip() print(separator()) + tmp_paste_path = None + if len(user_input) > LONG_PASTE_THRESHOLD: + tmp_paste_path = save_long_paste(user_input) + print( + f"{YELLOW}⏺ Long paste detected ({len(user_input)} chars). Saved to {tmp_paste_path}{RESET}" + ) + user_input = ( + f"The user pasted a long input ({len(user_input)} chars). " + f"It has been saved to {tmp_paste_path}. " + "Use read_file to view it. The file will be deleted after this response." + ) + if not user_input: continue if user_input in ("/q", "exit"): @@ -377,16 +428,32 @@ def main(): continue elif choice in AVAILABLE_MODELS: name, model_id = AVAILABLE_MODELS[choice] - new_model = model_id if model_id.startswith("openrouter/") else f"openrouter/{model_id}" + new_model = ( + model_id + if model_id.startswith("openrouter/") + else f"openrouter/{model_id}" + ) config.lm = new_model agent = RLMCodingProgram(config) + for server_name, info in mcp_servers.items(): + info["tools"] = register_mcp_server(server_name, info["server"]) print(f"{GREEN}⏺ Switched to: {name} ({new_model}){RESET}") elif choice == "c": - custom_model = input(f"{BOLD}{BLUE}❯{RESET} Enter model ID: ").strip() + custom_model = input( + f"{BOLD}{BLUE}❯{RESET} Enter model ID: " + ).strip() if custom_model: - new_model = custom_model if custom_model.startswith("openrouter/") else f"openrouter/{custom_model}" + new_model = ( + custom_model + if custom_model.startswith("openrouter/") + else f"openrouter/{custom_model}" + ) config.lm = new_model agent = RLMCodingProgram(config) + for server_name, info in mcp_servers.items(): + info["tools"] = register_mcp_server( + server_name, info["server"] + ) print(f"{GREEN}⏺ Switched to custom model: 
{new_model}{RESET}") else: print(f"{RED}⏺ Invalid model ID, keeping current model{RESET}") @@ -394,6 +461,97 @@ def main(): print(f"{RED}⏺ Invalid choice, keeping current model{RESET}") continue + if user_input.startswith("/add-mcp"): + parts = shlex.split(user_input) + args = parts[1:] + if not args: + print( + f"{YELLOW}⏺ Usage: /add-mcp <command-or-url> [--name <name>] [--auth <value>|--oauth] [--headers '<json>'] [--auto-auth|--no-auto-auth]{RESET}" + ) + continue + + name = None + auth = None + headers = None + auto_auth = None + positional = [] + i = 0 + while i < len(args): + if args[i] in ("--name", "-n") and i + 1 < len(args): + name = args[i + 1] + i += 2 + elif args[i].startswith("--auth="): + auth = args[i].split("=", 1)[1] + i += 1 + elif args[i] == "--auth" and i + 1 < len(args): + auth = args[i + 1] + i += 2 + elif args[i] == "--oauth": + auth = "oauth" + i += 1 + elif args[i] == "--auto-auth": + auto_auth = True + i += 1 + elif args[i] == "--no-auto-auth": + auto_auth = False + i += 1 + elif args[i].startswith("--headers="): + headers = json.loads(args[i].split("=", 1)[1]) + i += 1 + elif args[i] == "--headers" and i + 1 < len(args): + headers = json.loads(args[i + 1]) + i += 2 + else: + positional.append(args[i]) + i += 1 + + server_cmd = None + if positional: + if name is None and len(positional) >= 2: + name = positional[0] + server_cmd = " ".join(positional[1:]) + else: + server_cmd = " ".join(positional) + + if not server_cmd: + print( + f"{YELLOW}⏺ Usage: /add-mcp <command-or-url> [--name <name>] [--auth <value>|--oauth] [--headers '<json>'] [--auto-auth|--no-auto-auth]{RESET}" + ) + continue + + if not name: + name = re.sub(r"[^a-zA-Z0-9_]+", "_", server_cmd).strip("_") + if not name: + name = f"mcp_{len(mcp_servers) + 1}" + + if name in mcp_servers: + for tool_name in mcp_servers[name]["tools"]: + agent.remove_tool(tool_name) + + try: + from mcp2py import load + + kwargs = {} + if auth is not None: + kwargs["auth"] = auth + if headers: + kwargs["headers"] = headers + if auto_auth is not None: + kwargs["auto_auth"] = auto_auth + + 
server = load(server_cmd, **kwargs) + tool_names = register_mcp_server(name, server) + mcp_servers[name] = {"server": server, "tools": tool_names} + + print( + f"{GREEN}⏺ Added MCP server '{name}' with {len(tool_names)} tools{RESET}" + ) + print(f"{GREEN}⏺ Tools: {list(agent.tools.keys())}{RESET}") + except Exception as err: + print(f"{RED}⏺ Failed to add MCP server: {err}{RESET}") + + continue + # Build context from history context = f"Working directory: {os.getcwd()}\n" if history: @@ -406,11 +564,18 @@ def main(): print(f"\n{CYAN}⏺{RESET} Thinking...", flush=True) # Run the RLM agent - result = agent(task=task) + try: + result = agent(task=task) + finally: + if tmp_paste_path: + try: + os.remove(tmp_paste_path) + except OSError: + pass # Display the answer print(f"\n{CYAN}⏺{RESET} {render_markdown(result.answer)}") - + # Display usage print(f"\n{MAGENTA}⏺ Debug Prediction: {result}{RESET}") @@ -430,5 +595,5 @@ def main(): if __name__ == "__main__": agent = RLMCodingProgram(RLMCodingConfig()) - agent.push_to_hub(MODAIC_REPO_PATH, commit_message="Switch to RLM instead of ReAct", tag="v0.0.3") + agent.push_to_hub(MODAIC_REPO_PATH, commit_message="Add MCP server support and long paste handling", tag="v0.0.4") #main() diff --git a/program.json b/program.json index 01270e8..5b0165e 100644 --- a/program.json +++ b/program.json @@ -29,7 +29,7 @@ ] }, "lm": { - "model": "openrouter/anthropic/claude-3.5-sonnet", + "model": "openrouter/anthropic/claude-sonnet-4.5", "model_type": "chat", "cache": true, "num_retries": 3, @@ -68,7 +68,7 @@ ] }, "lm": { - "model": "openrouter/anthropic/claude-3.5-sonnet", + "model": "openrouter/anthropic/claude-sonnet-4.5", "model_type": "chat", "cache": true, "num_retries": 3, diff --git a/pyproject.toml b/pyproject.toml index 77f6006..034abc9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,4 +4,4 @@ version = "0.1.0" description = "Add your description here" readme = "README.md" requires-python = ">=3.13" -dependencies = 
["dspy>=3.1.2", "modaic>=0.10.4"] +dependencies = ["dspy>=3.1.2", "fastmcp>=2.14.3", "mcp2py>=0.6.0", "modaic>=0.10.4"]