Skip to content

Commit e530585

Browse files
committed
feat: add Anthropic/Claude support and make agent provider-agnostic
- added langchain-anthropic as an optional dependency - replaced the manual OpenAI-specific agent construction with create_tool_calling_agent, which works with any provider that supports tool calling (OpenAI, Anthropic, Ollama, etc.) - removed the format_to_openai_tool_messages and OpenAIToolsAgentOutputParser imports since they're no longer needed - updated the turtle_agent llm.py to support switching between OpenAI, Anthropic, and Ollama via the LLM_PROVIDER env var - updated the .env example with Anthropic and Ollama config options - the ChatAnthropic import is optional, so existing installs without langchain-anthropic won't break. This came up because a few people asked about Claude support (see #56), and the fix was straightforward since LangChain already has the abstractions for it.
1 parent af8553b commit e530585

File tree

4 files changed

+68
-26
lines changed

4 files changed

+68
-26
lines changed

.env

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,16 @@
1+
# LLM Provider: "openai" (default), "anthropic", or "ollama"
2+
LLM_PROVIDER=openai
3+
14
# OpenAI Configuration
25
OPENAI_API_KEY=
36
OPENAI_API_VERSION=
4-
OPENAI_API_TYPE=
7+
OPENAI_API_TYPE=
8+
OPENAI_MODEL=gpt-4o
9+
10+
# Anthropic Configuration
11+
ANTHROPIC_API_KEY=
12+
ANTHROPIC_MODEL=claude-sonnet-4-20250514
13+
14+
# Ollama Configuration (local models)
15+
OLLAMA_MODEL=llama3
16+
OLLAMA_BASE_URL=http://localhost:11434

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@ dependencies = [
3030
"langchain-core~=0.3.52",
3131
"langchain-openai~=0.3.14",
3232
"langchain-ollama~=0.3.2",
33+
"langchain-anthropic~=0.3.12",
3334
"pydantic",
3435
"pyinputplus",
3536
"azure-identity",

src/rosa/rosa.py

Lines changed: 19 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -14,22 +14,25 @@
1414

1515
from typing import Any, AsyncIterable, Dict, Literal, Optional, Union
1616

17-
from langchain.agents import AgentExecutor
18-
from langchain.agents.format_scratchpad.openai_tools import (
19-
format_to_openai_tool_messages,
20-
)
21-
from langchain.agents.output_parsers.openai_tools import OpenAIToolsAgentOutputParser
17+
from langchain.agents import AgentExecutor, create_tool_calling_agent
2218
from langchain.prompts import MessagesPlaceholder
2319
from langchain_community.callbacks import get_openai_callback
2420
from langchain_core.messages import AIMessage, HumanMessage
2521
from langchain_core.prompts import ChatPromptTemplate
2622
from langchain_ollama import ChatOllama
2723
from langchain_openai import AzureChatOpenAI, ChatOpenAI
2824

25+
try:
26+
from langchain_anthropic import ChatAnthropic
27+
except ImportError:
28+
ChatAnthropic = None
29+
2930
from .prompts import RobotSystemPrompts, system_prompts
3031
from .tools import ROSATools
3132

3233
ChatModel = Union[ChatOpenAI, AzureChatOpenAI, ChatOllama]
34+
if ChatAnthropic is not None:
35+
ChatModel = Union[ChatOpenAI, AzureChatOpenAI, ChatOllama, ChatAnthropic]
3336

3437

3538
class ROSA:
@@ -38,7 +41,8 @@ class ROSA:
3841
3942
Args:
4043
ros_version (Literal[1, 2]): The version of ROS that the agent will interact with.
41-
llm (Union[AzureChatOpenAI, ChatOpenAI, ChatOllama]): The language model to use for generating responses.
44+
llm (ChatModel): The language model to use for generating responses. Supports OpenAI, Azure OpenAI,
45+
Ollama and Anthropic (Claude) models.
4246
tools (Optional[list]): A list of additional LangChain tool functions to use with the agent.
4347
tool_packages (Optional[list]): A list of Python packages containing LangChain tool functions to use.
4448
prompts (Optional[RobotSystemPrompts]): Custom prompts to use with the agent.
@@ -95,7 +99,6 @@ def __init__(
9599
ros_version, packages=tool_packages, tools=tools, blacklist=self.__blacklist
96100
)
97101
self.__prompts = self._get_prompts(prompts)
98-
self.__llm_with_tools = self.__llm.bind_tools(self.__tools.get_tools())
99102
self.__agent = self._get_agent()
100103
self.__executor = self._get_executor(verbose=verbose)
101104
self.__show_token_usage = show_token_usage if not streaming else False
@@ -245,18 +248,15 @@ def _get_executor(self, verbose: bool) -> AgentExecutor:
245248
return executor
246249

247250
def _get_agent(self):
248-
"""Create and return an agent for processing user inputs and generating responses."""
249-
agent = (
250-
{
251-
"input": lambda x: x["input"],
252-
"agent_scratchpad": lambda x: format_to_openai_tool_messages(
253-
x["intermediate_steps"]
254-
),
255-
"chat_history": lambda x: x["chat_history"],
256-
}
257-
| self.__prompts
258-
| self.__llm_with_tools
259-
| OpenAIToolsAgentOutputParser()
251+
"""Create and return an agent for processing user inputs and generating responses.
252+
253+
Uses create_tool_calling_agent which is provider-agnostic and works with
254+
any LLM that supports tool calling (OpenAI, Anthropic, Ollama, etc).
255+
"""
256+
agent = create_tool_calling_agent(
257+
llm=self.__llm,
258+
tools=self.__tools.get_tools(),
259+
prompt=self.__prompts,
260260
)
261261
return agent
262262

src/turtle_agent/scripts/llm.py

Lines changed: 35 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -19,14 +19,43 @@
1919

2020

2121
def get_llm(streaming: bool = False):
22-
"""A helper function to get the LLM instance."""
22+
"""A helper function to get the LLM instance.
23+
24+
Supports OpenAI (default), Anthropic and Ollama models.
25+
Set the LLM_PROVIDER env variable to switch between providers:
26+
- "openai" (default): uses OPENAI_API_KEY
27+
- "anthropic": uses ANTHROPIC_API_KEY
28+
- "ollama": uses local Ollama instance
29+
"""
2330
dotenv.load_dotenv(dotenv.find_dotenv())
2431

25-
llm = ChatOpenAI(
26-
api_key=get_env_variable("OPENAI_API_KEY"),
27-
model="gpt-5.1",
28-
streaming=streaming,
29-
)
32+
provider = os.getenv("LLM_PROVIDER", "openai").lower()
33+
34+
if provider == "anthropic":
35+
try:
36+
from langchain_anthropic import ChatAnthropic
37+
except ImportError:
38+
raise ImportError(
39+
"langchain-anthropic is required for Anthropic support. "
40+
"Install it with: pip install langchain-anthropic"
41+
)
42+
llm = ChatAnthropic(
43+
api_key=get_env_variable("ANTHROPIC_API_KEY"),
44+
model=os.getenv("ANTHROPIC_MODEL", "claude-sonnet-4-20250514"),
45+
streaming=streaming,
46+
)
47+
elif provider == "ollama":
48+
from langchain_ollama import ChatOllama
49+
llm = ChatOllama(
50+
model=os.getenv("OLLAMA_MODEL", "llama3"),
51+
base_url=os.getenv("OLLAMA_BASE_URL", "http://localhost:11434"),
52+
)
53+
else:
54+
llm = ChatOpenAI(
55+
api_key=get_env_variable("OPENAI_API_KEY"),
56+
model=os.getenv("OPENAI_MODEL", "gpt-4o"),
57+
streaming=streaming,
58+
)
3059

3160
return llm
3261

0 commit comments

Comments
 (0)