from langgraph.graph import StateGraph, MessagesState, START, END
from langgraph.prebuilt import ToolNode
from langchain_openai import ChatOpenAI
from langchain_core.tools import tool
# Define tools
@tool
def get_weather(location: str) -> str:
    """Get the current weather for a location."""
    # Stub implementation — a real version would call out to a weather API.
    # NOTE: the docstring above doubles as the tool description the LLM sees.
    report = f"Weather in {location}: 72°F, sunny"
    return report
# All tools the agent may call; also passed to the ToolNode below.
tools = [get_weather]
# Initialize model with tools
# NOTE(review): the empty model="" presumably lets the Adaptive gateway pick a
# model automatically per request — confirm against the llmadaptive API docs.
model = ChatOpenAI(
api_key="your-adaptive-api-key",
base_url="https://llmadaptive.uk/api/v1",
model="",
temperature=0,
).bind_tools(tools)  # attach tool schemas so the model can emit tool calls
# Define the agent function
def call_model(state: MessagesState):
    """Run the tool-bound model on the conversation so far.

    Returns a partial state update containing only the model's reply;
    the graph merges it into the message history.
    """
    reply = model.invoke(state["messages"])
    return {"messages": [reply]}
# Routing function
def should_continue(state: MessagesState):
    """Route after an agent turn: run tools if the last reply requested
    any tool calls, otherwise end the graph."""
    last = state["messages"][-1]
    return "tools" if last.tool_calls else END
# Create the graph
# Build the agent graph: agent -> (tools -> agent)* -> END.
workflow = StateGraph(MessagesState)
workflow.add_node("agent", call_model)
workflow.add_node("tools", ToolNode(tools))
# Every run starts at the agent node.
workflow.add_edge(START, "agent")
# After each agent turn, should_continue picks "tools" or END.
workflow.add_conditional_edges("agent", should_continue, ["tools", END])
# Tool results loop back so the agent can incorporate them into its answer.
workflow.add_edge("tools", "agent")
app = workflow.compile()
# Run one query end-to-end; `result` holds the accumulated message history.
result = app.invoke({
"messages": [{"role": "user", "content": "What's the weather in San Francisco?"}]
})