-
Notifications
You must be signed in to change notification settings - Fork 232
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
chore(sample-app): langgraph example (#1878)
- Loading branch information
Showing
3 changed files
with
128 additions
and
66 deletions.
There are no files selected for viewing
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,97 @@ | ||
from typing import Literal | ||
|
||
from langchain_core.messages import HumanMessage | ||
from langchain_openai import ChatOpenAI | ||
from langchain_core.tools import tool | ||
from langgraph.checkpoint.memory import MemorySaver | ||
from langgraph.graph import END, StateGraph, MessagesState | ||
from langgraph.prebuilt import ToolNode | ||
|
||
from traceloop.sdk import Traceloop | ||
from traceloop.sdk.decorators import workflow as traceloop_workflow | ||
|
||
# Initialize Traceloop instrumentation before any model/graph calls so they are traced.
Traceloop.init(app_name="langgraph_example")
|
||
|
||
# Tool made available to the agent.
@tool
def search(query: str):
    """Call to surf the web."""
    # Canned answer standing in for a real web search —
    # don't tell the LLM that...
    lowered = query.lower()
    if any(term in lowered for term in ("sf", "san francisco")):
        return "It's 60 degrees and foggy."
    return "It's 90 degrees and sunny."
|
||
|
||
# All tools the agent may call.
tools = [search]

# Graph node that executes whichever tool the model requests.
tool_node = ToolNode(tools)

# Chat model with the tool schemas bound so it can emit tool calls.
model = ChatOpenAI(model="gpt-4o", temperature=0).bind_tools(tools)
|
||
|
||
# Routing function for the conditional edge out of the "agent" node.
def should_continue(state: MessagesState) -> Literal["tools", END]:
    """Decide the next node: run tools if the model requested any, else stop."""
    latest = state["messages"][-1]
    # A non-empty tool_calls list means the model wants a tool executed;
    # otherwise the conversation is finished and we reply to the user.
    return "tools" if latest.tool_calls else END
|
||
|
||
# Node function for the "agent" node.
def call_model(state: MessagesState):
    """Invoke the chat model on the full conversation history.

    Returns a partial state update; MessagesState merges the returned
    list onto the existing message list.
    """
    reply = model.invoke(state["messages"])
    return {"messages": [reply]}
|
||
|
||
# Assemble the agent graph: the model node and the tool-execution node
# cycle until the model stops requesting tools.
workflow = StateGraph(MessagesState)

# The two nodes we cycle between.
workflow.add_node("agent", call_model)
workflow.add_node("tools", tool_node)

# Execution always starts at the agent node.
workflow.set_entry_point("agent")

# After "agent" runs, should_continue routes either to "tools" or to END.
workflow.add_conditional_edges("agent", should_continue)

# Tool output always flows back to the agent for another model turn.
workflow.add_edge("tools", "agent")

# In-memory checkpointer so state persists between graph runs
# (conversations are keyed by thread_id in the invoke config).
checkpointer = MemorySaver()

# Compile into a LangChain Runnable, usable like any other runnable.
# Passing the checkpointer here is optional.
app = workflow.compile(checkpointer=checkpointer)
|
||
|
||
@traceloop_workflow()
def run_app():
    """Invoke the compiled graph once and print the model's final reply."""
    # thread_id selects the MemorySaver checkpoint for this conversation.
    result = app.invoke(
        {"messages": [HumanMessage(content="what is the weather in sf in Celsius")]},
        config={"configurable": {"thread_id": 42}},
    )

    print(result["messages"][-1].content)


run_app()