chore(sample-app): langgraph example (#1878)
nirga authored Aug 24, 2024
1 parent 42ee945 commit 1e3ce76
Showing 3 changed files with 128 additions and 66 deletions.
96 changes: 30 additions & 66 deletions packages/sample-app/poetry.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions packages/sample-app/pyproject.toml
@@ -52,6 +52,7 @@ llama-index-vector-stores-chroma = "^0.1.6"
langchain-openai = "^0.1.8"
google-generativeai = "^0.6.0"
langchain-ibm = "^0.1.11"
langgraph = "^0.2.14"

[tool.poetry.dependencies.opentelemetry-instrumentation-openai]
path = "../opentelemetry-instrumentation-openai"
97 changes: 97 additions & 0 deletions packages/sample-app/sample_app/langgraph_example.py
@@ -0,0 +1,97 @@
from typing import Literal

from langchain_core.messages import HumanMessage
from langchain_openai import ChatOpenAI
from langchain_core.tools import tool
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, StateGraph, MessagesState
from langgraph.prebuilt import ToolNode

from traceloop.sdk import Traceloop
from traceloop.sdk.decorators import workflow as traceloop_workflow

Traceloop.init(app_name="langgraph_example")
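# Initializing the Traceloop SDK auto-instruments the LangChain/LangGraph and
# OpenAI calls below, so each run of this graph is captured as a trace.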


# Define the tools for the agent to use
@tool
def search(query: str):
"""Call to surf the web."""
# This is a placeholder, but don't tell the LLM that...
if "sf" in query.lower() or "san francisco" in query.lower():
return "It's 60 degrees and foggy."
return "It's 90 degrees and sunny."


tools = [search]

tool_node = ToolNode(tools)
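# ToolNode is a prebuilt node that executes whatever tool calls appear on the
# last AI message in the state and appends the results as tool messages.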

model = ChatOpenAI(model="gpt-4o", temperature=0).bind_tools(tools)
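# bind_tools attaches the tool schemas to the model so it can emit tool calls.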


# Define the function that determines whether to continue or not
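# It returns either the name of the tools node or END, LangGraph's sentinel
# value for terminating the run.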
def should_continue(state: MessagesState) -> Literal["tools", END]:
    messages = state["messages"]
    last_message = messages[-1]
    # If the LLM makes a tool call, then we route to the "tools" node
    if last_message.tool_calls:
        return "tools"
    # Otherwise, we stop (reply to the user)
    return END


# Define the function that calls the model
def call_model(state: MessagesState):
    messages = state["messages"]
    response = model.invoke(messages)
    # We return a list, because this will get added to the existing list
    return {"messages": [response]}


# Define a new graph
workflow = StateGraph(MessagesState)

# Define the two nodes we will cycle between
workflow.add_node("agent", call_model)
workflow.add_node("tools", tool_node)

# Set the entrypoint as `agent`
# This means that this node is the first one called
workflow.set_entry_point("agent")

# We now add a conditional edge
workflow.add_conditional_edges(
    # First, we define the start node. We use `agent`.
    # This means these are the edges taken after the `agent` node is called.
    "agent",
    # Next, we pass in the function that will determine which node is called next.
    should_continue,
)
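# Illustrative alternative (not part of the original sample): the routing can
# be made explicit with a path map that pins each return value of
# `should_continue` to a destination node:
#   workflow.add_conditional_edges(
#       "agent", should_continue, {"tools": "tools", END: END}
#   )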

# We now add a normal edge from `tools` to `agent`.
# This means that after `tools` is called, `agent` node is called next.
workflow.add_edge("tools", "agent")

# Initialize memory to persist state between graph runs
checkpointer = MemorySaver()

# Finally, we compile it!
# This compiles it into a LangChain Runnable,
# meaning you can use it as you would any other runnable.
# Note that we're (optionally) passing the memory when compiling the graph
app = workflow.compile(checkpointer=checkpointer)
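# For illustration (an assumption, not part of the original sample): since the
# compiled graph is a Runnable, intermediate updates can also be streamed
# instead of waiting for the final state:
#   for chunk in app.stream(
#       {"messages": [HumanMessage(content="what is the weather in sf")]},
#       config={"configurable": {"thread_id": 1}},
#   ):
#       print(chunk)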


@traceloop_workflow()
def run_app():
    # Use the Runnable
    final_state = app.invoke(
        {"messages": [HumanMessage(content="what is the weather in sf in Celsius")]},
        config={"configurable": {"thread_id": 42}},
    )

    print(final_state["messages"][-1].content)


run_app()
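# Illustrative follow-up (not part of the original sample): MemorySaver keys
# checkpoints by thread_id, so a second invoke with thread_id 42 would resume
# the same conversation:
#   app.invoke(
#       {"messages": [HumanMessage(content="and in New York?")]},
#       config={"configurable": {"thread_id": 42}},
#   )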
