from typing_extensions import TypedDict, Literal

from langchain_openai import ChatOpenAI
from langchain_core.messages import AnyMessage
from langgraph.graph import MessagesState, StateGraph, START, END
from langgraph.types import Command

model = ChatOpenAI(model="gpt-4o")
def call_llm(messages: list[AnyMessage], target_agent_nodes: list[str]):
    """Call LLM with structured output to get a natural language response
    as well as a target agent (node) to go to next.

    Args:
        messages: list of messages to pass to the LLM
        target_agent_nodes: list of the node names of the target agents to navigate to
    """
    # Constrain the model's output to a human-readable response plus a routing
    # decision ("goto"), so a single call both replies and picks the next node.
    json_schema = {
        "name": "Response",
        "parameters": {
            "type": "object",
            "properties": {
                "response": {
                    "type": "string",
                    "description": "A human readable response to the original question. Does not need to be a final response. Will be streamed back to the user.",
                },
                "goto": {
                    "enum": [*target_agent_nodes, "__end__"],
                    "type": "string",
                    "description": "The next agent to call, or __end__ if the user's query has been resolved. Must be one of the specified values.",
                },
            },
            "required": ["response", "goto"],
        },
    }
    response = model.with_structured_output(json_schema).invoke(messages)
    return response

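# Illustrative return shape of call_llm (assumed example, not an actual model
# response):
#   call_llm(messages, ["hotel_advisor"])
#   -> {"response": "Here are a few hotel ideas...", "goto": "hotel_advisor"}
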
def travel_advisor(
    state: MessagesState,
) -> Command[Literal["sightseeing_advisor", "hotel_advisor", "__end__"]]:
    system_prompt = (
        "You are a general travel expert that can recommend travel destinations (e.g. countries, cities, etc). "
        "If you need specific sightseeing recommendations, ask 'sightseeing_advisor' for help. "
        "If you need hotel recommendations, ask 'hotel_advisor' for help. "
        "If you have enough information to respond to the user, return '__end__'. "
        "Never mention other agents by name."
    )
    messages = [{"role": "system", "content": system_prompt}] + state["messages"]
    target_agent_nodes = ["sightseeing_advisor", "hotel_advisor"]
    response = call_llm(messages, target_agent_nodes)
    ai_msg = {"role": "ai", "content": response["response"], "name": "travel_advisor"}
    # Route to the chosen agent (or end the graph) and append this agent's
    # reply to the shared message history.
    return Command(goto=response["goto"], update={"messages": ai_msg})

def sightseeing_advisor(
    state: MessagesState,
) -> Command[Literal["travel_advisor", "hotel_advisor", "__end__"]]:
    system_prompt = (
        "You are a travel expert that can provide specific sightseeing recommendations for a given destination. "
        "If you need general travel help, go to 'travel_advisor' for help. "
        "If you need hotel recommendations, go to 'hotel_advisor' for help. "
        "If you have enough information to respond to the user, return '__end__'. "
        "Never mention other agents by name."
    )
    messages = [{"role": "system", "content": system_prompt}] + state["messages"]
    target_agent_nodes = ["travel_advisor", "hotel_advisor"]
    response = call_llm(messages, target_agent_nodes)
    ai_msg = {
        "role": "ai",
        "content": response["response"],
        "name": "sightseeing_advisor",
    }
    return Command(goto=response["goto"], update={"messages": ai_msg})

def hotel_advisor(
    state: MessagesState,
) -> Command[Literal["travel_advisor", "sightseeing_advisor", "__end__"]]:
    system_prompt = (
        "You are a travel expert that can provide hotel recommendations for a given destination. "
        "If you need general travel help, ask 'travel_advisor' for help. "
        "If you need specific sightseeing recommendations, ask 'sightseeing_advisor' for help. "
        "If you have enough information to respond to the user, return '__end__'. "
        "Never mention other agents by name."
    )
    messages = [{"role": "system", "content": system_prompt}] + state["messages"]
    target_agent_nodes = ["travel_advisor", "sightseeing_advisor"]
    response = call_llm(messages, target_agent_nodes)
    ai_msg = {"role": "ai", "content": response["response"], "name": "hotel_advisor"}
    return Command(goto=response["goto"], update={"messages": ai_msg})

builder = StateGraph(MessagesState)
builder.add_node("travel_advisor", travel_advisor)
builder.add_node("sightseeing_advisor", sightseeing_advisor)
builder.add_node("hotel_advisor", hotel_advisor)

# Conversations always start with the general travel advisor.
builder.add_edge(START, "travel_advisor")
graph = builder.compile()
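
# Usage sketch (not part of the original snippet): stream the compiled graph
# with a sample question. Assumes OPENAI_API_KEY is set in the environment;
# the question text and stream_mode="values" are illustrative choices.
for chunk in graph.stream(
    {
        "messages": [
            ("user", "I want to spend a week somewhere warm in Europe. Where should I go and where should I stay?")
        ]
    },
    stream_mode="values",
):
    # With stream_mode="values", each chunk is the full graph state; print the
    # most recently added message.
    chunk["messages"][-1].pretty_print()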