import uuid
from typing_extensions import TypedDict, NotRequired
from langgraph.graph import StateGraph, START, END
from langchain.chat_models import init_chat_model
from langgraph.checkpoint.memory import InMemorySaver

class State(TypedDict):
    topic: NotRequired[str]
    joke: NotRequired[str]

llm = init_chat_model(
    "anthropic:claude-3-7-sonnet-latest",
    temperature=0,
)

def generate_topic(state: State):
    """Call the LLM to generate a joke topic."""
    msg = llm.invoke("Give me a funny topic for a joke")
    return {"topic": msg.content}

def write_joke(state: State):
    """Call the LLM to write a short joke about the topic."""
    msg = llm.invoke(f"Write a short joke about {state['topic']}")
    return {"joke": msg.content}

# Build the workflow
workflow = StateGraph(State)

# Add nodes
workflow.add_node("generate_topic", generate_topic)
workflow.add_node("write_joke", write_joke)

# Add edges to connect the nodes
workflow.add_edge(START, "generate_topic")
workflow.add_edge("generate_topic", "write_joke")
workflow.add_edge("write_joke", END)

# Compile with an in-memory checkpointer so each run's state is persisted
checkpointer = InMemorySaver()
graph = workflow.compile(checkpointer=checkpointer)

# As the last expression, this displays the compiled graph in a notebook
graph
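
# Usage sketch (an assumption, not part of the original listing): because a
# checkpointer is attached, each invocation needs a thread_id in its config;
# the `uuid` import above is the usual way to generate one.
config = {"configurable": {"thread_id": uuid.uuid4()}}

# Run the two-step workflow from an empty state; the checkpointer records a
# checkpoint after each node under this thread_id.
state = graph.invoke({}, config)
print(state["topic"])
print(state["joke"])

# Inspect the latest checkpoint saved for this thread.
print(graph.get_state(config).values)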