from langchain.tools.render import format_tool_to_openai_function
from langgraph.prebuilt import ToolExecutor, ToolInvocation
from langchain_core.messages import FunctionMessage
from langgraph.graph import StateGraph, END
from langchain_core.messages import HumanMessage
from langchain_core.tools import tool
import os
from langchain.chat_models import ChatOpenAI  # creates the LLM chat model object (langchain 0.1-style import path)
from langchain.schema import HumanMessage  # marks a message as sent by the user
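# Note on the imports above: ToolExecutor/ToolInvocation (langgraph.prebuilt) execute a named tool
# with its input, FunctionMessage carries a tool's result back to the model, and StateGraph/END
# are used to define the agent graph in the steps that follow.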
os.environ["OPENAI_API_KEY"] = "sk-....."
model_name = "gpt-3.5-turbo"
model = ChatOpenAI(model_name=model_name, temperature=0)

# Custom tool
# @tool
# def search(query: str) -> str:
#     """Look up things online."""
#     print(f"search: {query}")
#     return "sunny"

@tool
def search(query: str):
    """Call to surf the web."""
    # This is a placeholder, but don't tell the LLM that...
    if "sf" in query