
LangChain: reading PDFs and using a vector database (ChatGLM3 + LangChain: load a document and store it as vectors)

Reference: https://www.pinecone.io/learn/series/langchain/langchain-expression-language/

The example below uses 3399.pdf (Rockchip RK3399 TRM Part 1): the PDF is loaded and split into page-level Documents, embedded with Jina, indexed in Chroma, and then queried through a retrieval chain built with LCEL.

import ChatGLM
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.vectorstores import Chroma
from langchain_community.embeddings import JinaEmbeddings
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnableParallel, RunnablePassthrough

# https://jina.ai/embeddings/
# https://python.langchain.com/docs/integrations/text_embedding/jina
# demo:  https://python.langchain.com/cookbook



llm = ChatGLM.ChatGLM_LLM()

# Load the PDF and split it into per-page Documents
loader = PyPDFLoader("3399.pdf")
documents = loader.load_and_split()

# Embed the pages with Jina and index them in an in-memory Chroma store
embeddings = JinaEmbeddings(
    jina_api_key="jina_fa2c341a2f634f1381f7cfec767150caSconYmQA2XRAcVKfZ7-Zboaqeydu", model_name="jina-embeddings-v2-base-en"
)

vectorstore = Chroma.from_documents(documents, embeddings)
retriever = vectorstore.as_retriever()

template = """Answer the question based only on the following context:
{context}

Question: {question}
"""
prompt = ChatPromptTemplate.from_template(template)
output_parser = StrOutputParser()

# Retrieve context in parallel with passing the raw question through,
# then fill the prompt, call the LLM, and parse the reply to a string
setup_and_retrieval = RunnableParallel(
    {"context": retriever, "question": RunnablePassthrough()}
)
chain = setup_and_retrieval | prompt | llm | output_parser

print(chain.invoke("eFuse Function Description"))
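The ChatGLM module imported above is the author's own wrapper and its contents are not shown in the article. Below is a minimal sketch of what ChatGLM_LLM might look like, assuming a locally served ChatGLM3 model behind an OpenAI-style chat-completions endpoint; the endpoint URL, model name, and field names are assumptions, not the author's actual code.

# ChatGLM.py -- hypothetical sketch, not the article's actual implementation
from typing import Any, List, Optional

import requests
from langchain_core.language_models.llms import LLM


class ChatGLM_LLM(LLM):
    # Assumed address of a locally running ChatGLM3 server
    endpoint: str = "http://127.0.0.1:8000/v1/chat/completions"
    model: str = "chatglm3-6b"

    @property
    def _llm_type(self) -> str:
        return "chatglm3"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Any = None,
        **kwargs: Any,
    ) -> str:
        # Send the rendered prompt as a single user message and return the reply text
        payload = {
            "model": self.model,
            "messages": [{"role": "user", "content": prompt}],
        }
        resp = requests.post(self.endpoint, json=payload, timeout=120)
        resp.raise_for_status()
        return resp.json()["choices"][0]["message"]["content"]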





Updated version: the imports are trimmed, the Chroma store is persisted to disk so later runs can reload it without re-embedding, the embedding model is switched to the Chinese jina-embeddings-v2-base-zh, and the prompt is tightened so the model answers "I do not know" when the context is insufficient.

import ChatGLM

from langchain_community.document_loaders import PyPDFLoader

from langchain_community.vectorstores import Chroma
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_community.embeddings import JinaEmbeddings
from langchain_core.runnables import RunnableParallel, RunnablePassthrough


llm = ChatGLM.ChatGLM_LLM()

# Load and split the PDF into per-page Documents
loader = PyPDFLoader("西游记.pdf")
documents = loader.load_and_split()

# Chinese embedding model this time, to match the Chinese document
embeddings = JinaEmbeddings(
    jina_api_key="jina_c5d02a61c97d4d79b88234362726e94aVLMTvF38wvrElYqpGYSxFtC5Ifhj", model_name="jina-embeddings-v2-base-zh"
)

# First run: build the index and persist it locally
# vectorstore = Chroma.from_documents(documents, embeddings, persist_directory="./chroma_db")

# Later runs: load the persisted index from disk instead of re-embedding
vectorstore = Chroma(persist_directory="./chroma_db", embedding_function=embeddings)

retriever = vectorstore.as_retriever()
template = """Answer the question based only on the following context,if can not ,please just say: I do not know,
please think step by step:
{context}

Question: {question}
"""
prompt = ChatPromptTemplate.from_template(template)
output_parser = StrOutputParser()
setup_and_retrieval = RunnableParallel(
    {"context": retriever, "question": RunnablePassthrough()}
)
chain = setup_and_retrieval | prompt | llm | output_parser
# print(chain.invoke("介绍下红楼梦"))  # alternative query: "Introduce Dream of the Red Chamber"
print(chain.invoke("第二十二回讲了什么"))  # "What does Chapter 22 tell?"
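If the chain keeps answering "I do not know", it usually helps to look at what the retriever actually pulls out of Chroma before it reaches the prompt. This check is not in the original article; the query string below is just the same example question.

# Retrievers are Runnables, so .invoke returns the matched Documents directly
docs = retriever.invoke("第二十二回讲了什么")
for i, d in enumerate(docs):
    print(i, d.metadata.get("page"), d.page_content[:80])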
