The model used below is: google/flan-t5-xxl
from langchain.chains import LLMChain
from langchain_core.prompts import ChatPromptTemplate
from langchain_community.llms import HuggingFaceHub
import os

# Replace with your own Hugging Face access token.
os.environ["HUGGINGFACEHUB_API_TOKEN"] = "your_huggingface_api_token"

prompt = ChatPromptTemplate.from_template("""
Question: {input}

Answer: Let's think step by step
""")

repo_id = "google/flan-t5-xxl"
# See https://huggingface.co/models?pipeline_tag=text-generation&sort=downloads for some other options
# repo_id = "databricks/dolly-v2-3b"

llm = HuggingFaceHub(
    repo_id=repo_id,
    # repo_id="LLM360/CrystalChat",
    model_kwargs={"temperature": 0.2, "max_length": 18000},
)

chain = LLMChain(llm=llm, prompt=prompt)
print(chain.invoke("In the first movie of Harry Potter, what is the name of the three-headed dog?"))
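Note that HuggingFaceHub and LLMChain are deprecated in newer LangChain releases. The following is a minimal sketch of the same call using the langchain-huggingface package and LCEL piping; the package name, HuggingFaceEndpoint parameters, and max_new_tokens value are assumptions based on the current LangChain/Hugging Face integration, not code from the original article.

# Sketch only: assumes `pip install langchain-huggingface` and a valid token.
import os
from langchain_core.prompts import ChatPromptTemplate
from langchain_huggingface import HuggingFaceEndpoint

os.environ["HUGGINGFACEHUB_API_TOKEN"] = "your_huggingface_api_token"

prompt = ChatPromptTemplate.from_template("""
Question: {input}

Answer: Let's think step by step
""")

# Same model as above; any of the confirmed repos listed below should also work here.
llm = HuggingFaceEndpoint(
    repo_id="google/flan-t5-xxl",
    temperature=0.2,
    max_new_tokens=256,  # assumed generation limit for this sketch
)

# The LCEL pipe replaces LLMChain: the formatted prompt feeds straight into the LLM.
chain = prompt | llm
print(chain.invoke({"input": "In the first movie of Harry Potter, what is the name of the three-headed dog?"}))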
Repos confirmed to work (see the quick check sketch after this list):
- HuggingFaceH4/zephyr-7b-beta
- bigscience/bloomz-560m
- google-t5/t5-small
- openchat/openchat-3.5-0106
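A quick way to sanity-check these repos is to loop over them with the same HuggingFaceHub wrapper used earlier. This is a sketch assuming the token from the script above is already set; the test prompt and max_length value are arbitrary choices for illustration.

from langchain_community.llms import HuggingFaceHub

confirmed_repos = [
    "HuggingFaceH4/zephyr-7b-beta",
    "bigscience/bloomz-560m",
    "google-t5/t5-small",
    "openchat/openchat-3.5-0106",
]

for repo_id in confirmed_repos:
    llm = HuggingFaceHub(
        repo_id=repo_id,
        model_kwargs={"temperature": 0.2, "max_length": 512},
    )
    # invoke() sends one prompt to the hosted inference API for this repo.
    print(repo_id, "->", llm.invoke("What is the capital of France?"))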