
Calling Common LLM APIs from Python: An Overview


1. openai

    from openai import OpenAI

    def chatgpt_api(query):
        client = OpenAI(
            base_url='https://api.openai-proxy.live/v1',
            api_key='sk-xxxxxxxxxxxxxxxxxxx',
        )
        chat_completion = client.chat.completions.create(
            messages=[
                {
                    "role": "user",
                    "content": f"{query}",
                }
            ],
            model="gpt-3.5-turbo",
        )
        return chat_completion.choices[0].message.content

    print(chatgpt_api('你是谁'))
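Hard-coding the key, as above, is fine for a quick test, but it is safer to read it from an environment variable. A minimal sketch, assuming the key was exported as OPENAI_API_KEY beforehand (the variable name is an assumption, any name works):

    import os
    from openai import OpenAI

    # Assumption: the key was exported first, e.g. `export OPENAI_API_KEY=sk-...`
    client = OpenAI(
        base_url='https://api.openai-proxy.live/v1',
        api_key=os.environ['OPENAI_API_KEY'],  # read from the environment instead of hard-coding
    )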

2. chatglm

    import requests

    def chatglm_api(query):
        url = "https://api.siliconflow.cn/v1/chat/completions"
        payload = {
            "model": "THUDM/glm-4-9b-chat",
            "messages": [
                {
                    "role": "user",
                    "content": f"{query}"
                }
            ]
        }
        headers = {
            "accept": "application/json",
            "content-type": "application/json",
            "authorization": "Bearer sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
        }
        response = requests.post(url, json=payload, headers=headers)
        # Parse the body as JSON; eval() would choke on JSON literals like true/false/null
        return response.json()['choices']

    print(chatglm_api('你是谁啊'))
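chatglm_api returns the raw choices list. The SiliconFlow endpoint follows the OpenAI chat-completions response layout, so the assistant text can be pulled out like this (a minimal sketch, assuming the request succeeded):

    choices = chatglm_api('你是谁啊')
    # Each choice holds a message dict; the reply text sits under 'content'
    print(choices[0]['message']['content'])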

3. Qwen

    import requests

    def qwen_api(query):
        url = "https://api.siliconflow.cn/v1/chat/completions"
        payload = {
            "model": "Qwen/Qwen2-72B-Instruct",
            "messages": [
                {
                    "role": "user",
                    "content": f"{query}"
                }
            ]
        }
        headers = {
            "accept": "application/json",
            "content-type": "application/json",
            "authorization": "Bearer sk-xxxxxxxxxxxxxxxxxxx"
        }
        response = requests.post(url, json=payload, headers=headers)
        # Parse the body as JSON rather than eval()ing the raw text
        return response.json()['choices']

    print(qwen_api('你是谁啊'))

4. Baichuan

    import requests

    def baichuan_api(query):
        api_key = 'sk-xxxxxxxxxxxxxxxxxxxxxxxxx'
        # Request URL, headers, and body
        url = "https://api.baichuan-ai.com/v1/chat/completions"
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {api_key}"
        }
        data = {
            "model": "Baichuan2-Turbo",
            "messages": [
                {
                    "role": "user",
                    "content": f"{query}"
                }
            ],
            "temperature": 0.3,
            "top_p": 0.85,
            "max_tokens": 2048,
            "with_search_enhance": True,
            "knowledge_base": {
                "ids": []
            },
            "stream": False
        }
        # Send the POST request
        response = requests.post(url, headers=headers, json=data)
        # Check whether the request succeeded
        if response.status_code == 200:
            # Return the parsed response body
            return response.json()
        else:
            # Print the error details
            print(f"Error: {response.status_code} - {response.text}")
            return None
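baichuan_api returns the full parsed JSON body. Assuming Baichuan's response also uses the OpenAI-style choices/message layout (worth double-checking against the official docs), the reply text could be read like this:

    result = baichuan_api('你是谁')
    if result is not None:
        # Assumed OpenAI-style layout: choices -> message -> content
        print(result['choices'][0]['message']['content'])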

5. 01.AI (Yi)

    import requests

    def onezero_api(query):
        url = "https://api.siliconflow.cn/v1/chat/completions"
        payload = {
            "model": "01-ai/Yi-1.5-34B-Chat-16K",
            "messages": [
                {
                    "role": "user",
                    "content": f"{query}"
                }
            ]
        }
        headers = {
            "accept": "application/json",
            "content-type": "application/json",
            "authorization": "Bearer sk-xxxxxxxxxxxxxxxxxxx"
        }
        response = requests.post(url, json=payload, headers=headers)
        # Parse the body as JSON rather than eval()ing the raw text
        return response.json()['choices']

    print(onezero_api('你是谁啊'))

6. iFLYTEK Spark (星火认知)

    # Install the SDK first: pip install --upgrade spark_ai_python
    from sparkai.llm.llm import ChatSparkLLM, ChunkPrintHandler
    from sparkai.core.messages import ChatMessage

    # URL for the Spark Max model; for other versions see the docs (https://www.xfyun.cn/doc/spark/Web.html)
    SPARKAI_URL = 'wss://spark-api.xf-yun.com/v3.5/chat'
    # API credentials; available in the iFLYTEK open platform console (https://console.xfyun.cn/services/bm35)
    SPARKAI_APP_ID = ''
    SPARKAI_API_SECRET = ''
    SPARKAI_API_KEY = ''
    # domain value for Spark Max; for other versions see the docs (https://www.xfyun.cn/doc/spark/Web.html)
    SPARKAI_DOMAIN = 'generalv3.5'

    if __name__ == '__main__':
        spark = ChatSparkLLM(
            spark_api_url=SPARKAI_URL,
            spark_app_id=SPARKAI_APP_ID,
            spark_api_key=SPARKAI_API_KEY,
            spark_api_secret=SPARKAI_API_SECRET,
            spark_llm_domain=SPARKAI_DOMAIN,
            streaming=False,
        )
        messages = [ChatMessage(
            role="user",
            content='你好呀'
        )]
        handler = ChunkPrintHandler()
        a = spark.generate([messages], callbacks=[handler])
        print(a)

7. Deepseek

    import requests

    def deepseek_api(query):
        url = "https://api.siliconflow.cn/v1/chat/completions"
        payload = {
            "model": "deepseek-ai/DeepSeek-V2-Chat",
            "messages": [
                {
                    "role": "user",
                    "content": f"{query}"
                }
            ]
        }
        headers = {
            "accept": "application/json",
            "content-type": "application/json",
            "authorization": "Bearer sk-xxxxxxxxxxxxxxxxxxx"
        }
        response = requests.post(url, json=payload, headers=headers)
        # Parse the body as JSON rather than eval()ing the raw text
        return response.json()['choices']

    print(deepseek_api('你是谁啊'))
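The ChatGLM, Qwen, Yi, and DeepSeek examples all hit the same SiliconFlow endpoint and differ only in the model field, so one generic helper removes the duplication. A minimal sketch, with the model name and key as placeholders:

    import requests

    def siliconflow_chat(query, model, api_key):
        # Generic call against SiliconFlow's OpenAI-compatible chat endpoint
        url = "https://api.siliconflow.cn/v1/chat/completions"
        payload = {
            "model": model,
            "messages": [{"role": "user", "content": query}],
        }
        headers = {
            "accept": "application/json",
            "content-type": "application/json",
            "authorization": f"Bearer {api_key}",
        }
        response = requests.post(url, json=payload, headers=headers)
        response.raise_for_status()  # fail loudly on HTTP errors
        return response.json()['choices'][0]['message']['content']

    # Usage: swap in any model name hosted on SiliconFlow
    # print(siliconflow_chat('你是谁', 'deepseek-ai/DeepSeek-V2-Chat', 'sk-xxxxxxxxxxxxxxxxxxx'))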
