当前位置:   article > 正文

ChatGLM3 的 API 调用:启动 ChatGLM3 的 API 服务

启动chatglm3的api服务报错
conda activate chatglm3
cd openai_api_demo
python openai_api.py

服务启动成功后,通过内网映射将其暴露到公网。

然后通过 Anaconda 启动 Jupyter Notebook:

!pip install openai==1.6.1 -i https://pypi.tuna.tsinghua.edu.cn/simple/

  1. """
  2. This script is an example of using the OpenAI API to create various interactions with a ChatGLM3 model. It includes functions to:
  3. 1. Conduct a basic chat session, asking about weather conditions in multiple cities.
  4. 2. Initiate a simple chat in Chinese, asking the model to tell a short story.
  5. 3. Retrieve and print embeddings for a given text input.
  6. Each function demonstrates a different aspect of the API's capabilities, showcasing how to make requests and handle responses.
  7. """
  8. import os
  9. from openai import OpenAI
  10. base_url = "https://16h5v06565.zicp.fun/v1/"
  11. client = OpenAI(api_key="EMPTY", base_url=base_url)
  1. def function_chat():
  2. messages = [{"role": "user", "content": "What's the weather like in San Francisco, Tokyo, and Paris?"}]
  3. tools = [
  4. {
  5. "type": "function",
  6. "function": {
  7. "name": "get_current_weather",
  8. "description": "Get the current weather in a given location",
  9. "parameters": {
  10. "type": "object",
  11. "properties": {
  12. "location": {
  13. "type": "string",
  14. "description": "The city and state, e.g. San Francisco, CA",
  15. },
  16. "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
  17. },
  18. "required": ["location"],
  19. },
  20. },
  21. }
  22. ]
  23. response = client.chat.completions.create(
  24. model="chatglm3-6b",
  25. messages=messages,
  26. tools=tools,
  27. tool_choice="auto",
  28. )
  29. if response:
  30. content = response.choices[0].message.content
  31. print(content)
  32. else:
  33. print("Error:", response.status_code)
  34. def simple_chat(use_stream=True):
  35. messages = [
  36. {
  37. "role": "system",
  38. "content": "You are ChatGLM3, a large language model trained by Zhipu.AI. Follow the user's instructions carefully. Respond using markdown.",
  39. },
  40. {
  41. "role": "user",
  42. "content": "你好,带在华政搞计算机有前途么"
  43. }
  44. ]
  45. response = client.chat.completions.create(
  46. model="chatglm3-6b",
  47. messages=messages,
  48. stream=use_stream,
  49. max_tokens=256,
  50. temperature=0.8,
  51. presence_penalty=1.1,
  52. top_p=0.8)
  53. if response:
  54. if use_stream:
  55. for chunk in response:
  56. print(chunk.choices[0].delta.content)
  57. else:
  58. content = response.choices[0].message.content
  59. print(content)
  60. else:
  61. print("Error:", response.status_code)

  1. if __name__ == "__main__":
  2. simple_chat(use_stream=False)
  3. # simple_chat(use_stream=True)
  4. #embedding()
  5. # function_chat()

 

声明:本文内容由网友自发贡献,不代表【wpsshop博客】立场,版权归原作者所有,本站不承担相应法律责任。如您发现有侵权的内容,请联系我们。转载请注明出处:https://www.wpsshop.cn/w/盐析白兔/article/detail/260425
推荐阅读
相关标签
  

闽ICP备14008679号