当前位置:   article > 正文

SpringAI项目之Ollama大模型工具【聊天机器人】

SpringAI项目之Ollama大模型工具【聊天机器人】

备注:

(1)使用大模型工具 Ollama 安装 千问(qwen)或 llama3 大模型

(2)Spring AI 是 Spring 新推出的依赖,要求 JDK 17 以上、Spring Boot 3.2 以上版本。

SpringAI项目开发【聊天机器人】

1、pom.xml依赖引入:

(1)首先构建一个springboot的web项目:

  1. <dependency>
  2. <groupId>org.springframework.boot</groupId>
  3. <artifactId>spring-boot-starter-web</artifactId>
  4. </dependency>

(2)引入SpringAI大模型依赖 -- spring-ai-ollama

  1. <dependency>
  2. <groupId>org.springframework.ai</groupId>
  3. <artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
  4. <version>1.0.0-SNAPSHOT</version>
  5. </dependency>
  1. <!-- 因为maven暂时下载不了SpringAIOllama依赖,引用Spring快照依赖包 -->
  2. <repositories>
  3. <repository>
  4. <id>spring-snapshots</id>
  5. <name>Spring Snapshots</name>
  6. <url>https://repo.spring.io/snapshot</url>
  7. <releases>
  8. <enabled>false</enabled>
  9. </releases>
  10. </repository>
  11. </repositories>

总结:完整的pom.xml

  1. <?xml version="1.0" encoding="UTF-8"?>
  2. <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  3. xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
  4. <modelVersion>4.0.0</modelVersion>
  5. <parent>
  6. <groupId>org.springframework.boot</groupId>
  7. <artifactId>spring-boot-starter-parent</artifactId>
  8. <version>3.2.4</version>
  9. <relativePath/> <!-- lookup parent from repository -->
  10. </parent>
  11. <groupId>com.new3s</groupId>
  12. <artifactId>springAiTest428</artifactId>
  13. <version>1.0-SNAPSHOT</version>
  14. <name>springAiTest428</name>
  15. <description>springAiTest428</description>
  16. <properties>
  17. <java.version>22</java.version>
  18. </properties>
  19. <dependencies>
  20. <dependency>
  21. <groupId>org.springframework.boot</groupId>
  22. <artifactId>spring-boot-starter-web</artifactId>
  23. </dependency>
  24. <dependency>
  25. <groupId>org.springframework.ai</groupId>
  26. <artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
  27. <version>1.0.0-SNAPSHOT</version>
  28. </dependency>
  29. </dependencies>
  30. <repositories>
  31. <repository>
  32. <id>spring-snapshots</id>
  33. <name>Spring Snapshots</name>
  34. <url>https://repo.spring.io/snapshot</url>
  35. <releases>
  36. <enabled>false</enabled>
  37. </releases>
  38. </repository>
  39. </repositories>
  40. </project>

2、创建一个启动类

  1. package com.company;
  2. import org.springframework.boot.SpringApplication;
  3. import org.springframework.boot.autoconfigure.SpringBootApplication;
  4. @SpringBootApplication
  5. public class OllamaApplcation {
  6. public static void main(String[] args) {
  7. SpringApplication.run(OllamaApplcation.class, args);
  8. System.out.println("Hi, Spring AI Ollama!");
  9. }
  10. }

3、创建Controller

1> 千问的Controller

  1. package com.company.controller;
  2. import jakarta.annotation.Resource;
  3. import org.springframework.ai.chat.ChatResponse;
  4. import org.springframework.ai.chat.prompt.Prompt;
  5. import org.springframework.ai.ollama.OllamaChatClient;
  6. import org.springframework.ai.ollama.api.OllamaOptions;
  7. import org.springframework.beans.factory.annotation.Autowired;
  8. import org.springframework.web.bind.annotation.RequestMapping;
  9. import org.springframework.web.bind.annotation.RequestParam;
  10. import org.springframework.web.bind.annotation.RestController;
  11. @RestController
  12. public class QianWenController {
  13. @Resource
  14. private OllamaChatClient ollamaChatClient;
  15. @RequestMapping(value = "/qianwen-ai")
  16. private Object ollama(@RequestParam(value = "msg") String msg) {
  17. String called = ollamaChatClient.call(msg);
  18. System.out.println(called);
  19. return called;
  20. }
  21. @RequestMapping(value = "/qianwen-ai2")
  22. private Object ollama2(@RequestParam(value = "msg") String msg) {
  23. ChatResponse chatResponse = ollamaChatClient.call(new Prompt(msg, OllamaOptions.create()
  24. // 使用哪个模型:qwen:0.5b-chat模型
  25. .withModel("qwen:0.5b-chat")
  26. // 温度:温度值越高,准确率下降;温度值越低,准确率提高了
  27. .withTemperature(0.4F)
  28. ));
  29. //chatResponse.getResult().getOutput().getContent();
  30. System.out.println(chatResponse.getResult());
  31. return chatResponse.getResult();
  32. }
  33. }

2> llama3的Controller

  1. package com.company.controller;
  2. import jakarta.annotation.Resource;
  3. import org.springframework.ai.chat.ChatResponse;
  4. import org.springframework.ai.chat.prompt.Prompt;
  5. import org.springframework.ai.ollama.OllamaChatClient;
  6. import org.springframework.ai.ollama.api.OllamaOptions;
  7. import org.springframework.web.bind.annotation.RequestMapping;
  8. import org.springframework.web.bind.annotation.RequestParam;
  9. import org.springframework.web.bind.annotation.RestController;
  10. @RestController
  11. public class LlamaController {
  12. @Resource
  13. private OllamaChatClient ollamaChatClient;
  14. @RequestMapping(value = "/llama-ai")
  15. private Object ollama(@RequestParam(value = "msg") String msg) {
  16. String called = ollamaChatClient.call(msg);
  17. System.out.println(called);
  18. return called;
  19. }
  20. @RequestMapping(value = "/llama-ai2")
  21. private Object ollama2(@RequestParam(value = "msg") String msg) {
  22. ChatResponse chatResponse = ollamaChatClient.call(new Prompt(msg, OllamaOptions.create()
  23. // 使用哪个模型:llama3模型
  24. .withModel("llama3:8b")
  25. // 温度:温度值越高,准确率下降;温度值越低,准确率提高了
  26. .withTemperature(0.4F)
  27. ));
  28. System.out.println(chatResponse.getResult().getOutput());
  29. return chatResponse.getResult().getOutput();
  30. }
  31. }

4、资源配置【注意:Ollama的默认端口是11434】

  1. spring:
  2. ai:
  3. ollma:
  4. base-url: http://localhost:11434
  5. chat:
  6. options:
  7. # 配置文件指定时,现在程序中指定的模型,程序没有指定模型在对应查找配置中的模型
  8. # model: qwen:0.5b-chat
  9. model: llama3:8b

5、模型的运行结果

1> 调用llama3模型的对话接口

2> 调用qianwen模型的对话接口

声明:本文内容由网友自发贡献,不代表【wpsshop博客】立场,版权归原作者所有,本站不承担相应法律责任。如您发现有侵权的内容,请联系我们。转载请注明出处:https://www.wpsshop.cn/w/木道寻08/article/detail/815431
推荐阅读
相关标签
  

闽ICP备14008679号