当前位置:   article > 正文

Spring AI 框架整合 Ollama,调用本地大模型(Spring Boot + AI 本地运行千问等模型)

springboot ai 本地运行千问模型

Ollama使用


Ollama是一个用于在本地计算机上运行大模型的软件
软件运行后监听11434端口,自己写的程序要调大模型就用这个端口

ollama命令
ollama list:显示模型列表
ollama show:显示模型的信息
ollama pull:拉取模型
ollama push:推送模型
ollama cp:拷贝一个模型
ollama rm:删除一个模型
ollama run:运行一个模型

ollama全是命令行下操作,所以结合web客户端界面使用【安装可选】
主流的web工具
1 Openwebui
2 LobeChat,功能强大,可调用Ollama的模型,也可调用openai,google的等,在设置界面中配置url和key即可


spring Ai框架调用


1 pom.xml,注意添加的依赖和配置了仓库

  1. <?xml version="1.0" encoding="UTF-8"?>
  2. <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  3. xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
  4. <modelVersion>4.0.0</modelVersion>
  5. <parent>
  6. <groupId>org.springframework.boot</groupId>
  7. <artifactId>spring-boot-starter-parent</artifactId>
  8. <version>3.2.5</version>
  9. <relativePath/> <!-- lookup parent from repository -->
  10. </parent>
  11. <groupId>com.example</groupId>
  12. <artifactId>spring-ai-ollama</artifactId>
  13. <version>0.0.1-SNAPSHOT</version>
  14. <name>spring-ai-ollama</name>
  15. <description>spring-ai-ollama</description>
  16. <properties>
  17. <java.version>17</java.version>
  18. <spring-ai.version>0.8.1</spring-ai.version>
  19. </properties>
  20. <dependencies>
  21. <dependency>
  22. <groupId>org.springframework.boot</groupId>
  23. <artifactId>spring-boot-starter-web</artifactId>
  24. </dependency>
  25. <dependency>
  26. <groupId>io.springboot.ai</groupId>
  27. <artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
  28. <version>1.0.0</version>
  29. </dependency>
  30. <dependency>
  31. <groupId>org.springframework.boot</groupId>
  32. <artifactId>spring-boot-devtools</artifactId>
  33. <scope>runtime</scope>
  34. <optional>true</optional>
  35. </dependency>
  36. <dependency>
  37. <groupId>org.projectlombok</groupId>
  38. <artifactId>lombok</artifactId>
  39. <optional>true</optional>
  40. </dependency>
  41. <dependency>
  42. <groupId>org.springframework.boot</groupId>
  43. <artifactId>spring-boot-starter-test</artifactId>
  44. <scope>test</scope>
  45. </dependency>
  46. </dependencies>
  47. <dependencyManagement>
  48. <dependencies>
  49. <dependency>
  50. <groupId>org.springframework.ai</groupId>
  51. <artifactId>spring-ai-bom</artifactId>
  52. <version>${spring-ai.version}</version>
  53. <type>pom</type>
  54. <scope>import</scope>
  55. </dependency>
  56. </dependencies>
  57. </dependencyManagement>
  58. <build>
  59. <plugins>
  60. <plugin>
  61. <groupId>org.springframework.boot</groupId>
  62. <artifactId>spring-boot-maven-plugin</artifactId>
  63. <configuration>
  64. <excludes>
  65. <exclude>
  66. <groupId>org.projectlombok</groupId>
  67. <artifactId>lombok</artifactId>
  68. </exclude>
  69. </excludes>
  70. </configuration>
  71. </plugin>
  72. </plugins>
  73. </build>
  74. <repositories>
  75. <repository>
  76. <id>spring-milestones</id>
  77. <name>Spring Milestones</name>
  78. <url>https://repo.spring.io/milestone</url>
  79. <snapshots>
  80. <enabled>false</enabled>
  81. </snapshots>
  82. </repository>
  83. </repositories>
  84. </project>

2 yml配置:base-url 写自己的 Ollama 服务地址(默认监听 11434 端口);model 指定要用的模型,需先执行 `ollama pull <模型名>` 下载到本地

  1. spring:
  2. application:
  3. name: spring-ai-ollama
  4. ai:
  5. ollama:
  6. base-url: http://120.55.99.218:11434
  7. chat:
  8. options:
  9. model: gemma:7b

3 测试

  1. import org.springframework.ai.chat.ChatResponse;
  2. import org.springframework.ai.chat.messages.AssistantMessage;
  3. import org.springframework.ai.chat.prompt.Prompt;
  4. import org.springframework.ai.chat.prompt.PromptTemplate;
  5. import org.springframework.ai.ollama.OllamaChatClient;
  6. import org.springframework.ai.ollama.api.OllamaOptions;
  7. import org.springframework.beans.factory.annotation.Autowired;
  8. import org.springframework.web.bind.annotation.*;
  9. @RestController
  10. public class AiController {
  11. @Autowired
  12. private OllamaChatClient ollamaChatClient;
  13. @GetMapping(value = "/chat_1")
  14. public String chat_1(@RequestParam(value = "message") String message) {
  15. String call = ollamaChatClient.call(message);
  16. System.out.println("模型回答 = " + call);
  17. return call;
  18. }
  19. @GetMapping(value = "/chat_2")
  20. public Object chat_2(@RequestParam(value = "message") String message) {
  21. Prompt prompt = new Prompt(
  22. message,
  23. OllamaOptions.create()
  24. //代码中配置,会覆盖application.yml中的配置
  25. .withModel("gemma:7b") //使用什么大模型
  26. .withTemperature(0.9F) //温度高,更发散,准确性降低,温度低,更保守,准确性高
  27. );
  28. ChatResponse call = ollamaChatClient.call(prompt);
  29. AssistantMessage output = call.getResult().getOutput();
  30. System.out.println("模型回答 = " + output.getContent());
  31. return output;
  32. }
  33. @GetMapping("/chat_3/{size}")
  34. public String chatYear(@PathVariable("size") Integer size) {
  35. String message = "随便写一句话,{size} 字以内";
  36. PromptTemplate promptTemplate = new PromptTemplate(message);
  37. promptTemplate.add("size", size);
  38. System.out.println(promptTemplate.render());
  39. return ollamaChatClient.call(promptTemplate.render());
  40. }
  41. }
声明:本文内容由网友自发贡献,不代表【wpsshop博客】立场,版权归原作者所有,本站不承担相应法律责任。如您发现有侵权的内容,请联系我们。转载请注明出处:https://www.wpsshop.cn/w/小小林熬夜学编程/article/detail/673844
推荐阅读
相关标签
  

闽ICP备14008679号