diff --git a/hutool-ai/src/main/java/cn/hutool/v7/ai/ModelName.java b/hutool-ai/src/main/java/cn/hutool/v7/ai/ModelName.java index 82822fd66..dc38a0197 100644 --- a/hutool-ai/src/main/java/cn/hutool/v7/ai/ModelName.java +++ b/hutool-ai/src/main/java/cn/hutool/v7/ai/ModelName.java @@ -42,7 +42,11 @@ public enum ModelName { /** * grok */ - GROK("grok"); + GROK("grok"), + /** + * ollama + */ + OLLAMA("ollama"); private final String value; diff --git a/hutool-ai/src/main/java/cn/hutool/v7/ai/Models.java b/hutool-ai/src/main/java/cn/hutool/v7/ai/Models.java index 06d83c870..3277abc6d 100644 --- a/hutool-ai/src/main/java/cn/hutool/v7/ai/Models.java +++ b/hutool-ai/src/main/java/cn/hutool/v7/ai/Models.java @@ -30,7 +30,7 @@ public class Models { private final String model; - Hutool(String model) { + Hutool(final String model) { this.model = model; } @@ -46,7 +46,7 @@ public class Models { private final String model; - DeepSeek(String model) { + DeepSeek(final String model) { this.model = model; } @@ -94,7 +94,7 @@ public class Models { private final String model; - Openai(String model) { + Openai(final String model) { this.model = model; } @@ -147,7 +147,7 @@ public class Models { private final String model; - Doubao(String model) { + Doubao(final String model) { this.model = model; } @@ -182,7 +182,7 @@ public class Models { private final String model; - Grok(String model) { + Grok(final String model) { this.model = model; } @@ -191,4 +191,18 @@ public class Models { } } + // Ollama的模型 + public enum Ollama { + QWEN3_32B("qwen3:32b"); + + private final String model; + + Ollama(final String model) { + this.model = model; + } + + public String getModel() { + return model; + } + } } diff --git a/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/OllamaCommon.java b/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/OllamaCommon.java new file mode 100644 index 000000000..203ce2e43 --- /dev/null +++ b/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/OllamaCommon.java 
@@ -0,0 +1,76 @@ +/* + * Copyright (c) 2025 Hutool Team and hutool.cn + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package cn.hutool.v7.ai.model.ollama; + +/** + * Ollama公共类 + * + * @author yangruoyu-yumeisoft + * @since 5.8.40 + */ +public class OllamaCommon { + + /** + * Ollama模型格式枚举 + */ + public enum OllamaFormat { + /** + * JSON格式 + */ + JSON("json"), + /** + * 无格式 + */ + NONE(""); + + private final String format; + + OllamaFormat(String format) { + this.format = format; + } + + public String getFormat() { + return format; + } + } + + /** + * Ollama选项常量 + */ + public static class Options { + /** + * 温度参数 + */ + public static final String TEMPERATURE = "temperature"; + /** + * top_p参数 + */ + public static final String TOP_P = "top_p"; + /** + * top_k参数 + */ + public static final String TOP_K = "top_k"; + /** + * 最大token数 + */ + public static final String NUM_PREDICT = "num_predict"; + /** + * 随机种子 + */ + public static final String SEED = "seed"; + } +} diff --git a/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/OllamaConfig.java b/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/OllamaConfig.java new file mode 100644 index 000000000..5e0275c41 --- /dev/null +++ b/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/OllamaConfig.java @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2025 Hutool Team and hutool.cn + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the 
License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package cn.hutool.v7.ai.model.ollama; + +import cn.hutool.v7.ai.Models; +import cn.hutool.v7.ai.core.BaseConfig; + +/** + * Ollama配置类,初始化API接口地址,设置默认的模型 + * + * @author yangruoyu-yumeisoft + * @since 5.8.40 + */ +public class OllamaConfig extends BaseConfig { + + private final String API_URL = "http://localhost:11434"; + + private final String DEFAULT_MODEL = Models.Ollama.QWEN3_32B.getModel(); + + public OllamaConfig() { + setApiUrl(API_URL); + setModel(DEFAULT_MODEL); + } + + public OllamaConfig(String apiUrl) { + this(); + setApiUrl(apiUrl); + } + + public OllamaConfig(String apiUrl, String model) { + this(); + setApiUrl(apiUrl); + setModel(model); + } + + @Override + public String getModelName() { + return "ollama"; + } + +} diff --git a/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/OllamaProvider.java b/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/OllamaProvider.java new file mode 100644 index 000000000..902cccd1a --- /dev/null +++ b/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/OllamaProvider.java @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2025 Hutool Team and hutool.cn + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package cn.hutool.v7.ai.model.ollama; + +import cn.hutool.v7.ai.core.AIConfig; +import cn.hutool.v7.ai.core.AIServiceProvider; + +/** + * 创建Ollama服务实现类 + * + * @author yangruoyu-yumeisoft + * @since 5.8.40 + */ +public class OllamaProvider implements AIServiceProvider { + + @Override + public String getServiceName() { + return "ollama"; + } + + @Override + public OllamaService create(final AIConfig config) { + return new OllamaServiceImpl(config); + } +} diff --git a/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/OllamaService.java b/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/OllamaService.java new file mode 100644 index 000000000..7cf922379 --- /dev/null +++ b/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/OllamaService.java @@ -0,0 +1,151 @@ +/* + * Copyright (c) 2025 Hutool Team and hutool.cn + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package cn.hutool.v7.ai.model.ollama; + +import cn.hutool.v7.ai.core.AIService; +import cn.hutool.v7.ai.core.Message; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Consumer; + +/** + * Ollama特有的功能 + * + * @author yangruoyu-yumeisoft + * @since 5.8.40 + */ +public interface OllamaService extends AIService { + + /** + * 生成文本补全 + * + * @param prompt 输入提示 + * @return AI回答 + * @since 5.8.40 + */ + String generate(String prompt); + + /** + * 生成文本补全-SSE流式输出 + * + * @param prompt 输入提示 + * @param callback 流式数据回调函数 + * @since 5.8.40 + */ + void generate(String prompt, Consumer<String> callback); + + /** + * 生成文本补全(带选项) + * + * @param prompt 输入提示 + * @param format 响应格式 + * @return AI回答 + * @since 5.8.40 + */ + String generate(String prompt, String format); + + /** + * 生成文本补全(带选项)-SSE流式输出 + * + * @param prompt 输入提示 + * @param format 响应格式 + * @param callback 流式数据回调函数 + * @since 5.8.40 + */ + void generate(String prompt, String format, Consumer<String> callback); + + /** + * 生成文本嵌入向量 + * + * @param prompt 输入文本 + * @return 嵌入向量结果 + * @since 5.8.40 + */ + String embeddings(String prompt); + + /** + * 列出本地可用的模型 + * + * @return 模型列表 + * @since 5.8.40 + */ + String listModels(); + + /** + * 显示模型信息 + * + * @param modelName 模型名称 + * @return 模型信息 + * @since 5.8.40 + */ + String showModel(String modelName); + + /** + * 拉取模型 + * + * @param modelName 模型名称 + * @return 拉取结果 + * @since 5.8.40 + */ + String pullModel(String modelName); + + /** + * 删除模型 + * + * @param modelName 模型名称 + * @return 删除结果 + * @since 5.8.40 + */ + String deleteModel(String modelName); + + /** + * 复制模型 + * + * @param source 源模型名称 + * @param destination 目标模型名称 + * @return 复制结果 + * @since 5.8.40 + */ + String copyModel(String source, String destination); + + /** + * 简化的对话方法 + * + * @param prompt 对话题词 + * @return AI回答 + * @since 5.8.40 + */ + default String chat(String prompt) { + final List<Message> messages = new ArrayList<>(); + messages.add(new Message("user", prompt)); + return chat(messages); + }
+ + /** + * 简化的对话方法-SSE流式输出 + * + * @param prompt 对话题词 + * @param callback 流式数据回调函数 + * @since 5.8.40 + */ + default void chat(String prompt, Consumer<String> callback) { + final List<Message> messages = new ArrayList<>(); + messages.add(new Message("user", prompt)); + chat(messages, callback); + } +} diff --git a/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/OllamaServiceImpl.java b/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/OllamaServiceImpl.java new file mode 100644 index 000000000..675b54fa2 --- /dev/null +++ b/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/OllamaServiceImpl.java @@ -0,0 +1,273 @@ +/* + * Copyright (c) 2025 Hutool Team and hutool.cn + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package cn.hutool.v7.ai.model.ollama; + +import cn.hutool.v7.ai.AIException; +import cn.hutool.v7.ai.core.AIConfig; +import cn.hutool.v7.ai.core.BaseAIService; +import cn.hutool.v7.ai.core.Message; +import cn.hutool.v7.core.bean.path.BeanPath; +import cn.hutool.v7.core.text.StrUtil; +import cn.hutool.v7.core.thread.ThreadUtil; +import cn.hutool.v7.http.client.Request; +import cn.hutool.v7.http.client.Response; +import cn.hutool.v7.http.meta.HeaderName; +import cn.hutool.v7.http.meta.Method; +import cn.hutool.v7.json.JSONObject; +import cn.hutool.v7.json.JSONUtil; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; + +/** + * Ollama服务,AI具体功能的实现 + * + * @author yangruoyu-yumeisoft + * @since 5.8.40 + */ +public class OllamaServiceImpl extends BaseAIService implements OllamaService { + + // 对话补全 + private static final String CHAT_ENDPOINT = "/api/chat"; + // 文本生成 + private static final String GENERATE_ENDPOINT = "/api/generate"; + // 文本嵌入 + private static final String EMBEDDINGS_ENDPOINT = "/api/embeddings"; + // 列出模型 + private static final String LIST_MODELS_ENDPOINT = "/api/tags"; + // 显示模型信息 + private static final String SHOW_MODEL_ENDPOINT = "/api/show"; + // 拉取模型 + private static final String PULL_MODEL_ENDPOINT = "/api/pull"; + // 删除模型 + private static final String DELETE_MODEL_ENDPOINT = "/api/delete"; + // 复制模型 + private static final String COPY_MODEL_ENDPOINT = "/api/copy"; + + /** + * 构造函数 + * + * @param config AI配置 + */ + public OllamaServiceImpl(final AIConfig config) { + super(config); + } + + @Override + public String chat(final List<Message> messages) { + final String paramJson = buildChatRequestBody(messages); + final Response response = sendPost(CHAT_ENDPOINT, paramJson); + final JSONObject responseJson = JSONUtil.parseObj(response.body()); + final Object errorMessage = BeanPath.of("error").getValue(responseJson); + if(errorMessage!=null){ + throw new
RuntimeException(errorMessage.toString()); + } + return BeanPath.of("message.content").getValue(responseJson).toString(); + } + + @Override + public void chat(final List<Message> messages, final Consumer<String> callback) { + final Map<String, Object> paramMap = buildChatStreamRequestBody(messages); + ThreadUtil.newThread(() -> sendPostStream(CHAT_ENDPOINT, paramMap, callback::accept), "ollama-chat-sse").start(); + } + + @Override + public String generate(final String prompt) { + final String paramJson = buildGenerateRequestBody(prompt, null); + final Response response = sendPost(GENERATE_ENDPOINT, paramJson); + return response.bodyStr(); + } + + @Override + public void generate(final String prompt, final Consumer<String> callback) { + final Map<String, Object> paramMap = buildGenerateStreamRequestBody(prompt, null); + ThreadUtil.newThread(() -> sendPostStream(GENERATE_ENDPOINT, paramMap, callback::accept), "ollama-generate-sse").start(); + } + + @Override + public String generate(final String prompt, final String format) { + final String paramJson = buildGenerateRequestBody(prompt, format); + final Response response = sendPost(GENERATE_ENDPOINT, paramJson); + return response.bodyStr(); + } + + @Override + public void generate(final String prompt, final String format, final Consumer<String> callback) { + final Map<String, Object> paramMap = buildGenerateStreamRequestBody(prompt, format); + ThreadUtil.newThread(() -> sendPostStream(GENERATE_ENDPOINT, paramMap, callback::accept), "ollama-generate-sse").start(); + } + + @Override + public String embeddings(final String prompt) { + final String paramJson = buildEmbeddingsRequestBody(prompt); + final Response response = sendPost(EMBEDDINGS_ENDPOINT, paramJson); + return response.bodyStr(); + } + + @Override + public String listModels() { + final Response response = sendGet(LIST_MODELS_ENDPOINT); + return response.bodyStr(); + } + + @Override + public String showModel(final String modelName) { + final String paramJson = buildShowModelRequestBody(modelName); + final Response response =
sendPost(SHOW_MODEL_ENDPOINT, paramJson); + return response.bodyStr(); + } + + @Override + public String pullModel(final String modelName) { + final String paramJson = buildPullModelRequestBody(modelName); + final Response response = sendPost(PULL_MODEL_ENDPOINT, paramJson); + return response.bodyStr(); + } + + @Override + public String deleteModel(final String modelName) { + final String paramJson = buildDeleteModelRequestBody(modelName); + final Response response = sendDeleteRequest(DELETE_MODEL_ENDPOINT, paramJson); + return response.bodyStr(); + } + + @Override + public String copyModel(final String source, final String destination) { + final String paramJson = buildCopyModelRequestBody(source, destination); + final Response response = sendPost(COPY_MODEL_ENDPOINT, paramJson); + return response.bodyStr(); + } + + // 构建chat请求体 + private String buildChatRequestBody(final List<Message> messages) { + final Map<String, Object> paramMap = new HashMap<>(); + paramMap.put("stream",false); + paramMap.put("model", config.getModel()); + paramMap.put("messages", messages); + // 合并其他参数 + paramMap.putAll(config.getAdditionalConfigMap()); + + return JSONUtil.toJsonStr(paramMap); + } + + // 构建chatStream请求体 + private Map<String, Object> buildChatStreamRequestBody(final List<Message> messages) { + final Map<String, Object> paramMap = new HashMap<>(); + paramMap.put("stream", true); + paramMap.put("model", config.getModel()); + paramMap.put("messages", messages); + // 合并其他参数 + paramMap.putAll(config.getAdditionalConfigMap()); + + return paramMap; + } + + // 构建generate请求体 + private String buildGenerateRequestBody(final String prompt, final String format) { + final Map<String, Object> paramMap = new HashMap<>(); + paramMap.put("model", config.getModel()); + paramMap.put("prompt", prompt); + if (StrUtil.isNotBlank(format)) { + paramMap.put("format", format); + } + // 合并其他参数 + paramMap.putAll(config.getAdditionalConfigMap()); + + return JSONUtil.toJsonStr(paramMap); + } + + // 构建generateStream请求体 + private Map<String, Object> buildGenerateStreamRequestBody(final String prompt,
final String format) { + final Map<String, Object> paramMap = new HashMap<>(); + paramMap.put("stream", true); + paramMap.put("model", config.getModel()); + paramMap.put("prompt", prompt); + if (StrUtil.isNotBlank(format)) { + paramMap.put("format", format); + } + // 合并其他参数 + paramMap.putAll(config.getAdditionalConfigMap()); + + return paramMap; + } + + // 构建embeddings请求体 + private String buildEmbeddingsRequestBody(final String prompt) { + final Map<String, Object> paramMap = new HashMap<>(); + paramMap.put("model", config.getModel()); + paramMap.put("prompt", prompt); + // 合并其他参数 + paramMap.putAll(config.getAdditionalConfigMap()); + + return JSONUtil.toJsonStr(paramMap); + } + + // 构建showModel请求体 + private String buildShowModelRequestBody(final String modelName) { + final Map<String, Object> paramMap = new HashMap<>(); + paramMap.put("name", modelName); + + return JSONUtil.toJsonStr(paramMap); + } + + // 构建pullModel请求体 + private String buildPullModelRequestBody(final String modelName) { + final Map<String, Object> paramMap = new HashMap<>(); + paramMap.put("name", modelName); + + return JSONUtil.toJsonStr(paramMap); + } + + // 构建deleteModel请求体 + private String buildDeleteModelRequestBody(final String modelName) { + final Map<String, Object> paramMap = new HashMap<>(); + paramMap.put("name", modelName); + + return JSONUtil.toJsonStr(paramMap); + } + + /** + * 发送DELETE请求 + * + * @param endpoint 请求端点 + * @param paramJson 请求参数JSON + * @return 响应结果 + */ + private Response sendDeleteRequest(final String endpoint, final String paramJson) { + try { + return Request.of(config.getApiUrl() + endpoint) + .method(Method.DELETE) + .header(HeaderName.CONTENT_TYPE, "application/json") + .header(HeaderName.ACCEPT, "application/json") + .body(paramJson) + .send(); + } catch (final Exception e) { + throw new AIException("Failed to send DELETE request: " + e.getMessage(), e); + } + } + + // 构建copyModel请求体 + private String buildCopyModelRequestBody(final String source, final String destination) { + final Map<String, Object> requestBody = new HashMap<>(); + 
requestBody.put("source", source); + requestBody.put("destination", destination); + return JSONUtil.toJsonStr(requestBody); + } + +} diff --git a/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/package-info.java b/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/package-info.java new file mode 100644 index 000000000..05d7f8c1c --- /dev/null +++ b/hutool-ai/src/main/java/cn/hutool/v7/ai/model/ollama/package-info.java @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2025 Hutool Team and hutool.cn + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * 对Ollama的封装实现. + *

+ * 使用方法: + * // 创建AI服务 + *

{@code
+ * OllamaService aiService = AIServiceFactory.getAIService(
+ * new AIConfigBuilder(ModelName.OLLAMA.getValue())
+ * .setApiUrl("http://localhost:11434")
+ * .setModel("qwen2.5-coder:32b")
+ * .build(),
+ * OllamaService.class
+ * );
+ *
+ * // 构造上下文
+ * List<Message> messageList=new ArrayList<>();
+ * messageList.add(new Message("system","你是一个疯疯癫癫的机器人"));
+ * messageList.add(new Message("user","你能帮我做什么"));
+ *
+ * // 输出对话结果
+ * System.out.println(aiService.chat(messageList));
+ * }
+ * + * @author yangruoyu-yumeisoft + * @since 5.8.40 + */ + +package cn.hutool.v7.ai.model.ollama; diff --git a/hutool-ai/src/main/resources/META-INF/services/cn.hutool.v7.ai.core.AIConfig b/hutool-ai/src/main/resources/META-INF/services/cn.hutool.v7.ai.core.AIConfig index 20bf89fa2..f9a398991 100644 --- a/hutool-ai/src/main/resources/META-INF/services/cn.hutool.v7.ai.core.AIConfig +++ b/hutool-ai/src/main/resources/META-INF/services/cn.hutool.v7.ai.core.AIConfig @@ -3,3 +3,4 @@ cn.hutool.v7.ai.model.deepseek.DeepSeekConfig cn.hutool.v7.ai.model.openai.OpenaiConfig cn.hutool.v7.ai.model.doubao.DoubaoConfig cn.hutool.v7.ai.model.grok.GrokConfig +cn.hutool.v7.ai.model.ollama.OllamaConfig diff --git a/hutool-ai/src/main/resources/META-INF/services/cn.hutool.v7.ai.core.AIServiceProvider b/hutool-ai/src/main/resources/META-INF/services/cn.hutool.v7.ai.core.AIServiceProvider index babbf10b6..a9df885fe 100644 --- a/hutool-ai/src/main/resources/META-INF/services/cn.hutool.v7.ai.core.AIServiceProvider +++ b/hutool-ai/src/main/resources/META-INF/services/cn.hutool.v7.ai.core.AIServiceProvider @@ -3,3 +3,4 @@ cn.hutool.v7.ai.model.deepseek.DeepSeekProvider cn.hutool.v7.ai.model.openai.OpenaiProvider cn.hutool.v7.ai.model.doubao.DoubaoProvider cn.hutool.v7.ai.model.grok.GrokProvider +cn.hutool.v7.ai.model.ollama.OllamaProvider diff --git a/hutool-ai/src/test/java/cn/hutool/v7/ai/model/ollama/OllamaServiceTest.java b/hutool-ai/src/test/java/cn/hutool/v7/ai/model/ollama/OllamaServiceTest.java new file mode 100644 index 000000000..47c96863e --- /dev/null +++ b/hutool-ai/src/test/java/cn/hutool/v7/ai/model/ollama/OllamaServiceTest.java @@ -0,0 +1,256 @@ +/* + * Copyright (c) 2025 Hutool Team and hutool.cn + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package cn.hutool.v7.ai.model.ollama; + +import cn.hutool.v7.ai.AIServiceFactory; +import cn.hutool.v7.ai.ModelName; +import cn.hutool.v7.ai.core.AIConfigBuilder; +import cn.hutool.v7.ai.core.Message; +import cn.hutool.v7.core.text.split.SplitUtil; +import cn.hutool.v7.core.thread.ThreadUtil; +import cn.hutool.v7.json.JSON; +import cn.hutool.v7.json.JSONArray; +import cn.hutool.v7.json.JSONUtil; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * OllamaService + * + * @author yangruoyu-yumeisoft + * @since 5.8.40 + */ +class OllamaServiceTest { + // 创建service + OllamaService ollamaService = AIServiceFactory.getAIService( + new AIConfigBuilder(ModelName.OLLAMA.getValue()) + // 这里填写Ollama服务的地址 + .setApiUrl("http://127.0.0.1:11434") + // 这里填写使用的模型 + .setModel("qwen2.5-coder:32b") + .build(), + OllamaService.class + ); + + // 假设有一个Java工程师的Agent提示词 + String javaEngineerPrompt= """ + # 角色 + 你是一位精通Spring Boot 3.0的资深Java全栈工程师,具备以下核心能力: + - 精通Spring Boot 3.0新特性与最佳实践 + - 熟练整合Hutool工具包、Redis数据访问、Feign远程调用、FreeMarker模板引擎 + - 能输出符合工程规范的代码结构和配置文件 + - 注重代码可读性与注释规范 + + # 任务 + 请完成以下编程任务(按优先级排序): + 1. 
**核心要求** + - 使用Spring Boot 3.0构建项目 + - 必须包含以下依赖: + - `cn.hutool:hutool-all`(最新版) + - `org.springframework.boot:spring-boot-starter-data-redis` + - `org.springframework.cloud:spring-cloud-starter-openfeign` + - `org.springframework.boot:spring-boot-starter-freemarker` + 2. **约束条件** + - 代码需符合Java 17语法规范 + - 每个类必须包含Javadoc风格的类注释 + - 关键方法需添加`@Api`/`@ApiOperation`注解(若涉及接口) + - Redis操作需使用`RedisTemplate`实现 + 3. **实现流程** + ``` + 1. 生成pom.xml依赖配置 + 2. 创建基础配置类(如RedisConfig) + 3. 编写Feign客户端接口 + 4. 实现FreeMarker模板渲染服务 + 5. 提供完整Controller示例 + ``` + + # 输出要求 + 请以严格Markdown格式输出,每个模块独立代码块: + ```markdown + ## 1. 项目依赖配置(pom.xml片段) + ```xml + ... + ``` + + ## 2. Redis配置类 + ```java + @Configuration + public class RedisConfig { ... } + ``` + + ## 3. Feign客户端示例 + ```java + @FeignClient(name = "...") + public interface ... { ... } + ``` + + ## 4. FreeMarker模板服务 + ```java + @Service + public class TemplateService { ... } + ``` + + ## 5. 控制器示例 + ```java + @RestController + @RequestMapping("/example") + public class ExampleController { ... 
} + ``` + ``` + + # 示例片段(供格式参考) + ```java + /** + * 示例Feign客户端 + * @since 1.0.0 + */ + @FeignClient(name = "demo-service", url = "${demo.service.url}") + public interface DemoClient { + + @GetMapping("/data/{id}") + @ApiOperation("获取示例数据") + ResponseEntity getData(@PathVariable("id") Long id); + } + ``` + + 请按此规范输出完整代码结构,确保自动化程序可直接解析生成项目文件。"""; + + /** + * 同步方式调用 + */ + @Test + @Disabled + void testSimple() { + final String answer = ollamaService.chat("写一个疯狂星期四广告词"); + assertNotNull(answer); + } + + /** + * 按流方式输出 + */ + @Test + @Disabled + void testStream() { + final AtomicBoolean isDone = new AtomicBoolean(false); + final AtomicReference errorMessage = new AtomicReference<>(); + ollamaService.chat("写一个疯狂星期四广告词", data -> { + // 输出到控制台 + final JSON streamData = JSONUtil.parse(data); + if (streamData.getByPath("error") != null) { + isDone.set(true); + errorMessage.set(streamData.getByPath("error").toString()); + return; + } + + if ("true".equals(streamData.getByPath("done").toString())) { + isDone.set(true); + } + }); + // 轮询检查结束标志 + while (!isDone.get()) { + ThreadUtil.sleep(100); + } + if (errorMessage.get() != null) { + throw new RuntimeException(errorMessage.get()); + } + } + + /** + * 带历史上下文的同步方式调用 + */ + @Test + @Disabled + void testSimpleWithHistory(){ + final List messageList=new ArrayList<>(); + messageList.add(new Message("system",javaEngineerPrompt)); + messageList.add(new Message("user","帮我写一个Java通过Post方式发送JSON给HTTP接口,请求头带有token")); + final String result = ollamaService.chat(messageList); + assertNotNull(result); + } + + @Test + @Disabled + void testStreamWithHistory(){ + final List messageList=new ArrayList<>(); + messageList.add(new Message("system",javaEngineerPrompt)); + messageList.add(new Message("user","帮我写一个Java通过Post方式发送JSON给HTTP接口,请求头带有token")); + final AtomicBoolean isDone = new AtomicBoolean(false); + final AtomicReference errorMessage = new AtomicReference<>(); + ollamaService.chat(messageList, data -> { + // 输出到控制台 + final JSON streamData = 
JSONUtil.parse(data); + if (streamData.getByPath("error") != null) { + isDone.set(true); + errorMessage.set(streamData.getByPath("error").toString()); + return; + } + + if ("true".equals(streamData.getByPath("done").toString())) { + isDone.set(true); + } + }); + // 轮询检查结束标志 + while (!isDone.get()) { + ThreadUtil.sleep(100); + } + if (errorMessage.get() != null) { + throw new RuntimeException(errorMessage.get()); + } + } + + /** + * 列出所有已经拉取到服务器上的模型 + */ + @Test + @Disabled + void testListModels(){ + final String models = ollamaService.listModels(); + final JSONArray modelList = JSONUtil.parse(models).getByPath("models", JSONArray.class); + } + + /** + * 让Ollama拉取模型 + */ + @Test + @Disabled + void testPullModel(){ + final String result = ollamaService.pullModel("qwen2.5:0.5b"); + final List<String> lines = SplitUtil.splitTrim(result, "\n"); + for (final String line : lines) { + if(line.contains("error")){ + throw new RuntimeException(JSONUtil.parse(line).getByPath("error").toString()); + } + } + } + + /** + * 让Ollama删除已经存在的模型 + */ + @Test + @Disabled + void testDeleteModel(){ + // 不会返回任何信息 + ollamaService.deleteModel("qwen2.5:0.5b"); + } +} +