diff --git a/hutool-ai/src/main/java/cn/hutool/ai/ModelName.java b/hutool-ai/src/main/java/cn/hutool/ai/ModelName.java
index ceaca7bb8..6e680fe76 100644
--- a/hutool-ai/src/main/java/cn/hutool/ai/ModelName.java
+++ b/hutool-ai/src/main/java/cn/hutool/ai/ModelName.java
@@ -43,7 +43,11 @@ public enum ModelName {
 	/**
 	 * grok
 	 */
-	GROK("grok");
+	GROK("grok"),
+	/**
+	 * ollama
+	 */
+	OLLAMA("ollama");
 
 	private final String value;
 
diff --git a/hutool-ai/src/main/java/cn/hutool/ai/Models.java b/hutool-ai/src/main/java/cn/hutool/ai/Models.java
index 04856d367..c1dd6266d 100644
--- a/hutool-ai/src/main/java/cn/hutool/ai/Models.java
+++ b/hutool-ai/src/main/java/cn/hutool/ai/Models.java
@@ -192,4 +192,27 @@ public class Models {
 		}
 	}
 
+	/**
+	 * Models served by a local Ollama instance.
+	 */
+	public enum Ollama {
+		/**
+		 * Qwen3 32B
+		 */
+		QWEN3_32B("qwen3:32b");
+
+		private final String model;
+
+		Ollama(final String model) {
+			this.model = model;
+		}
+
+		/**
+		 * @return the model identifier sent to the Ollama API, e.g. "qwen3:32b"
+		 */
+		public String getModel() {
+			return model;
+		}
+	}
+
 }
diff --git a/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/OllamaCommon.java b/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/OllamaCommon.java
new file mode 100644
index 000000000..e504c61c1
--- /dev/null
+++ b/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/OllamaCommon.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2025 Hutool Team and hutool.cn
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package cn.hutool.ai.model.ollama;
+
+/**
+ * Common constants and enums shared by the Ollama integration.
+ *
+ * @author yangruoyu-yumeisoft
+ * @since 5.8.40
+ */
+public class OllamaCommon {
+
+	/**
+	 * Response formats accepted by the Ollama generate/chat APIs.
+	 */
+	public enum OllamaFormat {
+		/**
+		 * JSON structured output
+		 */
+		JSON("json"),
+		/**
+		 * no explicit format (plain text)
+		 */
+		NONE("");
+
+		private final String format;
+
+		OllamaFormat(final String format) {
+			this.format = format;
+		}
+
+		/**
+		 * @return the wire value of this format, may be empty for {@link #NONE}
+		 */
+		public String getFormat() {
+			return format;
+		}
+	}
+
+	/**
+	 * Keys of the "options" object supported by Ollama requests.
+	 */
+	public static class Options {
+		/**
+		 * Sampling temperature
+		 */
+		public static final String TEMPERATURE = "temperature";
+		/**
+		 * top_p (nucleus sampling) parameter
+		 */
+		public static final String TOP_P = "top_p";
+		/**
+		 * top_k parameter
+		 */
+		public static final String TOP_K = "top_k";
+		/**
+		 * Maximum number of tokens to generate
+		 */
+		public static final String NUM_PREDICT = "num_predict";
+		/**
+		 * Random seed
+		 */
+		public static final String SEED = "seed";
+
+		/** Constants holder — not instantiable. */
+		private Options() {
+		}
+	}
+}
diff --git a/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/OllamaConfig.java b/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/OllamaConfig.java
new file mode 100644
index 000000000..761c1f1b0
--- /dev/null
+++ b/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/OllamaConfig.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2025 Hutool Team and hutool.cn
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package cn.hutool.ai.model.ollama;
+
+import cn.hutool.ai.ModelName;
+import cn.hutool.ai.Models;
+import cn.hutool.ai.core.BaseConfig;
+
+/**
+ * Ollama configuration: initializes the API endpoint and sets the default model.
+ *
+ * @author yangruoyu-yumeisoft
+ * @since 5.8.40
+ */
+public class OllamaConfig extends BaseConfig {
+
+	/** Default endpoint of a locally running Ollama daemon. */
+	private static final String API_URL = "http://localhost:11434";
+
+	/** Model used when the caller does not specify one. */
+	private static final String DEFAULT_MODEL = Models.Ollama.QWEN3_32B.getModel();
+
+	/**
+	 * Creates a config pointing at the local Ollama daemon with the default model.
+	 */
+	public OllamaConfig() {
+		setApiUrl(API_URL);
+		setModel(DEFAULT_MODEL);
+	}
+
+	/**
+	 * Creates a config with a custom endpoint and the default model.
+	 *
+	 * @param apiUrl Ollama API base URL
+	 */
+	public OllamaConfig(final String apiUrl) {
+		this();
+		setApiUrl(apiUrl);
+	}
+
+	/**
+	 * Creates a config with a custom endpoint and model.
+	 *
+	 * @param apiUrl Ollama API base URL
+	 * @param model  model identifier, e.g. "qwen3:32b"
+	 */
+	public OllamaConfig(final String apiUrl, final String model) {
+		this();
+		setApiUrl(apiUrl);
+		setModel(model);
+	}
+
+	@Override
+	public String getModelName() {
+		// Single source of truth: reuse ModelName.OLLAMA instead of repeating the literal
+		return ModelName.OLLAMA.getValue();
+	}
+
+}
diff --git a/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/OllamaProvider.java b/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/OllamaProvider.java
new file mode 100644
index 000000000..ec2639b33
--- /dev/null
+++ b/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/OllamaProvider.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2025 Hutool Team and hutool.cn
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package cn.hutool.ai.model.ollama;
+
+import cn.hutool.ai.ModelName;
+import cn.hutool.ai.core.AIConfig;
+import cn.hutool.ai.core.AIServiceProvider;
+
+/**
+ * SPI provider that creates the Ollama service implementation.
+ *
+ * @author yangruoyu-yumeisoft
+ * @since 5.8.40
+ */
+public class OllamaProvider implements AIServiceProvider {
+
+	@Override
+	public String getServiceName() {
+		// Must stay in sync with the name registered in ModelName
+		return ModelName.OLLAMA.getValue();
+	}
+
+	@Override
+	public OllamaService create(final AIConfig config) {
+		return new OllamaServiceImpl(config);
+	}
+}
diff --git a/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/OllamaService.java b/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/OllamaService.java
new file mode 100644
index 000000000..e1a947e68
--- /dev/null
+++ b/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/OllamaService.java
@@ -0,0 +1,151 @@
+/*
+ * Copyright (c) 2025 Hutool Team and hutool.cn
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package cn.hutool.ai.model.ollama;
+
+import cn.hutool.ai.core.AIService;
+import cn.hutool.ai.core.Message;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Consumer;
+
+/**
+ * Ollama-specific operations on top of {@link AIService}.
+ *
+ * @author yangruoyu-yumeisoft
+ * @since 5.8.40
+ */
+public interface OllamaService extends AIService {
+
+	/**
+	 * Generates a completion for the given prompt (non-streaming).
+	 *
+	 * @param prompt input prompt
+	 * @return the AI answer
+	 * @since 5.8.40
+	 */
+	String generate(String prompt);
+
+	/**
+	 * Generates a completion for the given prompt, SSE streaming output.
+	 *
+	 * @param prompt   input prompt
+	 * @param callback consumer invoked for each streamed chunk
+	 * @since 5.8.40
+	 */
+	void generate(String prompt, Consumer<String> callback);
+
+	/**
+	 * Generates a completion with an explicit response format (non-streaming).
+	 *
+	 * @param prompt input prompt
+	 * @param format response format, see {@link OllamaCommon.OllamaFormat}
+	 * @return the AI answer
+	 * @since 5.8.40
+	 */
+	String generate(String prompt, String format);
+
+	/**
+	 * Generates a completion with an explicit response format, SSE streaming output.
+	 *
+	 * @param prompt   input prompt
+	 * @param format   response format, see {@link OllamaCommon.OllamaFormat}
+	 * @param callback consumer invoked for each streamed chunk
+	 * @since 5.8.40
+	 */
+	void generate(String prompt, String format, Consumer<String> callback);
+
+	/**
+	 * Computes the embedding vector of the given text.
+	 *
+	 * @param prompt input text
+	 * @return embedding result as returned by the API
+	 * @since 5.8.40
+	 */
+	String embeddings(String prompt);
+
+	/**
+	 * Lists the models available locally.
+	 *
+	 * @return model list as returned by the API
+	 * @since 5.8.40
+	 */
+	String listModels();
+
+	/**
+	 * Shows information about a model.
+	 *
+	 * @param modelName model name
+	 * @return model information
+	 * @since 5.8.40
+	 */
+	String showModel(String modelName);
+
+	/**
+	 * Pulls (downloads) a model.
+	 *
+	 * @param modelName model name
+	 * @return pull result
+	 * @since 5.8.40
+	 */
+	String pullModel(String modelName);
+
+	/**
+	 * Deletes a model.
+	 *
+	 * @param modelName model name
+	 * @return delete result
+	 * @since 5.8.40
+	 */
+	String deleteModel(String modelName);
+
+	/**
+	 * Copies a model.
+	 *
+	 * @param source      source model name
+	 * @param destination destination model name
+	 * @return copy result
+	 * @since 5.8.40
+	 */
+	String copyModel(String source, String destination);
+
+	/**
+	 * Convenience chat with a single user prompt.
+	 *
+	 * @param prompt user prompt
+	 * @return the AI answer
+	 * @since 5.8.40
+	 */
+	default String chat(final String prompt) {
+		final List<Message> messages = new ArrayList<>();
+		messages.add(new Message("user", prompt));
+		return chat(messages);
+	}
+
+	/**
+	 * Convenience chat with a single user prompt, SSE streaming output.
+	 *
+	 * @param prompt   user prompt
+	 * @param callback consumer invoked for each streamed chunk
+	 * @since 5.8.40
+	 */
+	default void chat(final String prompt, final Consumer<String> callback) {
+		final List<Message> messages = new ArrayList<>();
+		messages.add(new Message("user", prompt));
+		chat(messages, callback);
+	}
+}
diff --git a/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/OllamaServiceImpl.java b/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/OllamaServiceImpl.java
new file mode 100644
index 000000000..62b6bc0a4
--- /dev/null
+++ b/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/OllamaServiceImpl.java
@@ -0,0 +1,273 @@
+/*
+ * Copyright (c) 2025 Hutool Team and hutool.cn
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package cn.hutool.ai.model.ollama;
+
+import cn.hutool.ai.AIException;
+import cn.hutool.ai.core.AIConfig;
+import cn.hutool.ai.core.BaseAIService;
+import cn.hutool.ai.core.Message;
+import cn.hutool.core.bean.BeanPath;
+import cn.hutool.core.thread.ThreadUtil;
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.http.Header;
+import cn.hutool.http.HttpRequest;
+import cn.hutool.http.HttpResponse;
+import cn.hutool.json.JSONObject;
+import cn.hutool.json.JSONUtil;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Consumer;
+
+/**
+ * Ollama service implementation backed by the Ollama HTTP API.
+ *
+ * @author yangruoyu-yumeisoft
+ * @since 5.8.40
+ */
+public class OllamaServiceImpl extends BaseAIService implements OllamaService {
+
+	// Chat completion endpoint
+	private static final String CHAT_ENDPOINT = "/api/chat";
+	// Text generation endpoint
+	private static final String GENERATE_ENDPOINT = "/api/generate";
+	// Text embedding endpoint
+	private static final String EMBEDDINGS_ENDPOINT = "/api/embeddings";
+	// List local models
+	private static final String LIST_MODELS_ENDPOINT = "/api/tags";
+	// Show model information
+	private static final String SHOW_MODEL_ENDPOINT = "/api/show";
+	// Pull a model
+	private static final String PULL_MODEL_ENDPOINT = "/api/pull";
+	// Delete a model
+	private static final String DELETE_MODEL_ENDPOINT = "/api/delete";
+	// Copy a model
+	private static final String COPY_MODEL_ENDPOINT = "/api/copy";
+
+	/**
+	 * Constructor.
+	 *
+	 * @param config AI config
+	 */
+	public OllamaServiceImpl(final AIConfig config) {
+		super(config);
+	}
+
+	@Override
+	public String chat(final List<Message> messages) {
+		final String paramJson = buildChatRequestBody(messages);
+		final HttpResponse response = sendPost(CHAT_ENDPOINT, paramJson);
+		final JSONObject responseJson = JSONUtil.parseObj(response.body());
+		// Ollama reports failures as {"error": "..."} in the body
+		final Object errorMessage = BeanPath.create("error").get(responseJson);
+		if (errorMessage != null) {
+			// Use the module exception type rather than a bare RuntimeException
+			throw new AIException(errorMessage.toString());
+		}
+		final Object content = BeanPath.create("message.content").get(responseJson);
+		if (content == null) {
+			// Guard against NPE on an unexpected response shape
+			throw new AIException("Unexpected Ollama response: " + response.body());
+		}
+		return content.toString();
+	}
+
+	@Override
+	public void chat(final List<Message> messages, final Consumer<String> callback) {
+		final Map<String, Object> paramMap = buildChatStreamRequestBody(messages);
+		ThreadUtil.newThread(() -> sendPostStream(CHAT_ENDPOINT, paramMap, callback::accept), "ollama-chat-sse").start();
+	}
+
+	@Override
+	public String generate(final String prompt) {
+		final String paramJson = buildGenerateRequestBody(prompt, null);
+		final HttpResponse response = sendPost(GENERATE_ENDPOINT, paramJson);
+		return response.body();
+	}
+
+	@Override
+	public void generate(final String prompt, final Consumer<String> callback) {
+		final Map<String, Object> paramMap = buildGenerateStreamRequestBody(prompt, null);
+		ThreadUtil.newThread(() -> sendPostStream(GENERATE_ENDPOINT, paramMap, callback::accept), "ollama-generate-sse").start();
+	}
+
+	@Override
+	public String generate(final String prompt, final String format) {
+		final String paramJson = buildGenerateRequestBody(prompt, format);
+		final HttpResponse response = sendPost(GENERATE_ENDPOINT, paramJson);
+		return response.body();
+	}
+
+	@Override
+	public void generate(final String prompt, final String format, final Consumer<String> callback) {
+		final Map<String, Object> paramMap = buildGenerateStreamRequestBody(prompt, format);
+		ThreadUtil.newThread(() -> sendPostStream(GENERATE_ENDPOINT, paramMap, callback::accept), "ollama-generate-sse").start();
+	}
+
+	@Override
+	public String embeddings(final String prompt) {
+		final String paramJson = buildEmbeddingsRequestBody(prompt);
+		final HttpResponse response = sendPost(EMBEDDINGS_ENDPOINT, paramJson);
+		return response.body();
+	}
+
+	@Override
+	public String listModels() {
+		final HttpResponse response = sendGet(LIST_MODELS_ENDPOINT);
+		return response.body();
+	}
+
+	@Override
+	public String showModel(final String modelName) {
+		final String paramJson = buildShowModelRequestBody(modelName);
+		final HttpResponse response = sendPost(SHOW_MODEL_ENDPOINT, paramJson);
+		return response.body();
+	}
+
+	@Override
+	public String pullModel(final String modelName) {
+		final String paramJson = buildPullModelRequestBody(modelName);
+		final HttpResponse response = sendPost(PULL_MODEL_ENDPOINT, paramJson);
+		return response.body();
+	}
+
+	@Override
+	public String deleteModel(final String modelName) {
+		final String paramJson = buildDeleteModelRequestBody(modelName);
+		final HttpResponse response = sendDeleteRequest(DELETE_MODEL_ENDPOINT, paramJson);
+		return response.body();
+	}
+
+	@Override
+	public String copyModel(final String source, final String destination) {
+		final String paramJson = buildCopyModelRequestBody(source, destination);
+		final HttpResponse response = sendPost(COPY_MODEL_ENDPOINT, paramJson);
+		return response.body();
+	}
+
+	// Build the non-streaming chat request body
+	private String buildChatRequestBody(final List<Message> messages) {
+		final Map<String, Object> paramMap = new HashMap<>();
+		// /api/chat streams by default; request a single response object
+		paramMap.put("stream", false);
+		paramMap.put("model", config.getModel());
+		paramMap.put("messages", messages);
+		// Merge user-supplied extra parameters
+		paramMap.putAll(config.getAdditionalConfigMap());
+
+		return JSONUtil.toJsonStr(paramMap);
+	}
+
+	// Build the streaming chat request body
+	private Map<String, Object> buildChatStreamRequestBody(final List<Message> messages) {
+		final Map<String, Object> paramMap = new HashMap<>();
+		paramMap.put("stream", true);
+		paramMap.put("model", config.getModel());
+		paramMap.put("messages", messages);
+		// Merge user-supplied extra parameters
+		paramMap.putAll(config.getAdditionalConfigMap());
+
+		return paramMap;
+	}
+
+	// Build the non-streaming generate request body
+	private String buildGenerateRequestBody(final String prompt, final String format) {
+		final Map<String, Object> paramMap = new HashMap<>();
+		// BUGFIX: /api/generate streams by default; without stream=false the body
+		// is a sequence of JSON objects rather than a single response
+		paramMap.put("stream", false);
+		paramMap.put("model", config.getModel());
+		paramMap.put("prompt", prompt);
+		if (StrUtil.isNotBlank(format)) {
+			paramMap.put("format", format);
+		}
+		// Merge user-supplied extra parameters
+		paramMap.putAll(config.getAdditionalConfigMap());
+
+		return JSONUtil.toJsonStr(paramMap);
+	}
+
+	// Build the streaming generate request body
+	private Map<String, Object> buildGenerateStreamRequestBody(final String prompt, final String format) {
+		final Map<String, Object> paramMap = new HashMap<>();
+		paramMap.put("stream", true);
+		paramMap.put("model", config.getModel());
+		paramMap.put("prompt", prompt);
+		if (StrUtil.isNotBlank(format)) {
+			paramMap.put("format", format);
+		}
+		// Merge user-supplied extra parameters
+		paramMap.putAll(config.getAdditionalConfigMap());
+
+		return paramMap;
+	}
+
+	// Build the embeddings request body (this endpoint does not stream)
+	private String buildEmbeddingsRequestBody(final String prompt) {
+		final Map<String, Object> paramMap = new HashMap<>();
+		paramMap.put("model", config.getModel());
+		paramMap.put("prompt", prompt);
+		// Merge user-supplied extra parameters
+		paramMap.putAll(config.getAdditionalConfigMap());
+
+		return JSONUtil.toJsonStr(paramMap);
+	}
+
+	// Build the show-model request body
+	private String buildShowModelRequestBody(final String modelName) {
+		final Map<String, Object> paramMap = new HashMap<>();
+		paramMap.put("name", modelName);
+
+		return JSONUtil.toJsonStr(paramMap);
+	}
+
+	// Build the pull-model request body
+	private String buildPullModelRequestBody(final String modelName) {
+		final Map<String, Object> paramMap = new HashMap<>();
+		// BUGFIX: /api/pull streams progress objects by default
+		paramMap.put("stream", false);
+		paramMap.put("name", modelName);
+
+		return JSONUtil.toJsonStr(paramMap);
+	}
+
+	// Build the delete-model request body
+	private String buildDeleteModelRequestBody(final String modelName) {
+		final Map<String, Object> paramMap = new HashMap<>();
+		paramMap.put("name", modelName);
+
+		return JSONUtil.toJsonStr(paramMap);
+	}
+
+	// Build the copy-model request body
+	private String buildCopyModelRequestBody(final String source, final String destination) {
+		final Map<String, Object> paramMap = new HashMap<>();
+		paramMap.put("source", source);
+		paramMap.put("destination", destination);
+
+		return JSONUtil.toJsonStr(paramMap);
+	}
+
+	/**
+	 * Sends a DELETE request with a JSON body.
+	 *
+	 * @param endpoint  request endpoint
+	 * @param paramJson request body JSON
+	 * @return HTTP response
+	 */
+	private HttpResponse sendDeleteRequest(final String endpoint, final String paramJson) {
+		try {
+			return HttpRequest.delete(config.getApiUrl() + endpoint)
+				.header(Header.CONTENT_TYPE, "application/json")
+				.header(Header.ACCEPT, "application/json")
+				.body(paramJson)
+				.timeout(config.getTimeout())
+				.execute();
+		} catch (final Exception e) {
+			throw new AIException("Failed to send DELETE request: " + e.getMessage(), e);
+		}
+	}
+
+}
diff --git
a/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/package-info.java b/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/package-info.java
new file mode 100644
index 000000000..0aa8e1e97
--- /dev/null
+++ b/hutool-ai/src/main/java/cn/hutool/ai/model/ollama/package-info.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2025 Hutool Team and hutool.cn
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Encapsulation of the Ollama API.
+ *
+ * <p>Usage:</p>
+ * <pre>{@code
+ * // Create the AI service
+ * OllamaService aiService = AIServiceFactory.getAIService(
+ *     new AIConfigBuilder(ModelName.OLLAMA.getValue())
+ *         .setApiUrl("http://localhost:11434")
+ *         .setModel("qwen2.5-coder:32b")
+ *         .build(),
+ *     OllamaService.class);
+ *
+ * // Build the conversation context
+ * List<Message> messageList = new ArrayList<>();
+ * messageList.add(new Message("system", "You are a whimsical robot"));
+ * messageList.add(new Message("user", "What can you do for me?"));
+ *
+ * // Print the chat result
+ * System.out.println(aiService.chat(messageList));
+ * }</pre>
+ *
+ * @author yangruoyu-yumeisoft
+ * @since 5.8.40
+ */
+package cn.hutool.ai.model.ollama;
diff --git a/hutool-ai/src/main/resources/META-INF/services/cn.hutool.ai.core.AIConfig b/hutool-ai/src/main/resources/META-INF/services/cn.hutool.ai.core.AIConfig
index f26782976..f37c7fddd 100644
--- a/hutool-ai/src/main/resources/META-INF/services/cn.hutool.ai.core.AIConfig
+++ b/hutool-ai/src/main/resources/META-INF/services/cn.hutool.ai.core.AIConfig
@@ -3,3 +3,4 @@ cn.hutool.ai.model.deepseek.DeepSeekConfig
 cn.hutool.ai.model.openai.OpenaiConfig
 cn.hutool.ai.model.doubao.DoubaoConfig
 cn.hutool.ai.model.grok.GrokConfig
+cn.hutool.ai.model.ollama.OllamaConfig
diff --git a/hutool-ai/src/main/resources/META-INF/services/cn.hutool.ai.core.AIServiceProvider b/hutool-ai/src/main/resources/META-INF/services/cn.hutool.ai.core.AIServiceProvider
index d796fd60c..58c8a5d54 100644
--- a/hutool-ai/src/main/resources/META-INF/services/cn.hutool.ai.core.AIServiceProvider
+++ b/hutool-ai/src/main/resources/META-INF/services/cn.hutool.ai.core.AIServiceProvider
@@ -3,3 +3,4 @@ cn.hutool.ai.model.deepseek.DeepSeekProvider
 cn.hutool.ai.model.openai.OpenaiProvider
 cn.hutool.ai.model.doubao.DoubaoProvider
 cn.hutool.ai.model.grok.GrokProvider
+cn.hutool.ai.model.ollama.OllamaProvider