diff --git a/docs/blog/2025-03-08-blog/index.md b/docs/blog/2025-03-08-blog/index.md
index f520696e..1b6f4935 100644
--- a/docs/blog/2025-03-08-blog/index.md
+++ b/docs/blog/2025-03-08-blog/index.md
@@ -209,7 +209,6 @@ To download/pull the model into your Ollama server, run the following command in
 
 ```shell
 ollama pull mistral
-
 ```
 
 You can list the models available on your model server by running the following command in your terminal.
diff --git a/docs/docs/apis-generate/generate-with-tools.md b/docs/docs/apis-generate/generate-with-tools.md
index 236e8323..9f1d17e1 100644
--- a/docs/docs/apis-generate/generate-with-tools.md
+++ b/docs/docs/apis-generate/generate-with-tools.md
@@ -31,7 +31,19 @@ You could do that with ease with the `function calling` capabilities of the mode
 
 ### Create Tools/Functions
 
-We can create static functions as our tools.
+There are two ways to create and register your tools:
+
+1. **Define static or regular methods and register them explicitly as tools.**
+   You can create standalone functions (static or instance methods) and manually associate them with your tool specifications.
+
+2. **Use annotation-based tool discovery for automatic registration.**
+   By annotating your tool methods, you can leverage `registerAnnotatedTools()` to automatically scan your classpath, find all annotated tool functions, and register them without extra boilerplate.
+
+Learn more about annotation-based tool registration [here](/apis-generate/chat-with-tools#annotation-based-tool-registration).
+
+Choose the approach that best fits your project—manual for precise control, or annotation-based for easier scaling.
+
+Let's start by exploring the first approach: manually defining and registering your tools/functions.
 
 This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns fuel price value.
 
diff --git a/src/main/java/io/github/ollama4j/Ollama.java b/src/main/java/io/github/ollama4j/Ollama.java
index cfbdf413..36f8bf4e 100644
--- a/src/main/java/io/github/ollama4j/Ollama.java
+++ b/src/main/java/io/github/ollama4j/Ollama.java
@@ -804,9 +804,21 @@ public class Ollama {
         ocm.setResponse(request.getPrompt());
         chatRequest.setMessages(msgs);
         msgs.add(ocm);
+
+        // Merge request's tools and globally registered tools into a new list to avoid mutating the
+        // original request
+        List allTools = new ArrayList<>();
+        if (request.getTools() != null) {
+            allTools.addAll(request.getTools());
+        }
+        List registeredTools = this.getRegisteredTools();
+        if (registeredTools != null) {
+            allTools.addAll(registeredTools);
+        }
+
         OllamaChatTokenHandler hdlr = null;
         chatRequest.setUseTools(true);
-        chatRequest.setTools(request.getTools());
+        chatRequest.setTools(allTools);
         if (streamObserver != null) {
             chatRequest.setStream(true);
             if (streamObserver.getResponseStreamHandler() != null) {
diff --git a/src/main/java/io/github/ollama4j/models/request/OllamaChatEndpointCaller.java b/src/main/java/io/github/ollama4j/models/request/OllamaChatEndpointCaller.java
index fef35e36..0cd00276 100644
--- a/src/main/java/io/github/ollama4j/models/request/OllamaChatEndpointCaller.java
+++ b/src/main/java/io/github/ollama4j/models/request/OllamaChatEndpointCaller.java
@@ -44,12 +44,15 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
     /**
      * Parses streamed Response line from ollama chat. Using {@link
-     * com.fasterxml.jackson.databind.ObjectMapper#readValue(String, TypeReference)} should throw
+     * com.fasterxml.jackson.databind.ObjectMapper#readValue(String, TypeReference)}
+     * should throw
      * {@link IllegalArgumentException} in case of null line or {@link
-     * com.fasterxml.jackson.core.JsonParseException} in case the JSON Object cannot be parsed to a
-     * {@link OllamaChatResponseModel}. Thus, the ResponseModel should never be null.
+     * com.fasterxml.jackson.core.JsonParseException} in case the JSON Object cannot
+     * be parsed to a
+     * {@link OllamaChatResponseModel}. Thus, the ResponseModel should never be
+     * null.
      *
-     * @param line streamed line of ollama stream response
+     * @param line           streamed line of ollama stream response
      * @param responseBuffer Stringbuffer to add latest response message part to
      * @return TRUE, if ollama-Response has 'done' state
      */
@@ -59,9 +62,11 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
         try {
             OllamaChatResponseModel ollamaResponseModel =
                     Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
-            // It seems that under heavy load Ollama responds with an empty chat message part in the
+            // It seems that under heavy load Ollama responds with an empty chat message
+            // part in the
             // streamed response.
-            // Thus, we null check the message and hope that the next streamed response has some
+            // Thus, we null check the message and hope that the next streamed response has
+            // some
             // message content again.
             OllamaChatMessage message = ollamaResponseModel.getMessage();
             if (message != null) {
@@ -118,7 +123,9 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
                 parseResponseAndAddToBuffer(line, responseBuffer, thinkingBuffer);
                 ollamaChatResponseModel =
                         Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
-                if (body.stream && ollamaChatResponseModel.getMessage().getToolCalls() != null) {
+                if (body.stream
+                        && ollamaChatResponseModel.getMessage() != null
+                        && ollamaChatResponseModel.getMessage().getToolCalls() != null) {
                     wantedToolsForStream = ollamaChatResponseModel.getMessage().getToolCalls();
                 }
                 if (finished && body.stream) {
@@ -153,7 +160,8 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
     }
 
     /**
-     * Handles error status codes and appends error messages to the response buffer. Returns true if
+     * Handles error status codes and appends error messages to the response buffer.
+     * Returns true if
      * an error was handled, false otherwise.
      */
     private boolean handleErrorStatus(int statusCode, String line, StringBuilder responseBuffer)
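
The behavioral core of the Ollama.java change above is that request-level tools and globally registered tools are copied into a fresh list instead of passing request.getTools() straight through, so neither source collection is mutated. Below is a minimal, self-contained sketch of that merge pattern; it uses plain strings in place of the library's tool type and a hypothetical mergeTools helper, since the full Tools API is not shown in this diff.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class ToolMergeSketch {

    // Mirrors the merge added in Ollama.java: copy both sources into a new list,
    // tolerating null on either side, without mutating the originals.
    // Generic helper for illustration only; the real code works on the library's tool type.
    static <T> List<T> mergeTools(List<T> requestTools, List<T> registeredTools) {
        List<T> allTools = new ArrayList<>();
        if (requestTools != null) {
            allTools.addAll(requestTools);
        }
        if (registeredTools != null) {
            allTools.addAll(registeredTools);
        }
        return allTools;
    }

    public static void main(String[] args) {
        List<String> fromRequest = Collections.singletonList("get-fuel-price");
        List<String> registered = Collections.singletonList("get-weather");

        // Prints [get-fuel-price, get-weather]; the input lists are left untouched.
        System.out.println(mergeTools(fromRequest, registered));
        // Null-safe when nothing was registered globally.
        System.out.println(mergeTools(fromRequest, null));
    }
}

Building a new list rather than adding the registered tools onto request.getTools() keeps the caller's request object unchanged, which matters if the same request instance is reused across calls.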