Merge pull request #226 from ollama4j/fix-docs-build

Fix: setting tools in `generate()` method and issue #227
Amith Koujalgi
2025-11-02 21:37:55 +05:30
committed by GitHub
4 changed files with 42 additions and 11 deletions

View File

@@ -209,7 +209,6 @@ To download/pull the model into your Ollama server, run the following command in
```shell
ollama pull mistral
```
You can list the models available on your model server by running the following command in your terminal.
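The command itself sits just outside this hunk; for reference, local models are listed with the standard Ollama CLI command:

```shell
ollama list
```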

View File

@@ -31,7 +31,19 @@ You could do that with ease with the `function calling` capabilities of the mode
### Create Tools/Functions
We can create static functions as our tools.
There are two ways to create and register your tools:
1. **Define static or regular methods and register them explicitly as tools.**
You can create standalone functions (static or instance methods) and manually associate them with your tool specifications.
2. **Use annotation-based tool discovery for automatic registration.**
By annotating your tool methods, you can leverage `registerAnnotatedTools()` to automatically scan your classpath, find all annotated tool functions, and register them without extra boilerplate.
Learn more about annotation-based tool registration [here](/apis-generate/chat-with-tools#annotation-based-tool-registration).
Choose the approach that best fits your project—manual for precise control, or annotation-based for easier scaling.
Let's start by exploring the first approach: manually defining and registering your tools/functions.
This function takes the arguments `location` and `fuelType`, performs an operation with them, and returns the fuel price.
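For illustration, here is a minimal sketch of such a static tool function. The parameter type is an assumption (a plain `Map<String, Object>` of named arguments rather than ollama4j's actual tool-function interface), and the returned price is a hard-coded placeholder:

```java
import java.util.Map;

public class FuelPriceTools {

    // Hypothetical tool function: reads the named arguments `location` and
    // `fuelType` and returns a fuel price string. A real implementation would
    // look the price up instead of returning a fixed value.
    public static String getCurrentFuelPrice(Map<String, Object> arguments) {
        String location = String.valueOf(arguments.get("location"));
        String fuelType = String.valueOf(arguments.get("fuelType"));
        return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
    }
}
```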

View File

@@ -804,9 +804,21 @@ public class Ollama {
ocm.setResponse(request.getPrompt());
chatRequest.setMessages(msgs);
msgs.add(ocm);
+ // Merge request's tools and globally registered tools into a new list to avoid mutating the
+ // original request
+ List<Tools.Tool> allTools = new ArrayList<>();
+ if (request.getTools() != null) {
+     allTools.addAll(request.getTools());
+ }
+ List<Tools.Tool> registeredTools = this.getRegisteredTools();
+ if (registeredTools != null) {
+     allTools.addAll(registeredTools);
+ }
OllamaChatTokenHandler hdlr = null;
chatRequest.setUseTools(true);
- chatRequest.setTools(request.getTools());
+ chatRequest.setTools(allTools);
if (streamObserver != null) {
chatRequest.setStream(true);
if (streamObserver.getResponseStreamHandler() != null) {
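Outside the diff, the merge-and-don't-mutate idea above can be shown as a standalone sketch, using `String` stand-ins for `Tools.Tool` and hypothetical names:

```java
import java.util.ArrayList;
import java.util.List;

public class ToolMergeSketch {

    // Copies per-request tools and globally registered tools into a fresh list,
    // leaving both inputs untouched. Either input may be null.
    static List<String> mergeTools(List<String> requestTools, List<String> registeredTools) {
        List<String> allTools = new ArrayList<>();
        if (requestTools != null) {
            allTools.addAll(requestTools);
        }
        if (registeredTools != null) {
            allTools.addAll(registeredTools);
        }
        return allTools;
    }

    public static void main(String[] args) {
        List<String> requestTools = new ArrayList<>(List.of("get-fuel-price"));
        List<String> registered = List.of("get-weather");

        List<String> merged = mergeTools(requestTools, registered);

        System.out.println(merged);       // [get-fuel-price, get-weather]
        System.out.println(requestTools); // unchanged: [get-fuel-price]
    }
}
```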

View File

@@ -44,10 +44,13 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
/**
* Parses streamed Response line from ollama chat. Using {@link
- * com.fasterxml.jackson.databind.ObjectMapper#readValue(String, TypeReference)} should throw
+ * com.fasterxml.jackson.databind.ObjectMapper#readValue(String, TypeReference)}
+ * should throw
* {@link IllegalArgumentException} in case of null line or {@link
- * com.fasterxml.jackson.core.JsonParseException} in case the JSON Object cannot be parsed to a
- * {@link OllamaChatResponseModel}. Thus, the ResponseModel should never be null.
+ * com.fasterxml.jackson.core.JsonParseException} in case the JSON Object cannot
+ * be parsed to a
+ * {@link OllamaChatResponseModel}. Thus, the ResponseModel should never be
+ * null.
*
* @param line streamed line of ollama stream response
* @param responseBuffer Stringbuffer to add latest response message part to
@@ -59,9 +62,11 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
try {
OllamaChatResponseModel ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
- // It seems that under heavy load Ollama responds with an empty chat message part in the
+ // It seems that under heavy load Ollama responds with an empty chat message
+ // part in the
// streamed response.
- // Thus, we null check the message and hope that the next streamed response has some
+ // Thus, we null check the message and hope that the next streamed response has
+ // some
// message content again.
OllamaChatMessage message = ollamaResponseModel.getMessage();
if (message != null) {
@@ -118,7 +123,9 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
parseResponseAndAddToBuffer(line, responseBuffer, thinkingBuffer);
ollamaChatResponseModel =
Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
- if (body.stream && ollamaChatResponseModel.getMessage().getToolCalls() != null) {
+ if (body.stream
+         && ollamaChatResponseModel.getMessage() != null
+         && ollamaChatResponseModel.getMessage().getToolCalls() != null) {
wantedToolsForStream = ollamaChatResponseModel.getMessage().getToolCalls();
}
if (finished && body.stream) {
@@ -153,7 +160,8 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
}
/**
- * Handles error status codes and appends error messages to the response buffer. Returns true if
+ * Handles error status codes and appends error messages to the response buffer.
+ * Returns true if
* an error was handled, false otherwise.
*/
private boolean handleErrorStatus(int statusCode, String line, StringBuilder responseBuffer)