Compare commits

..

3 Commits

6 changed files with 47 additions and 16 deletions

View File

@@ -209,7 +209,6 @@ To download/pull the model into your Ollama server, run the following command in
```shell
ollama pull mistral
```
You can list the models available on your model server by running the following command in your terminal.

View File

@@ -31,7 +31,19 @@ You could do that with ease with the `function calling` capabilities of the mode
### Create Tools/Functions
We can create static functions as our tools.
There are two ways to create and register your tools:
1. **Define static or regular methods and register them explicitly as tools.**
You can create standalone functions (static or instance methods) and manually associate them with your tool specifications.
2. **Use annotation-based tool discovery for automatic registration.**
By annotating your tool methods, you can leverage `registerAnnotatedTools()` to automatically scan your classpath, find all annotated tool functions, and register them without extra boilerplate.
Learn more about annotation-based tool registration [here](/apis-generate/chat-with-tools#annotation-based-tool-registration).
Choose the approach that best fits your project—manual for precise control, or annotation-based for easier scaling.
Let's start by exploring the first approach: manually defining and registering your tools/functions.
This function takes the arguments `location` and `fuelType`, performs an operation with them, and returns the
fuel price value.

View File

@@ -8,7 +8,7 @@
"name": "ollama-4-j",
"version": "0.0.0",
"dependencies": {
"@docsearch/js": "^4.2.0",
"@docsearch/js": "^4.1.0",
"@docusaurus/core": "^3.9.2",
"@docusaurus/plugin-content-docs": "^3.9.2",
"@docusaurus/plugin-google-gtag": "^3.9.2",
@@ -3347,9 +3347,9 @@
"license": "MIT"
},
"node_modules/@docsearch/js": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/@docsearch/js/-/js-4.2.0.tgz",
"integrity": "sha512-KBHVPO29QiGUFJYeAqxW0oXtGf/aghNmRrIRPT4/28JAefqoCkNn/ZM/jeQ7fHjl0KNM6C+KlLVYjwyz6lNZnA==",
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@docsearch/js/-/js-4.1.0.tgz",
"integrity": "sha512-49+CzeGfOiwG85k+dDvKfOsXLd9PQACoY/FLrZfFOKmpWv166u7bAHmBLdzvxlk8nJ289UgpGf0k6GQZtC85Fg==",
"license": "MIT"
},
"node_modules/@docsearch/react": {

View File

@@ -14,7 +14,7 @@
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docsearch/js": "^4.2.0",
"@docsearch/js": "^4.1.0",
"@docusaurus/core": "^3.9.2",
"@docusaurus/plugin-google-gtag": "^3.9.2",
"@docusaurus/preset-classic": "^3.9.2",

View File

@@ -804,9 +804,21 @@ public class Ollama {
ocm.setResponse(request.getPrompt());
chatRequest.setMessages(msgs);
msgs.add(ocm);
// Merge request's tools and globally registered tools into a new list to avoid mutating the
// original request
List<Tools.Tool> allTools = new ArrayList<>();
if (request.getTools() != null) {
allTools.addAll(request.getTools());
}
List<Tools.Tool> registeredTools = this.getRegisteredTools();
if (registeredTools != null) {
allTools.addAll(registeredTools);
}
OllamaChatTokenHandler hdlr = null;
chatRequest.setUseTools(true);
chatRequest.setTools(request.getTools());
chatRequest.setTools(allTools);
if (streamObserver != null) {
chatRequest.setStream(true);
if (streamObserver.getResponseStreamHandler() != null) {

View File

@@ -44,10 +44,13 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
/**
* Parses streamed Response line from ollama chat. Using {@link
* com.fasterxml.jackson.databind.ObjectMapper#readValue(String, TypeReference)} should throw
* com.fasterxml.jackson.databind.ObjectMapper#readValue(String, TypeReference)}
* should throw
* {@link IllegalArgumentException} in case of null line or {@link
* com.fasterxml.jackson.core.JsonParseException} in case the JSON Object cannot be parsed to a
* {@link OllamaChatResponseModel}. Thus, the ResponseModel should never be null.
* com.fasterxml.jackson.core.JsonParseException} in case the JSON Object cannot
* be parsed to a
* {@link OllamaChatResponseModel}. Thus, the ResponseModel should never be
* null.
*
* @param line streamed line of ollama stream response
* @param responseBuffer Stringbuffer to add latest response message part to
@@ -59,9 +62,11 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
try {
OllamaChatResponseModel ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
// It seems that under heavy load Ollama responds with an empty chat message part in the
// It seems that under heavy load Ollama responds with an empty chat message
// part in the
// streamed response.
// Thus, we null check the message and hope that the next streamed response has some
// Thus, we null check the message and hope that the next streamed response has
// some
// message content again.
OllamaChatMessage message = ollamaResponseModel.getMessage();
if (message != null) {
@@ -118,7 +123,9 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
parseResponseAndAddToBuffer(line, responseBuffer, thinkingBuffer);
ollamaChatResponseModel =
Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
if (body.stream && ollamaChatResponseModel.getMessage().getToolCalls() != null) {
if (body.stream
&& ollamaChatResponseModel.getMessage() != null
&& ollamaChatResponseModel.getMessage().getToolCalls() != null) {
wantedToolsForStream = ollamaChatResponseModel.getMessage().getToolCalls();
}
if (finished && body.stream) {
@@ -153,7 +160,8 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
}
/**
* Handles error status codes and appends error messages to the response buffer. Returns true if
* Handles error status codes and appends error messages to the response buffer.
* Returns true if
* an error was handled, false otherwise.
*/
private boolean handleErrorStatus(int statusCode, String line, StringBuilder responseBuffer)