Compare commits

..

2 Commits

Author SHA1 Message Date
snyk-bot
349066faf9 fix: upgrade @docsearch/js from 4.1.0 to 4.2.0
Snyk has created this PR to upgrade @docsearch/js from 4.1.0 to 4.2.0.

See this package in npm:
@docsearch/js

See this project in Snyk:
https://app.snyk.io/org/koujalgi.amith/project/9edb01b5-ef5b-48ce-87c6-70599c1d338c?utm_source=github&utm_medium=referral&page=upgrade-pr
2025-11-02 09:21:30 +00:00
Amith Koujalgi
b456feda64 Merge pull request #225 from ollama4j/fix-docs-build
Some checks failed
CodeQL / Analyze (java) (push) Failing after 12s
CodeQL / Analyze (javascript) (push) Failing after 10s
Mark stale issues / stale (push) Failing after 16s
Mark stale issues and PRs / stale (push) Failing after 33s
Update Docs Build GHA
2025-10-23 12:11:35 +05:30
6 changed files with 16 additions and 47 deletions

View File

@@ -209,6 +209,7 @@ To download/pull the model into your Ollama server, run the following command in
```shell
ollama pull mistral
```
You can list the models available on your model server by running the following command in your terminal.
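The listing command itself falls just outside this hunk; presumably it is the standard Ollama CLI command shown below (an assumption based on the Ollama CLI, not text taken from this diff):

```shell
# Lists the models already pulled onto the local Ollama server
ollama list
```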

View File

@@ -31,19 +31,7 @@ You could do that with ease with the `function calling` capabilities of the mode
### Create Tools/Functions
There are two ways to create and register your tools:
1. **Define static or regular methods and register them explicitly as tools.**
You can create standalone functions (static or instance methods) and manually associate them with your tool specifications.
2. **Use annotation-based tool discovery for automatic registration.**
By annotating your tool methods, you can leverage `registerAnnotatedTools()` to automatically scan your classpath, find all annotated tool functions, and register them without extra boilerplate.
Learn more about annotation-based tool registration [here](/apis-generate/chat-with-tools#annotation-based-tool-registration).
Choose the approach that best fits your project—manual for precise control, or annotation-based for easier scaling.
Let's start by exploring the first approach: manually defining and registering your tools/functions.
We can create static functions as our tools.
Such a function takes the arguments `location` and `fuelType`, performs an operation with these arguments, and returns the fuel price value, as in the sketch below.
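A minimal sketch of such a static tool function, assuming the common ollama4j convention of receiving the tool-call arguments as a `Map` (the class name, method name, and returned price are illustrative, not the exact snippet from these docs):

```java
import java.util.Map;

public class FuelPriceTool {

    // Static tool function: receives the tool-call arguments supplied by the
    // model and returns a plain-text result the model can use in its reply.
    public static String getCurrentFuelPrice(Map<String, Object> arguments) {
        String location = String.valueOf(arguments.get("location"));
        String fuelType = String.valueOf(arguments.get("fuelType"));
        // A real implementation would look the price up in a service or database;
        // the hard-coded value here is a stub for illustration.
        return "Current price of " + fuelType + " in " + location + " is Rs. 103.00";
    }
}
```

Such a method would then be associated with a tool specification and registered with the client explicitly, as described above for the manual approach.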

View File

@@ -8,7 +8,7 @@
"name": "ollama-4-j",
"version": "0.0.0",
"dependencies": {
"@docsearch/js": "^4.1.0",
"@docsearch/js": "^4.2.0",
"@docusaurus/core": "^3.9.2",
"@docusaurus/plugin-content-docs": "^3.9.2",
"@docusaurus/plugin-google-gtag": "^3.9.2",
@@ -3347,9 +3347,9 @@
"license": "MIT"
},
"node_modules/@docsearch/js": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@docsearch/js/-/js-4.1.0.tgz",
"integrity": "sha512-49+CzeGfOiwG85k+dDvKfOsXLd9PQACoY/FLrZfFOKmpWv166u7bAHmBLdzvxlk8nJ289UgpGf0k6GQZtC85Fg==",
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/@docsearch/js/-/js-4.2.0.tgz",
"integrity": "sha512-KBHVPO29QiGUFJYeAqxW0oXtGf/aghNmRrIRPT4/28JAefqoCkNn/ZM/jeQ7fHjl0KNM6C+KlLVYjwyz6lNZnA==",
"license": "MIT"
},
"node_modules/@docsearch/react": {

View File

@@ -14,7 +14,7 @@
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docsearch/js": "^4.1.0",
"@docsearch/js": "^4.2.0",
"@docusaurus/core": "^3.9.2",
"@docusaurus/plugin-google-gtag": "^3.9.2",
"@docusaurus/preset-classic": "^3.9.2",

View File

@@ -804,21 +804,9 @@ public class Ollama {
ocm.setResponse(request.getPrompt());
chatRequest.setMessages(msgs);
msgs.add(ocm);
// Merge request's tools and globally registered tools into a new list to avoid mutating the
// original request
List<Tools.Tool> allTools = new ArrayList<>();
if (request.getTools() != null) {
allTools.addAll(request.getTools());
}
List<Tools.Tool> registeredTools = this.getRegisteredTools();
if (registeredTools != null) {
allTools.addAll(registeredTools);
}
OllamaChatTokenHandler hdlr = null;
chatRequest.setUseTools(true);
chatRequest.setTools(allTools);
chatRequest.setTools(request.getTools());
if (streamObserver != null) {
chatRequest.setStream(true);
if (streamObserver.getResponseStreamHandler() != null) {

View File

@@ -44,15 +44,12 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
/**
* Parses streamed Response line from ollama chat. Using {@link
* com.fasterxml.jackson.databind.ObjectMapper#readValue(String, TypeReference)}
* should throw
* com.fasterxml.jackson.databind.ObjectMapper#readValue(String, TypeReference)} should throw
* {@link IllegalArgumentException} in case of null line or {@link
* com.fasterxml.jackson.core.JsonParseException} in case the JSON Object cannot
* be parsed to a
* {@link OllamaChatResponseModel}. Thus, the ResponseModel should never be
* null.
* com.fasterxml.jackson.core.JsonParseException} in case the JSON Object cannot be parsed to a
* {@link OllamaChatResponseModel}. Thus, the ResponseModel should never be null.
*
* @param line streamed line of ollama stream response
* @param line streamed line of ollama stream response
* @param responseBuffer Stringbuffer to add latest response message part to
* @return TRUE, if ollama-Response has 'done' state
*/
@@ -62,11 +59,9 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
try {
OllamaChatResponseModel ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
// It seems that under heavy load Ollama responds with an empty chat message
// part in the
// It seems that under heavy load Ollama responds with an empty chat message part in the
// streamed response.
// Thus, we null check the message and hope that the next streamed response has
// some
// Thus, we null check the message and hope that the next streamed response has some
// message content again.
OllamaChatMessage message = ollamaResponseModel.getMessage();
if (message != null) {
@@ -123,9 +118,7 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
parseResponseAndAddToBuffer(line, responseBuffer, thinkingBuffer);
ollamaChatResponseModel =
Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
if (body.stream
&& ollamaChatResponseModel.getMessage() != null
&& ollamaChatResponseModel.getMessage().getToolCalls() != null) {
if (body.stream && ollamaChatResponseModel.getMessage().getToolCalls() != null) {
wantedToolsForStream = ollamaChatResponseModel.getMessage().getToolCalls();
}
if (finished && body.stream) {
@@ -160,8 +153,7 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
}
/**
* Handles error status codes and appends error messages to the response buffer.
* Returns true if
* Handles error status codes and appends error messages to the response buffer. Returns true if
* an error was handled, false otherwise.
*/
private boolean handleErrorStatus(int statusCode, String line, StringBuilder responseBuffer)