diff --git a/src/main/java/io/github/ollama4j/Ollama.java b/src/main/java/io/github/ollama4j/Ollama.java
index 5357a70..60d7c8f 100644
--- a/src/main/java/io/github/ollama4j/Ollama.java
+++ b/src/main/java/io/github/ollama4j/Ollama.java
@@ -70,10 +70,14 @@ public class Ollama {
*/
@Setter private long requestTimeoutSeconds = 10;
- /** The read timeout in seconds for image URLs. */
+ /**
+ * The read timeout in seconds for image URLs.
+ */
@Setter private int imageURLReadTimeoutSeconds = 10;
- /** The connect timeout in seconds for image URLs. */
+ /**
+ * The connect timeout in seconds for image URLs.
+ */
@Setter private int imageURLConnectTimeoutSeconds = 10;
/**
@@ -280,9 +284,9 @@ public class Ollama {
/**
* Handles retry backoff for pullModel.
*
- * @param modelName the name of the model being pulled
- * @param currentRetry the current retry attempt (zero-based)
- * @param maxRetries the maximum number of retries allowed
+ * @param modelName the name of the model being pulled
+ * @param currentRetry the current retry attempt (zero-based)
+ * @param maxRetries the maximum number of retries allowed
* @param baseDelayMillis the base delay in milliseconds for exponential backoff
* @throws InterruptedException if the thread is interrupted during sleep
*/
@@ -376,7 +380,7 @@ public class Ollama {
* Returns true if the response indicates a successful pull.
*
* @param modelPullResponse the response from the model pull
- * @param modelName the name of the model
+ * @param modelName the name of the model
* @return true if the pull was successful, false otherwise
* @throws OllamaException if the response contains an error
*/
@@ -601,7 +605,7 @@ public class Ollama {
/**
* Deletes a model from the Ollama server.
*
- * @param modelName the name of the model to be deleted
+ * @param modelName the name of the model to be deleted
* @param ignoreIfNotPresent ignore errors if the specified model is not present on the Ollama server
* @throws OllamaException if the response indicates an error status
*/
@@ -758,7 +762,7 @@ public class Ollama {
* Generates a response from a model using the specified parameters and stream observer.
* If {@code streamObserver} is provided, streaming is enabled; otherwise, a synchronous call is made.
*
- * @param request the generation request
+ * @param request the generation request
* @param streamObserver the stream observer for streaming responses, or null for synchronous
* @return the result of the generation
* @throws OllamaException if the request fails
@@ -823,10 +827,10 @@ public class Ollama {
/**
* Generates a response from a model asynchronously, returning a streamer for results.
*
- * @param model the model name
+ * @param model the model name
* @param prompt the prompt to send
- * @param raw whether to use raw mode
- * @param think whether to use "think" mode
+ * @param raw whether to use raw mode
+ * @param think whether to use "think" mode
* @return an OllamaAsyncResultStreamer for streaming results
* @throws OllamaException if the request fails
*/
@@ -861,9 +865,9 @@ public class Ollama {
*
*
Note: the OllamaChatRequestModel#getStream() property is not implemented.
*
- * @param request request object to be sent to the server
+ * @param request request object to be sent to the server
* @param tokenHandler callback handler to handle the last token from stream (caution: the
- * previous tokens from stream will not be concatenated)
+ * previous tokens from stream will not be concatenated)
* @return {@link OllamaChatResult}
* @throws OllamaException if the response indicates an error status
*/
@@ -958,12 +962,16 @@ public class Ollama {
* Registers multiple tools in the tool registry.
*
* @param tools a list of {@link Tools.Tool} objects to register. Each tool contains its
- * specification and function.
+ * specification and function.
*/
public void registerTools(List tools) {
toolRegistry.addTools(tools);
}
+    /** Returns the tools currently registered in the tool registry. */
+    public List getRegisteredTools() {
+        return toolRegistry.getRegisteredTools();
+    }
/**
* Deregisters all tools from the tool registry. This method removes all registered tools,
* effectively clearing the registry.
@@ -979,7 +987,7 @@ public class Ollama {
* and recursively registers annotated tools from all the providers specified in the annotation.
*
* @throws OllamaException if the caller's class is not annotated with {@link
- * OllamaToolService} or if reflection-based instantiation or invocation fails
+ * OllamaToolService} or if reflection-based instantiation or invocation fails
*/
public void registerAnnotatedTools() throws OllamaException {
try {
@@ -1127,7 +1135,7 @@ public class Ollama {
* This method synchronously calls the Ollama API. If a stream handler is provided,
* the request will be streamed; otherwise, a regular synchronous request will be made.
*
- * @param ollamaRequestModel the request model containing necessary parameters for the Ollama API request
+ * @param ollamaRequestModel the request model containing necessary parameters for the Ollama API request
* @param thinkingStreamHandler the stream handler for "thinking" tokens, or null if not used
* @param responseStreamHandler the stream handler to process streaming responses, or null for non-streaming requests
* @return the result of the Ollama API request
diff --git a/src/main/java/io/github/ollama4j/models/chat/OllamaChatRequest.java b/src/main/java/io/github/ollama4j/models/chat/OllamaChatRequest.java
index a10cf77..dd1b2c9 100644
--- a/src/main/java/io/github/ollama4j/models/chat/OllamaChatRequest.java
+++ b/src/main/java/io/github/ollama4j/models/chat/OllamaChatRequest.java
@@ -11,6 +11,9 @@ package io.github.ollama4j.models.chat;
import io.github.ollama4j.models.request.OllamaCommonRequest;
import io.github.ollama4j.tools.Tools;
import io.github.ollama4j.utils.OllamaRequestBody;
+import io.github.ollama4j.utils.Options;
+import java.io.File;
+import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import lombok.Getter;
@@ -20,8 +23,8 @@ import lombok.Setter;
* Defines a Request to use against the ollama /api/chat endpoint.
*
* @see Generate
- * Chat Completion
+ * "https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate
+ * Chat Completion
*/
@Getter
@Setter
@@ -36,11 +39,15 @@ public class OllamaChatRequest extends OllamaCommonRequest implements OllamaRequ
/**
* Controls whether tools are automatically executed.
*
- * If set to {@code true} (the default), tools will be automatically used/applied by the
- * library. If set to {@code false}, tool calls will be returned to the client for manual
+ *
+ * If set to {@code true} (the default), tools will be automatically
+ * used/applied by the
+ * library. If set to {@code false}, tool calls will be returned to the client
+ * for manual
* handling.
*
- *
Disabling this should be an explicit operation.
+ *
+ * Disabling this should be an explicit operation.
*/
private boolean useTools = true;
@@ -57,7 +64,116 @@ public class OllamaChatRequest extends OllamaCommonRequest implements OllamaRequ
if (!(o instanceof OllamaChatRequest)) {
return false;
}
-
return this.toString().equals(o.toString());
}
+
+ // --- Builder-like fluent API methods ---
+
+ public static OllamaChatRequest builder() {
+ OllamaChatRequest req = new OllamaChatRequest();
+ req.setMessages(new ArrayList<>());
+ return req;
+ }
+
+ public OllamaChatRequest withModel(String model) {
+ this.setModel(model);
+ return this;
+ }
+
+ public OllamaChatRequest withMessage(OllamaChatMessageRole role, String content) {
+ return withMessage(role, content, Collections.emptyList());
+ }
+
+ public OllamaChatRequest withMessage(
+ OllamaChatMessageRole role, String content, List toolCalls) {
+ if (this.messages == null || this.messages == Collections.EMPTY_LIST) {
+ this.messages = new ArrayList<>();
+ }
+ this.messages.add(new OllamaChatMessage(role, content, null, toolCalls, null));
+ return this;
+ }
+
+ public OllamaChatRequest withMessage(
+ OllamaChatMessageRole role,
+ String content,
+ List toolCalls,
+ List images) {
+ if (this.messages == null || this.messages == Collections.EMPTY_LIST) {
+ this.messages = new ArrayList<>();
+ }
+
+ List imagesAsBytes = new ArrayList<>();
+ if (images != null) {
+ for (File image : images) {
+ try {
+ imagesAsBytes.add(java.nio.file.Files.readAllBytes(image.toPath()));
+ } catch (java.io.IOException e) {
+                    throw new java.io.UncheckedIOException(
+                            "Failed to read image file: " + image.getAbsolutePath(), e);
+ }
+ }
+ }
+ this.messages.add(new OllamaChatMessage(role, content, null, toolCalls, imagesAsBytes));
+ return this;
+ }
+
+ public OllamaChatRequest withMessages(List messages) {
+ this.setMessages(messages);
+ return this;
+ }
+
+ public OllamaChatRequest withOptions(Options options) {
+ if (options != null) {
+ this.setOptions(options.getOptionsMap());
+ }
+ return this;
+ }
+
+ public OllamaChatRequest withGetJsonResponse() {
+ this.setFormat("json");
+ return this;
+ }
+
+ public OllamaChatRequest withTemplate(String template) {
+ this.setTemplate(template);
+ return this;
+ }
+
+ public OllamaChatRequest withStreaming() {
+ this.setStream(true);
+ return this;
+ }
+
+ public OllamaChatRequest withKeepAlive(String keepAlive) {
+ this.setKeepAlive(keepAlive);
+ return this;
+ }
+
+ public OllamaChatRequest withThinking(boolean think) {
+ this.setThink(think);
+ return this;
+ }
+
+ public OllamaChatRequest withUseTools(boolean useTools) {
+ this.setUseTools(useTools);
+ return this;
+ }
+
+ public OllamaChatRequest withTools(List tools) {
+ this.setTools(tools);
+ return this;
+ }
+
+ public OllamaChatRequest build() {
+ return this;
+ }
+
+ public void reset() {
+ // Only clear the messages, keep model and think as is
+ if (this.messages == null || this.messages == Collections.EMPTY_LIST) {
+ this.messages = new ArrayList<>();
+ } else {
+ this.messages.clear();
+ }
+ }
}
diff --git a/src/main/java/io/github/ollama4j/models/chat/OllamaChatRequestBuilder.java b/src/main/java/io/github/ollama4j/models/chat/OllamaChatRequestBuilder.java
deleted file mode 100644
index f72759f..0000000
--- a/src/main/java/io/github/ollama4j/models/chat/OllamaChatRequestBuilder.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
- * Ollama4j - Java library for interacting with Ollama server.
- * Copyright (c) 2025 Amith Koujalgi and contributors.
- *
- * Licensed under the MIT License (the "License");
- * you may not use this file except in compliance with the License.
- *
-*/
-package io.github.ollama4j.models.chat;
-
-import io.github.ollama4j.utils.Options;
-import io.github.ollama4j.utils.Utils;
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.stream.Collectors;
-import lombok.Setter;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/** Helper class for creating {@link OllamaChatRequest} objects using the builder-pattern. */
-public class OllamaChatRequestBuilder {
-
- private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class);
-
- private int imageURLConnectTimeoutSeconds = 10;
- private int imageURLReadTimeoutSeconds = 10;
- private OllamaChatRequest request;
- @Setter private boolean useTools = true;
-
- private OllamaChatRequestBuilder() {
- request = new OllamaChatRequest();
- request.setMessages(new ArrayList<>());
- }
-
- public static OllamaChatRequestBuilder builder() {
- return new OllamaChatRequestBuilder();
- }
-
- public OllamaChatRequestBuilder withImageURLConnectTimeoutSeconds(
- int imageURLConnectTimeoutSeconds) {
- this.imageURLConnectTimeoutSeconds = imageURLConnectTimeoutSeconds;
- return this;
- }
-
- public OllamaChatRequestBuilder withImageURLReadTimeoutSeconds(int imageURLReadTimeoutSeconds) {
- this.imageURLReadTimeoutSeconds = imageURLReadTimeoutSeconds;
- return this;
- }
-
- public OllamaChatRequestBuilder withModel(String model) {
- request.setModel(model);
- return this;
- }
-
- public void reset() {
- request = new OllamaChatRequest(request.getModel(), request.isThink(), new ArrayList<>());
- }
-
- public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content) {
- return withMessage(role, content, Collections.emptyList());
- }
-
- public OllamaChatRequestBuilder withMessage(
- OllamaChatMessageRole role, String content, List toolCalls) {
- List messages = this.request.getMessages();
- messages.add(new OllamaChatMessage(role, content, null, toolCalls, null));
- return this;
- }
-
- public OllamaChatRequestBuilder withMessage(
- OllamaChatMessageRole role,
- String content,
- List toolCalls,
- List images) {
- List messages = this.request.getMessages();
- List binaryImages =
- images.stream()
- .map(
- file -> {
- try {
- return Files.readAllBytes(file.toPath());
- } catch (IOException e) {
- LOG.warn(
- "File '{}' could not be accessed, will not add to"
- + " message!",
- file.toPath(),
- e);
- return new byte[0];
- }
- })
- .collect(Collectors.toList());
- messages.add(new OllamaChatMessage(role, content, null, toolCalls, binaryImages));
- return this;
- }
-
- public OllamaChatRequestBuilder withMessage(
- OllamaChatMessageRole role,
- String content,
- List toolCalls,
- String... imageUrls)
- throws IOException, InterruptedException {
- List messages = this.request.getMessages();
- List binaryImages = null;
- if (imageUrls.length > 0) {
- binaryImages = new ArrayList<>();
- for (String imageUrl : imageUrls) {
- try {
- binaryImages.add(
- Utils.loadImageBytesFromUrl(
- imageUrl,
- imageURLConnectTimeoutSeconds,
- imageURLReadTimeoutSeconds));
- } catch (InterruptedException e) {
- LOG.error("Failed to load image from URL: '{}'. Cause: {}", imageUrl, e);
- Thread.currentThread().interrupt();
- throw new InterruptedException(
- "Interrupted while loading image from URL: " + imageUrl);
- } catch (IOException e) {
- LOG.error(
- "IOException occurred while loading image from URL '{}'. Cause: {}",
- imageUrl,
- e.getMessage(),
- e);
- throw new IOException(
- "IOException while loading image from URL: " + imageUrl, e);
- }
- }
- }
- messages.add(new OllamaChatMessage(role, content, null, toolCalls, binaryImages));
- return this;
- }
-
- public OllamaChatRequestBuilder withMessages(List messages) {
- request.setMessages(messages);
- return this;
- }
-
- public OllamaChatRequestBuilder withOptions(Options options) {
- this.request.setOptions(options.getOptionsMap());
- return this;
- }
-
- public OllamaChatRequestBuilder withGetJsonResponse() {
- this.request.setFormat("json");
- return this;
- }
-
- public OllamaChatRequestBuilder withTemplate(String template) {
- this.request.setTemplate(template);
- return this;
- }
-
- public OllamaChatRequestBuilder withStreaming() {
- this.request.setStream(true);
- return this;
- }
-
- public OllamaChatRequestBuilder withKeepAlive(String keepAlive) {
- this.request.setKeepAlive(keepAlive);
- return this;
- }
-
- public OllamaChatRequestBuilder withThinking(boolean think) {
- this.request.setThink(think);
- return this;
- }
-
- public OllamaChatRequest build() {
- request.setUseTools(useTools);
- return request;
- }
-}
diff --git a/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequest.java b/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequest.java
index 05ad9c8..fb0d7a8 100644
--- a/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequest.java
+++ b/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequest.java
@@ -11,7 +11,14 @@ package io.github.ollama4j.models.generate;
import io.github.ollama4j.models.request.OllamaCommonRequest;
import io.github.ollama4j.tools.Tools;
import io.github.ollama4j.utils.OllamaRequestBody;
+import io.github.ollama4j.utils.Options;
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.ArrayList;
+import java.util.Base64;
import java.util.List;
+import java.util.Map;
import lombok.Getter;
import lombok.Setter;
@@ -41,6 +48,100 @@ public class OllamaGenerateRequest extends OllamaCommonRequest implements Ollama
this.images = images;
}
+ // --- Builder-style methods ---
+
+ public static OllamaGenerateRequest builder() {
+ return new OllamaGenerateRequest();
+ }
+
+ public OllamaGenerateRequest withPrompt(String prompt) {
+ this.setPrompt(prompt);
+ return this;
+ }
+
+ public OllamaGenerateRequest withTools(List tools) {
+ this.setTools(tools);
+ return this;
+ }
+
+ public OllamaGenerateRequest withModel(String model) {
+ this.setModel(model);
+ return this;
+ }
+
+ public OllamaGenerateRequest withGetJsonResponse() {
+ this.setFormat("json");
+ return this;
+ }
+
+ public OllamaGenerateRequest withOptions(Options options) {
+ this.setOptions(options.getOptionsMap());
+ return this;
+ }
+
+ public OllamaGenerateRequest withTemplate(String template) {
+ this.setTemplate(template);
+ return this;
+ }
+
+ public OllamaGenerateRequest withStreaming(boolean streaming) {
+ this.setStream(streaming);
+ return this;
+ }
+
+ public OllamaGenerateRequest withKeepAlive(String keepAlive) {
+ this.setKeepAlive(keepAlive);
+ return this;
+ }
+
+ public OllamaGenerateRequest withRaw(boolean raw) {
+ this.setRaw(raw);
+ return this;
+ }
+
+ public OllamaGenerateRequest withThink(boolean think) {
+ this.setThink(think);
+ return this;
+ }
+
+ public OllamaGenerateRequest withUseTools(boolean useTools) {
+ this.setUseTools(useTools);
+ return this;
+ }
+
+ public OllamaGenerateRequest withFormat(Map format) {
+ this.setFormat(format);
+ return this;
+ }
+
+ public OllamaGenerateRequest withSystem(String system) {
+ this.setSystem(system);
+ return this;
+ }
+
+ public OllamaGenerateRequest withContext(String context) {
+ this.setContext(context);
+ return this;
+ }
+
+ public OllamaGenerateRequest withImagesBase64(List images) {
+ this.setImages(images);
+ return this;
+ }
+
+ public OllamaGenerateRequest withImages(List imageFiles) throws IOException {
+ List images = new ArrayList<>();
+ for (File imageFile : imageFiles) {
+ images.add(Base64.getEncoder().encodeToString(Files.readAllBytes(imageFile.toPath())));
+ }
+ this.setImages(images);
+ return this;
+ }
+
+ public OllamaGenerateRequest build() {
+ return this;
+ }
+
@Override
public boolean equals(Object o) {
if (!(o instanceof OllamaGenerateRequest)) {
diff --git a/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequestBuilder.java b/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequestBuilder.java
deleted file mode 100644
index 0717f9e..0000000
--- a/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequestBuilder.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Ollama4j - Java library for interacting with Ollama server.
- * Copyright (c) 2025 Amith Koujalgi and contributors.
- *
- * Licensed under the MIT License (the "License");
- * you may not use this file except in compliance with the License.
- *
-*/
-package io.github.ollama4j.models.generate;
-
-import io.github.ollama4j.tools.Tools;
-import io.github.ollama4j.utils.Options;
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.util.ArrayList;
-import java.util.Base64;
-import java.util.List;
-
-/** Helper class for creating {@link OllamaGenerateRequest} objects using the builder-pattern. */
-public class OllamaGenerateRequestBuilder {
-
- private OllamaGenerateRequestBuilder() {
- request = new OllamaGenerateRequest();
- }
-
- private OllamaGenerateRequest request;
-
- public static OllamaGenerateRequestBuilder builder() {
- return new OllamaGenerateRequestBuilder();
- }
-
- public OllamaGenerateRequest build() {
- return request;
- }
-
- public OllamaGenerateRequestBuilder withPrompt(String prompt) {
- request.setPrompt(prompt);
- return this;
- }
-
- public OllamaGenerateRequestBuilder withTools(List tools) {
- request.setTools(tools);
- return this;
- }
-
- public OllamaGenerateRequestBuilder withModel(String model) {
- request.setModel(model);
- return this;
- }
-
- public OllamaGenerateRequestBuilder withGetJsonResponse() {
- this.request.setFormat("json");
- return this;
- }
-
- public OllamaGenerateRequestBuilder withOptions(Options options) {
- this.request.setOptions(options.getOptionsMap());
- return this;
- }
-
- public OllamaGenerateRequestBuilder withTemplate(String template) {
- this.request.setTemplate(template);
- return this;
- }
-
- public OllamaGenerateRequestBuilder withStreaming(boolean streaming) {
- this.request.setStream(streaming);
- return this;
- }
-
- public OllamaGenerateRequestBuilder withKeepAlive(String keepAlive) {
- this.request.setKeepAlive(keepAlive);
- return this;
- }
-
- public OllamaGenerateRequestBuilder withRaw(boolean raw) {
- this.request.setRaw(raw);
- return this;
- }
-
- public OllamaGenerateRequestBuilder withThink(boolean think) {
- this.request.setThink(think);
- return this;
- }
-
- public OllamaGenerateRequestBuilder withUseTools(boolean useTools) {
- this.request.setUseTools(useTools);
- return this;
- }
-
- public OllamaGenerateRequestBuilder withFormat(java.util.Map format) {
- this.request.setFormat(format);
- return this;
- }
-
- public OllamaGenerateRequestBuilder withSystem(String system) {
- this.request.setSystem(system);
- return this;
- }
-
- public OllamaGenerateRequestBuilder withContext(String context) {
- this.request.setContext(context);
- return this;
- }
-
- public OllamaGenerateRequestBuilder withImagesBase64(java.util.List images) {
- this.request.setImages(images);
- return this;
- }
-
- public OllamaGenerateRequestBuilder withImages(java.util.List imageFiles)
- throws IOException {
- java.util.List images = new ArrayList<>();
- for (File imageFile : imageFiles) {
- images.add(Base64.getEncoder().encodeToString(Files.readAllBytes(imageFile.toPath())));
- }
- this.request.setImages(images);
- return this;
- }
-}
diff --git a/src/main/java/io/github/ollama4j/models/request/OllamaChatEndpointCaller.java b/src/main/java/io/github/ollama4j/models/request/OllamaChatEndpointCaller.java
index a08cd18..6e86b0d 100644
--- a/src/main/java/io/github/ollama4j/models/request/OllamaChatEndpointCaller.java
+++ b/src/main/java/io/github/ollama4j/models/request/OllamaChatEndpointCaller.java
@@ -96,7 +96,6 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
getRequestBuilderDefault(uri).POST(body.getBodyPublisher());
HttpRequest request = requestBuilder.build();
LOG.debug("Asking model: {}", body);
- System.out.println("Asking model: " + Utils.toJSON(body));
HttpResponse response =
httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
diff --git a/src/test/java/io/github/ollama4j/integrationtests/OllamaIntegrationTest.java b/src/test/java/io/github/ollama4j/integrationtests/OllamaIntegrationTest.java
index 7e8ea90..e88e0bc 100644
--- a/src/test/java/io/github/ollama4j/integrationtests/OllamaIntegrationTest.java
+++ b/src/test/java/io/github/ollama4j/integrationtests/OllamaIntegrationTest.java
@@ -18,7 +18,6 @@ import io.github.ollama4j.models.chat.*;
import io.github.ollama4j.models.embed.OllamaEmbedRequest;
import io.github.ollama4j.models.embed.OllamaEmbedResult;
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
-import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
import io.github.ollama4j.models.response.Model;
import io.github.ollama4j.models.response.ModelDetail;
@@ -272,7 +271,7 @@ class OllamaIntegrationTest {
format.put("required", List.of("isNoon"));
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(TOOLS_MODEL)
.withPrompt(prompt)
.withFormat(format)
@@ -299,7 +298,7 @@ class OllamaIntegrationTest {
boolean raw = false;
boolean thinking = false;
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(GENERAL_PURPOSE_MODEL)
.withPrompt(
"What is the capital of France? And what's France's connection with"
@@ -327,7 +326,7 @@ class OllamaIntegrationTest {
api.pullModel(GENERAL_PURPOSE_MODEL);
boolean raw = false;
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(GENERAL_PURPOSE_MODEL)
.withPrompt(
"What is the capital of France? And what's France's connection with"
@@ -357,8 +356,7 @@ class OllamaIntegrationTest {
void shouldGenerateWithCustomOptions() throws OllamaException {
api.pullModel(GENERAL_PURPOSE_MODEL);
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(GENERAL_PURPOSE_MODEL);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(GENERAL_PURPOSE_MODEL);
OllamaChatRequest requestModel =
builder.withMessage(
OllamaChatMessageRole.SYSTEM,
@@ -390,8 +388,7 @@ class OllamaIntegrationTest {
String expectedResponse = "Bhai";
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(GENERAL_PURPOSE_MODEL);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(GENERAL_PURPOSE_MODEL);
OllamaChatRequest requestModel =
builder.withMessage(
OllamaChatMessageRole.SYSTEM,
@@ -429,8 +426,7 @@ class OllamaIntegrationTest {
@Order(10)
void shouldChatWithHistory() throws Exception {
api.pullModel(THINKING_TOOL_MODEL);
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(THINKING_TOOL_MODEL);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(THINKING_TOOL_MODEL);
OllamaChatRequest requestModel =
builder.withMessage(
@@ -481,8 +477,7 @@ class OllamaIntegrationTest {
void shouldChatWithExplicitTool() throws OllamaException {
String theToolModel = TOOLS_MODEL;
api.pullModel(theToolModel);
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(theToolModel);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(theToolModel);
api.registerTool(EmployeeFinderToolSpec.getSpecification());
@@ -534,8 +529,7 @@ class OllamaIntegrationTest {
void shouldChatWithExplicitToolAndUseTools() throws OllamaException {
String theToolModel = TOOLS_MODEL;
api.pullModel(theToolModel);
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(theToolModel);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(theToolModel);
api.registerTool(EmployeeFinderToolSpec.getSpecification());
@@ -579,8 +573,7 @@ class OllamaIntegrationTest {
String theToolModel = TOOLS_MODEL;
api.pullModel(theToolModel);
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(theToolModel);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(theToolModel);
api.registerTool(EmployeeFinderToolSpec.getSpecification());
@@ -633,8 +626,7 @@ class OllamaIntegrationTest {
void shouldChatWithAnnotatedToolSingleParam() throws OllamaException {
String theToolModel = TOOLS_MODEL;
api.pullModel(theToolModel);
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(theToolModel);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(theToolModel);
api.registerAnnotatedTools();
@@ -680,8 +672,7 @@ class OllamaIntegrationTest {
void shouldChatWithAnnotatedToolMultipleParams() throws OllamaException {
String theToolModel = TOOLS_MODEL;
api.pullModel(theToolModel);
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(theToolModel);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(theToolModel);
api.registerAnnotatedTools(new AnnotatedTool());
@@ -712,8 +703,7 @@ class OllamaIntegrationTest {
void shouldChatWithStream() throws OllamaException {
api.deregisterTools();
api.pullModel(GENERAL_PURPOSE_MODEL);
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(GENERAL_PURPOSE_MODEL);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(GENERAL_PURPOSE_MODEL);
OllamaChatRequest requestModel =
builder.withMessage(
OllamaChatMessageRole.USER,
@@ -739,8 +729,7 @@ class OllamaIntegrationTest {
@Order(15)
void shouldChatWithThinkingAndStream() throws OllamaException {
api.pullModel(THINKING_TOOL_MODEL_2);
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(THINKING_TOOL_MODEL_2);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(THINKING_TOOL_MODEL_2);
OllamaChatRequest requestModel =
builder.withMessage(
OllamaChatMessageRole.USER,
@@ -758,32 +747,6 @@ class OllamaIntegrationTest {
assertNotNull(chatResult.getResponseModel().getMessage().getResponse());
}
- /**
- * Tests chat API with an image input from a URL.
- *
- * Scenario: Sends a user message with an image URL and verifies the assistant's response.
- * Usage: chat, vision model, image from URL, no tools, no thinking, no streaming.
- */
- @Test
- @Order(10)
- void shouldChatWithImageFromURL() throws OllamaException, IOException, InterruptedException {
- api.pullModel(VISION_MODEL);
-
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(VISION_MODEL);
- OllamaChatRequest requestModel =
- builder.withMessage(
- OllamaChatMessageRole.USER,
- "What's in the picture?",
- Collections.emptyList(),
- "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")
- .build();
- api.registerAnnotatedTools(new OllamaIntegrationTest());
-
- OllamaChatResult chatResult = api.chat(requestModel, null);
- assertNotNull(chatResult);
- }
-
/**
* Tests chat API with an image input from a file and multi-turn history.
*
@@ -795,8 +758,7 @@ class OllamaIntegrationTest {
@Order(10)
void shouldChatWithImageFromFileAndHistory() throws OllamaException {
api.pullModel(VISION_MODEL);
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(VISION_MODEL);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(VISION_MODEL);
OllamaChatRequest requestModel =
builder.withMessage(
OllamaChatMessageRole.USER,
@@ -832,7 +794,7 @@ class OllamaIntegrationTest {
api.pullModel(VISION_MODEL);
try {
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(VISION_MODEL)
.withPrompt("What is in this image?")
.withRaw(false)
@@ -865,7 +827,7 @@ class OllamaIntegrationTest {
void shouldGenerateWithImageFilesAndResponseStreamed() throws OllamaException, IOException {
api.pullModel(VISION_MODEL);
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(VISION_MODEL)
.withPrompt("What is in this image?")
.withRaw(false)
@@ -900,7 +862,7 @@ class OllamaIntegrationTest {
boolean think = true;
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(THINKING_TOOL_MODEL)
.withPrompt("Who are you?")
.withRaw(raw)
@@ -929,7 +891,7 @@ class OllamaIntegrationTest {
api.pullModel(THINKING_TOOL_MODEL);
boolean raw = false;
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(THINKING_TOOL_MODEL)
.withPrompt("Who are you?")
.withRaw(raw)
@@ -967,7 +929,7 @@ class OllamaIntegrationTest {
boolean raw = true;
boolean thinking = false;
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(GENERAL_PURPOSE_MODEL)
.withPrompt("What is 2+2?")
.withRaw(raw)
@@ -995,7 +957,7 @@ class OllamaIntegrationTest {
api.pullModel(GENERAL_PURPOSE_MODEL);
boolean raw = true;
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(GENERAL_PURPOSE_MODEL)
.withPrompt("What is the largest planet in our solar system?")
.withRaw(raw)
@@ -1028,7 +990,7 @@ class OllamaIntegrationTest {
// 'response' tokens
boolean raw = true;
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(THINKING_TOOL_MODEL)
.withPrompt(
"Count 1 to 5. Just give me the numbers and do not give any other"
@@ -1093,7 +1055,7 @@ class OllamaIntegrationTest {
format.put("required", List.of("cities"));
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(TOOLS_MODEL)
.withPrompt(prompt)
.withFormat(format)
@@ -1119,8 +1081,7 @@ class OllamaIntegrationTest {
@Order(26)
void shouldChatWithThinkingNoStream() throws OllamaException {
api.pullModel(THINKING_TOOL_MODEL);
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(THINKING_TOOL_MODEL);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(THINKING_TOOL_MODEL);
OllamaChatRequest requestModel =
builder.withMessage(
OllamaChatMessageRole.USER,
@@ -1149,8 +1110,7 @@ class OllamaIntegrationTest {
void shouldChatWithCustomOptionsAndStreaming() throws OllamaException {
api.pullModel(GENERAL_PURPOSE_MODEL);
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(GENERAL_PURPOSE_MODEL);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(GENERAL_PURPOSE_MODEL);
OllamaChatRequest requestModel =
builder.withMessage(
OllamaChatMessageRole.USER,
@@ -1184,8 +1144,7 @@ class OllamaIntegrationTest {
api.registerTool(EmployeeFinderToolSpec.getSpecification());
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(THINKING_TOOL_MODEL_2);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(THINKING_TOOL_MODEL_2);
OllamaChatRequest requestModel =
builder.withMessage(
OllamaChatMessageRole.USER,
@@ -1219,8 +1178,7 @@ class OllamaIntegrationTest {
File image1 = getImageFileFromClasspath("emoji-smile.jpeg");
File image2 = getImageFileFromClasspath("roses.jpg");
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(VISION_MODEL);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(VISION_MODEL);
OllamaChatRequest requestModel =
builder.withMessage(
OllamaChatMessageRole.USER,
@@ -1247,7 +1205,7 @@ class OllamaIntegrationTest {
void shouldHandleNonExistentModel() {
String nonExistentModel = "this-model-does-not-exist:latest";
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(nonExistentModel)
.withPrompt("Hello")
.withRaw(false)
@@ -1274,8 +1232,7 @@ class OllamaIntegrationTest {
api.pullModel(GENERAL_PURPOSE_MODEL);
List tools = Collections.emptyList();
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(GENERAL_PURPOSE_MODEL);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(GENERAL_PURPOSE_MODEL);
OllamaChatRequest requestModel =
builder.withMessage(OllamaChatMessageRole.USER, " ", tools) // whitespace only
.build();
@@ -1298,7 +1255,7 @@ class OllamaIntegrationTest {
void shouldGenerateWithExtremeParameters() throws OllamaException {
api.pullModel(GENERAL_PURPOSE_MODEL);
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(GENERAL_PURPOSE_MODEL)
.withPrompt("Generate a random word")
.withRaw(false)
@@ -1351,8 +1308,7 @@ class OllamaIntegrationTest {
void shouldChatWithKeepAlive() throws OllamaException {
api.pullModel(GENERAL_PURPOSE_MODEL);
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder().withModel(GENERAL_PURPOSE_MODEL);
+ OllamaChatRequest builder = OllamaChatRequest.builder().withModel(GENERAL_PURPOSE_MODEL);
OllamaChatRequest requestModel =
builder.withMessage(OllamaChatMessageRole.USER, "Hello, how are you?")
.withKeepAlive("5m") // Keep model loaded for 5 minutes
@@ -1376,7 +1332,7 @@ class OllamaIntegrationTest {
void shouldGenerateWithAdvancedOptions() throws OllamaException {
api.pullModel(GENERAL_PURPOSE_MODEL);
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(GENERAL_PURPOSE_MODEL)
.withPrompt("Write a detailed explanation of machine learning")
.withRaw(false)
@@ -1421,8 +1377,8 @@ class OllamaIntegrationTest {
new Thread(
() -> {
try {
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder()
+ OllamaChatRequest builder =
+ OllamaChatRequest.builder()
.withModel(GENERAL_PURPOSE_MODEL);
OllamaChatRequest requestModel =
builder.withMessage(
diff --git a/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java b/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java
index 091830e..dc21e5f 100644
--- a/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java
+++ b/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java
@@ -13,7 +13,6 @@ import static org.junit.jupiter.api.Assertions.*;
import io.github.ollama4j.Ollama;
import io.github.ollama4j.exceptions.OllamaException;
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
-import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.samples.AnnotatedTool;
@@ -205,7 +204,7 @@ public class WithAuth {
format.put("required", List.of("isNoon"));
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(model)
.withPrompt(prompt)
.withRaw(false)
diff --git a/src/test/java/io/github/ollama4j/unittests/TestMockedAPIs.java b/src/test/java/io/github/ollama4j/unittests/TestMockedAPIs.java
index 67ab5e6..a614b82 100644
--- a/src/test/java/io/github/ollama4j/unittests/TestMockedAPIs.java
+++ b/src/test/java/io/github/ollama4j/unittests/TestMockedAPIs.java
@@ -19,7 +19,6 @@ import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.embed.OllamaEmbedRequest;
import io.github.ollama4j.models.embed.OllamaEmbedResult;
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
-import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
import io.github.ollama4j.models.request.CustomModelRequest;
import io.github.ollama4j.models.response.ModelDetail;
@@ -158,7 +157,7 @@ class TestMockedAPIs {
OllamaGenerateStreamObserver observer = new OllamaGenerateStreamObserver(null, null);
try {
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(model)
.withPrompt(prompt)
.withRaw(false)
@@ -180,7 +179,7 @@ class TestMockedAPIs {
String prompt = "some prompt text";
try {
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(model)
.withPrompt(prompt)
.withRaw(false)
@@ -206,7 +205,7 @@ class TestMockedAPIs {
String prompt = "some prompt text";
try {
OllamaGenerateRequest request =
- OllamaGenerateRequestBuilder.builder()
+ OllamaGenerateRequest.builder()
.withModel(model)
.withPrompt(prompt)
.withRaw(false)
diff --git a/src/test/java/io/github/ollama4j/unittests/TestOllamaChatRequestBuilder.java b/src/test/java/io/github/ollama4j/unittests/TestOllamaChatRequestBuilder.java
index 7b069a6..03816f9 100644
--- a/src/test/java/io/github/ollama4j/unittests/TestOllamaChatRequestBuilder.java
+++ b/src/test/java/io/github/ollama4j/unittests/TestOllamaChatRequestBuilder.java
@@ -12,15 +12,14 @@ import static org.junit.jupiter.api.Assertions.*;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequest;
-import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import org.junit.jupiter.api.Test;
class TestOllamaChatRequestBuilder {
@Test
void testResetClearsMessagesButKeepsModelAndThink() {
- OllamaChatRequestBuilder builder =
- OllamaChatRequestBuilder.builder()
+ OllamaChatRequest builder =
+ OllamaChatRequest.builder()
.withModel("my-model")
.withThinking(true)
.withMessage(OllamaChatMessageRole.USER, "first");
diff --git a/src/test/java/io/github/ollama4j/unittests/jackson/TestChatRequestSerialization.java b/src/test/java/io/github/ollama4j/unittests/jackson/TestChatRequestSerialization.java
index ec6721b..9471a9c 100644
--- a/src/test/java/io/github/ollama4j/unittests/jackson/TestChatRequestSerialization.java
+++ b/src/test/java/io/github/ollama4j/unittests/jackson/TestChatRequestSerialization.java
@@ -13,7 +13,6 @@ import static org.junit.jupiter.api.Assertions.assertThrowsExactly;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequest;
-import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.utils.OptionsBuilder;
import java.io.File;
import java.util.Collections;
@@ -24,11 +23,11 @@ import org.junit.jupiter.api.Test;
public class TestChatRequestSerialization extends AbstractSerializationTest {
- private OllamaChatRequestBuilder builder;
+ private OllamaChatRequest builder;
@BeforeEach
public void init() {
- builder = OllamaChatRequestBuilder.builder().withModel("DummyModel");
+ builder = OllamaChatRequest.builder().withModel("DummyModel");
}
@Test
diff --git a/src/test/java/io/github/ollama4j/unittests/jackson/TestGenerateRequestSerialization.java b/src/test/java/io/github/ollama4j/unittests/jackson/TestGenerateRequestSerialization.java
index 2fe2fdc..f61baed 100644
--- a/src/test/java/io/github/ollama4j/unittests/jackson/TestGenerateRequestSerialization.java
+++ b/src/test/java/io/github/ollama4j/unittests/jackson/TestGenerateRequestSerialization.java
@@ -11,7 +11,6 @@ package io.github.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals;
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
-import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
import io.github.ollama4j.utils.OptionsBuilder;
import org.json.JSONObject;
import org.junit.jupiter.api.BeforeEach;
@@ -19,16 +18,17 @@ import org.junit.jupiter.api.Test;
class TestGenerateRequestSerialization extends AbstractSerializationTest {
- private OllamaGenerateRequestBuilder builder;
+ private OllamaGenerateRequest builder;
@BeforeEach
public void init() {
- builder = OllamaGenerateRequestBuilder.builder().withModel("Dummy Model");
+ builder = OllamaGenerateRequest.builder().withModel("Dummy Model");
}
@Test
public void testRequestOnlyMandatoryFields() {
- OllamaGenerateRequest req = builder.withPrompt("Some prompt").build();
+ OllamaGenerateRequest req =
+ builder.withPrompt("Some prompt").withModel("Dummy Model").build();
String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequest.class), req);
@@ -38,7 +38,10 @@ class TestGenerateRequestSerialization extends AbstractSerializationTest