diff --git a/src/main/java/io/github/ollama4j/OllamaAPI.java b/src/main/java/io/github/ollama4j/OllamaAPI.java
index a399adf..90c747f 100644
--- a/src/main/java/io/github/ollama4j/OllamaAPI.java
+++ b/src/main/java/io/github/ollama4j/OllamaAPI.java
@@ -1091,7 +1091,7 @@ public class OllamaAPI {
* @return an {@link OllamaAsyncResultStreamer} handle for polling and
* retrieving streamed results
*/
- public OllamaAsyncResultStreamer generateAsync(String model, String prompt, boolean raw, boolean think) {
+ public OllamaAsyncResultStreamer generate(String model, String prompt, boolean raw, boolean think) {
OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt);
ollamaRequestModel.setRaw(raw);
ollamaRequestModel.setThink(think);
@@ -1103,147 +1103,57 @@ public class OllamaAPI {
}
/**
- * With one or more image files, ask a question to a model running on Ollama
- * server. This is a
- * sync/blocking call.
- *
- * @param model the ollama model to ask the question to
- * @param prompt the prompt/question text
- * @param imageFiles the list of image files to use for the question
- * @param options the Options object - More
- * details on the options
- * @param streamHandler optional callback consumer that will be applied every
- * time a streamed response is received. If not set, the
- * stream parameter of the request is set to false.
- * @return OllamaResult that includes response text and time taken for response
- * @throws OllamaBaseException if the response indicates an error status
- * @throws IOException if an I/O error occurs during the HTTP request
- * @throws InterruptedException if the operation is interrupted
- */
- public OllamaResult generateWithImageFiles(String model, String prompt, List imageFiles, Options options,
- OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
- List images = new ArrayList<>();
- for (File imageFile : imageFiles) {
- images.add(encodeFileToBase64(imageFile));
- }
- OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images);
- ollamaRequestModel.setOptions(options.getOptionsMap());
- return generateSyncForOllamaRequestModel(ollamaRequestModel, null, streamHandler);
- }
-
- /**
- * Convenience method to call Ollama API without streaming responses.
+ * Generates a response from a model running on the Ollama server using one or more images as input.
*
- * Uses
- * {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)}
+ * <p>This method allows you to provide images (as {@link File}, {@code byte[]}, or image URL {@link String})
+ * along with a prompt to the specified model. The images are automatically encoded as base64 before being sent.
+ * Additional model options can be specified via the {@link Options} parameter.
+ *
*
- * @throws OllamaBaseException if the response indicates an error status
- * @throws IOException if an I/O error occurs during the HTTP request
- * @throws InterruptedException if the operation is interrupted
- */
- public OllamaResult generateWithImageFiles(String model, String prompt, List imageFiles, Options options)
- throws OllamaBaseException, IOException, InterruptedException {
- return generateWithImageFiles(model, prompt, imageFiles, options, null);
- }
-
- /**
- * With one or more image URLs, ask a question to a model running on Ollama
- * server. This is a
- * sync/blocking call.
- *
- * @param model the ollama model to ask the question to
- * @param prompt the prompt/question text
- * @param imageURLs the list of image URLs to use for the question
- * @param options the Options object - More
- * details on the options
- * @param streamHandler optional callback consumer that will be applied every
- * time a streamed response is received. If not set, the
- * stream parameter of the request is set to false.
- * @return OllamaResult that includes response text and time taken for response
- * @throws OllamaBaseException if the response indicates an error status
- * @throws IOException if an I/O error occurs during the HTTP request
- * @throws InterruptedException if the operation is interrupted
- * @throws URISyntaxException if the URI for the request is malformed
- */
- public OllamaResult generateWithImageURLs(String model, String prompt, List imageURLs, Options options,
- OllamaStreamHandler streamHandler)
- throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
- List images = new ArrayList<>();
- for (String imageURL : imageURLs) {
- images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL)));
- }
- OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images);
- ollamaRequestModel.setOptions(options.getOptionsMap());
- return generateSyncForOllamaRequestModel(ollamaRequestModel, null, streamHandler);
- }
-
- /**
- * Convenience method to call Ollama API without streaming responses.
*
- * Uses
- * {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)}
+ * <p>If a {@code streamHandler} is provided, the response will be streamed and the handler will be called
+ * for each streamed response chunk. If {@code streamHandler} is {@code null}, streaming is disabled and
+ * the full response is returned synchronously.
+ *
*
- * @throws OllamaBaseException if the response indicates an error status
- * @throws IOException if an I/O error occurs during the HTTP request
- * @throws InterruptedException if the operation is interrupted
- * @throws URISyntaxException if the URI for the request is malformed
- */
- public OllamaResult generateWithImageURLs(String model, String prompt, List imageURLs, Options options)
- throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
- return generateWithImageURLs(model, prompt, imageURLs, options, null);
- }
-
- /**
- * Synchronously generates a response using a list of image byte arrays.
- *
- * This method encodes the provided byte arrays into Base64 and sends them to
- * the Ollama server.
- *
- * @param model the Ollama model to use for generating the response
+ * @param model the name of the Ollama model to use for generating the response
* @param prompt the prompt or question text to send to the model
- * @param images the list of image data as byte arrays
- * @param options the Options object - More
- * details on the options
- * @param streamHandler optional callback that will be invoked with each
- * streamed response; if null, streaming is disabled
- * @return OllamaResult containing the response text and the time taken for the
- * response
- * @throws OllamaBaseException if the response indicates an error status
+ * @param images a list of images to use for the question; each element must be a {@link File}, {@code byte[]}, or a URL {@link String}
+ * @param options the {@link Options} object containing model parameters;
+ *                      see the <a href="https://github.com/ollama/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">Ollama model options documentation</a>
+ * @param streamHandler an optional callback that is invoked for each streamed response chunk;
+ * if {@code null}, disables streaming and returns the full response synchronously
+ * @return an {@link OllamaResult} containing the response text and time taken for the response
+ * @throws OllamaBaseException if the response indicates an error status or an invalid image type is provided
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
+ * @throws URISyntaxException if an image URL is malformed
*/
- public OllamaResult generateWithImages(String model, String prompt, List images, Options options,
- OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
+ public OllamaResult generateWithImages(String model, String prompt, List