From 407b7eb280ab01f9ff48895bb24d82f270c6082e Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Mon, 24 Mar 2025 00:25:20 +0530 Subject: [PATCH] Refactor OllamaAPI documentation and add structured response model - Improved formatting and readability of comments in OllamaAPI.java. - Introduced OllamaStructuredResult class to handle structured responses from the Ollama API. - Updated integration tests to include a new test for structured output from the API. - Cleaned up imports and ensured consistent code style across the OllamaAPIIntegrationTest class. --- .../java/io/github/ollama4j/OllamaAPI.java | 512 ++++++++++++------ .../response/OllamaStructuredResult.java | 22 + .../OllamaAPIIntegrationTest.java | 244 +++------ 3 files changed, 467 insertions(+), 311 deletions(-) create mode 100644 src/main/java/io/github/ollama4j/models/response/OllamaStructuredResult.java diff --git a/src/main/java/io/github/ollama4j/OllamaAPI.java b/src/main/java/io/github/ollama4j/OllamaAPI.java index b0d21c4..5229fe9 100644 --- a/src/main/java/io/github/ollama4j/OllamaAPI.java +++ b/src/main/java/io/github/ollama4j/OllamaAPI.java @@ -77,7 +77,8 @@ public class OllamaAPI { private final ToolRegistry toolRegistry = new ToolRegistry(); /** - * Instantiates the Ollama API with default Ollama host: http://localhost:11434 + * Instantiates the Ollama API with default Ollama host: + * http://localhost:11434 **/ public OllamaAPI() { this.host = "http://localhost:11434"; @@ -100,7 +101,8 @@ public class OllamaAPI { } /** - * Set basic authentication for accessing Ollama server that's behind a reverse-proxy/gateway. + * Set basic authentication for accessing Ollama server that's behind a + * reverse-proxy/gateway. * * @param username the username * @param password the password @@ -110,7 +112,8 @@ public class OllamaAPI { } /** - * Set Bearer authentication for accessing Ollama server that's behind a reverse-proxy/gateway. 
+ * Set Bearer authentication for accessing Ollama server that's behind a + * reverse-proxy/gateway. * * @param bearerToken the Bearer authentication token to provide */ @@ -128,7 +131,8 @@ public class OllamaAPI { HttpClient httpClient = HttpClient.newHttpClient(); HttpRequest httpRequest = null; try { - httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build(); + httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json") + .header("Content-type", "application/json").GET().build(); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -145,7 +149,8 @@ public class OllamaAPI { } /** - * Provides a list of running models and details about each model currently loaded into memory. + * Provides a list of running models and details about each model currently + * loaded into memory. * * @return ModelsProcessResponse containing details about the running models * @throws IOException if an I/O error occurs during the HTTP request @@ -157,7 +162,8 @@ public class OllamaAPI { HttpClient httpClient = HttpClient.newHttpClient(); HttpRequest httpRequest = null; try { - httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build(); + httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json") + .header("Content-type", "application/json").GET().build(); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -184,7 +190,8 @@ public class OllamaAPI { public List listModels() throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { String url = this.host + "/api/tags"; HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build(); + HttpRequest 
httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json") + .header("Content-type", "application/json").GET().build(); HttpResponse response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); int statusCode = response.statusCode(); String responseString = response.body(); @@ -196,19 +203,28 @@ public class OllamaAPI { } /** - * Retrieves a list of models from the Ollama library. This method fetches the available models directly from Ollama - * library page, including model details such as the name, pull count, popular tags, tag count, and the time when model was updated. + * Retrieves a list of models from the Ollama library. This method fetches the + * available models directly from Ollama + * library page, including model details such as the name, pull count, popular + * tags, tag count, and the time when model was updated. * - * @return A list of {@link LibraryModel} objects representing the models available in the Ollama library. - * @throws OllamaBaseException If the HTTP request fails or the response is not successful (non-200 status code). - * @throws IOException If an I/O error occurs during the HTTP request or response processing. - * @throws InterruptedException If the thread executing the request is interrupted. - * @throws URISyntaxException If there is an error creating the URI for the HTTP request. + * @return A list of {@link LibraryModel} objects representing the models + * available in the Ollama library. + * @throws OllamaBaseException If the HTTP request fails or the response is not + * successful (non-200 status code). + * @throws IOException If an I/O error occurs during the HTTP request + * or response processing. + * @throws InterruptedException If the thread executing the request is + * interrupted. + * @throws URISyntaxException If there is an error creating the URI for the + * HTTP request. 
*/ - public List listModelsFromLibrary() throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { + public List listModelsFromLibrary() + throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { String url = "https://ollama.com/library"; HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build(); + HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json") + .header("Content-type", "application/json").GET().build(); HttpResponse response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); int statusCode = response.statusCode(); String responseString = response.body(); @@ -223,7 +239,8 @@ public class OllamaAPI { Elements pullCounts = e.select("div:nth-of-type(2) > p > span:first-of-type > span:first-of-type"); Elements popularTags = e.select("div > div > span"); Elements totalTags = e.select("div:nth-of-type(2) > p > span:nth-of-type(2) > span:first-of-type"); - Elements lastUpdatedTime = e.select("div:nth-of-type(2) > p > span:nth-of-type(3) > span:nth-of-type(2)"); + Elements lastUpdatedTime = e + .select("div:nth-of-type(2) > p > span:nth-of-type(3) > span:nth-of-type(2)"); if (names.first() == null || names.isEmpty()) { // if name cannot be extracted, skip. 
@@ -231,9 +248,12 @@ public class OllamaAPI { } Optional.ofNullable(names.first()).map(Element::text).ifPresent(model::setName); model.setDescription(Optional.ofNullable(desc.first()).map(Element::text).orElse("")); - model.setPopularTags(Optional.of(popularTags).map(tags -> tags.stream().map(Element::text).collect(Collectors.toList())).orElse(new ArrayList<>())); + model.setPopularTags(Optional.of(popularTags) + .map(tags -> tags.stream().map(Element::text).collect(Collectors.toList())) + .orElse(new ArrayList<>())); model.setPullCount(Optional.ofNullable(pullCounts.first()).map(Element::text).orElse("")); - model.setTotalTags(Optional.ofNullable(totalTags.first()).map(Element::text).map(Integer::parseInt).orElse(0)); + model.setTotalTags( + Optional.ofNullable(totalTags.first()).map(Element::text).map(Integer::parseInt).orElse(0)); model.setLastUpdated(Optional.ofNullable(lastUpdatedTime.first()).map(Element::text).orElse("")); models.add(model); @@ -246,22 +266,32 @@ public class OllamaAPI { /** * Fetches the tags associated with a specific model from Ollama library. - * This method fetches the available model tags directly from Ollama library model page, including model tag name, size and time when model was last updated + * This method fetches the available model tags directly from Ollama library + * model page, including model tag name, size and time when model was last + * updated * into a list of {@link LibraryModelTag} objects. * - * @param libraryModel the {@link LibraryModel} object which contains the name of the library model + * @param libraryModel the {@link LibraryModel} object which contains the name + * of the library model * for which the tags need to be fetched. - * @return a list of {@link LibraryModelTag} objects containing the extracted tags and their associated metadata. 
- * @throws OllamaBaseException if the HTTP response status code indicates an error (i.e., not 200 OK), - * or if there is any other issue during the request or response processing. - * @throws IOException if an input/output exception occurs during the HTTP request or response handling. - * @throws InterruptedException if the thread is interrupted while waiting for the HTTP response. + * @return a list of {@link LibraryModelTag} objects containing the extracted + * tags and their associated metadata. + * @throws OllamaBaseException if the HTTP response status code indicates an + * error (i.e., not 200 OK), + * or if there is any other issue during the + * request or response processing. + * @throws IOException if an input/output exception occurs during the + * HTTP request or response handling. + * @throws InterruptedException if the thread is interrupted while waiting for + * the HTTP response. * @throws URISyntaxException if the URI format is incorrect or invalid. */ - public LibraryModelDetail getLibraryModelDetails(LibraryModel libraryModel) throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { + public LibraryModelDetail getLibraryModelDetails(LibraryModel libraryModel) + throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { String url = String.format("https://ollama.com/library/%s/tags", libraryModel.getName()); HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build(); + HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json") + .header("Content-type", "application/json").GET().build(); HttpResponse response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); int statusCode = response.statusCode(); String responseString = response.body(); @@ -269,7 +299,8 @@ public class OllamaAPI { 
List libraryModelTags = new ArrayList<>(); if (statusCode == 200) { Document doc = Jsoup.parse(responseString); - Elements tagSections = doc.select("html > body > main > div > section > div > div > div:nth-child(n+2) > div"); + Elements tagSections = doc + .select("html > body > main > div > section > div > div > div:nth-child(n+2) > div"); for (Element e : tagSections) { Elements tags = e.select("div > a > div"); Elements tagsMetas = e.select("div > span"); @@ -282,8 +313,11 @@ public class OllamaAPI { } libraryModelTag.setName(libraryModel.getName()); Optional.ofNullable(tags.first()).map(Element::text).ifPresent(libraryModelTag::setTag); - libraryModelTag.setSize(Optional.ofNullable(tagsMetas.first()).map(element -> element.text().split("•")).filter(parts -> parts.length > 1).map(parts -> parts[1].trim()).orElse("")); - libraryModelTag.setLastUpdated(Optional.ofNullable(tagsMetas.first()).map(element -> element.text().split("•")).filter(parts -> parts.length > 1).map(parts -> parts[2].trim()).orElse("")); + libraryModelTag.setSize(Optional.ofNullable(tagsMetas.first()).map(element -> element.text().split("•")) + .filter(parts -> parts.length > 1).map(parts -> parts[1].trim()).orElse("")); + libraryModelTag + .setLastUpdated(Optional.ofNullable(tagsMetas.first()).map(element -> element.text().split("•")) + .filter(parts -> parts.length > 1).map(parts -> parts[2].trim()).orElse("")); libraryModelTags.add(libraryModelTag); } LibraryModelDetail libraryModelDetail = new LibraryModelDetail(); @@ -298,24 +332,35 @@ public class OllamaAPI { /** * Finds a specific model using model name and tag from Ollama library. *

- * This method retrieves the model from the Ollama library by its name, then fetches its tags. - * It searches through the tags of the model to find one that matches the specified tag name. - * If the model or the tag is not found, it throws a {@link NoSuchElementException}. + * This method retrieves the model from the Ollama library by its name, then + * fetches its tags. + * It searches through the tags of the model to find one that matches the + * specified tag name. + * If the model or the tag is not found, it throws a + * {@link NoSuchElementException}. * * @param modelName The name of the model to search for in the library. * @param tag The tag name to search for within the specified model. - * @return The {@link LibraryModelTag} associated with the specified model and tag. - * @throws OllamaBaseException If there is a problem with the Ollama library operations. + * @return The {@link LibraryModelTag} associated with the specified model and + * tag. + * @throws OllamaBaseException If there is a problem with the Ollama library + * operations. * @throws IOException If an I/O error occurs during the operation. * @throws URISyntaxException If there is an error with the URI syntax. * @throws InterruptedException If the operation is interrupted. * @throws NoSuchElementException If the model or the tag is not found. 
*/ - public LibraryModelTag findModelTagFromLibrary(String modelName, String tag) throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { + public LibraryModelTag findModelTagFromLibrary(String modelName, String tag) + throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { List libraryModels = this.listModelsFromLibrary(); - LibraryModel libraryModel = libraryModels.stream().filter(model -> model.getName().equals(modelName)).findFirst().orElseThrow(() -> new NoSuchElementException(String.format("Model by name '%s' not found", modelName))); + LibraryModel libraryModel = libraryModels.stream().filter(model -> model.getName().equals(modelName)) + .findFirst().orElseThrow( + () -> new NoSuchElementException(String.format("Model by name '%s' not found", modelName))); LibraryModelDetail libraryModelDetail = this.getLibraryModelDetails(libraryModel); - LibraryModelTag libraryModelTag = libraryModelDetail.getTags().stream().filter(tagName -> tagName.getTag().equals(tag)).findFirst().orElseThrow(() -> new NoSuchElementException(String.format("Tag '%s' for model '%s' not found", tag, modelName))); + LibraryModelTag libraryModelTag = libraryModelDetail.getTags().stream() + .filter(tagName -> tagName.getTag().equals(tag)).findFirst() + .orElseThrow(() -> new NoSuchElementException( + String.format("Tag '%s' for model '%s' not found", tag, modelName))); return libraryModelTag; } @@ -329,7 +374,8 @@ public class OllamaAPI { * @throws InterruptedException if the operation is interrupted * @throws URISyntaxException if the URI for the request is malformed */ - public void pullModel(String modelName) throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { + public void pullModel(String modelName) + throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { String url = this.host + "/api/pull"; String jsonData = new ModelRequest(modelName).toString(); HttpRequest request = 
getRequestBuilderDefault(new URI(url)) @@ -343,7 +389,8 @@ public class OllamaAPI { InputStream responseBodyStream = response.body(); String responseString = ""; boolean success = false; // Flag to check the pull success. - try (BufferedReader reader = new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { + try (BufferedReader reader = new BufferedReader( + new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { String line; while ((line = reader.readLine()) != null) { ModelPullResponse modelPullResponse = Utils.getObjectMapper().readValue(line, ModelPullResponse.class); @@ -369,11 +416,11 @@ public class OllamaAPI { } } - public String getVersion() throws URISyntaxException, IOException, InterruptedException, OllamaBaseException { String url = this.host + "/api/version"; HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build(); + HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json") + .header("Content-type", "application/json").GET().build(); HttpResponse response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); int statusCode = response.statusCode(); String responseString = response.body(); @@ -386,17 +433,20 @@ public class OllamaAPI { /** * Pulls a model using the specified Ollama library model tag. - * The model is identified by a name and a tag, which are combined into a single identifier + * The model is identified by a name and a tag, which are combined into a single + * identifier * in the format "name:tag" to pull the corresponding model. * - * @param libraryModelTag the {@link LibraryModelTag} object containing the name and tag + * @param libraryModelTag the {@link LibraryModelTag} object containing the name + * and tag * of the model to be pulled. 
* @throws OllamaBaseException if the response indicates an error status * @throws IOException if an I/O error occurs during the HTTP request * @throws InterruptedException if the operation is interrupted * @throws URISyntaxException if the URI for the request is malformed */ - public void pullModel(LibraryModelTag libraryModelTag) throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { + public void pullModel(LibraryModelTag libraryModelTag) + throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { String tagToPull = String.format("%s:%s", libraryModelTag.getName(), libraryModelTag.getTag()); pullModel(tagToPull); } @@ -411,10 +461,12 @@ public class OllamaAPI { * @throws InterruptedException if the operation is interrupted * @throws URISyntaxException if the URI for the request is malformed */ - public ModelDetail getModelDetails(String modelName) throws IOException, OllamaBaseException, InterruptedException, URISyntaxException { + public ModelDetail getModelDetails(String modelName) + throws IOException, OllamaBaseException, InterruptedException, URISyntaxException { String url = this.host + "/api/show"; String jsonData = new ModelRequest(modelName).toString(); - HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData)).build(); + HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json") + .header("Content-type", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData)).build(); HttpClient client = HttpClient.newHttpClient(); HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); int statusCode = response.statusCode(); @@ -427,8 +479,10 @@ public class OllamaAPI { } /** - * Create a custom model from a model file. Read more about custom model file creation here. 
+ * Create a custom model from a model file. Read more about custom model file + * creation here. * * @param modelName the name of the custom model to be created. * @param modelFilePath the path to model file that exists on the Ollama server. @@ -438,10 +492,13 @@ public class OllamaAPI { * @throws URISyntaxException if the URI for the request is malformed */ @Deprecated - public void createModelWithFilePath(String modelName, String modelFilePath) throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { + public void createModelWithFilePath(String modelName, String modelFilePath) + throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { String url = this.host + "/api/create"; String jsonData = new CustomModelFilePathRequest(modelName, modelFilePath).toString(); - HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-Type", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build(); + HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json") + .header("Content-Type", "application/json") + .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build(); HttpClient client = HttpClient.newHttpClient(); HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); int statusCode = response.statusCode(); @@ -449,7 +506,8 @@ public class OllamaAPI { if (statusCode != 200) { throw new OllamaBaseException(statusCode + " - " + responseString); } - // FIXME: Ollama API returns HTTP status code 200 for model creation failure cases. Correct this + // FIXME: Ollama API returns HTTP status code 200 for model creation failure + // cases. Correct this // if the issue is fixed in the Ollama API server. 
if (responseString.contains("error")) { throw new OllamaBaseException(responseString); @@ -460,21 +518,27 @@ public class OllamaAPI { } /** - * Create a custom model from a model file. Read more about custom model file creation here. + * Create a custom model from a model file. Read more about custom model file + * creation here. * * @param modelName the name of the custom model to be created. - * @param modelFileContents the path to model file that exists on the Ollama server. + * @param modelFileContents the path to model file that exists on the Ollama + * server. * @throws OllamaBaseException if the response indicates an error status * @throws IOException if an I/O error occurs during the HTTP request * @throws InterruptedException if the operation is interrupted * @throws URISyntaxException if the URI for the request is malformed */ @Deprecated - public void createModelWithModelFileContents(String modelName, String modelFileContents) throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { + public void createModelWithModelFileContents(String modelName, String modelFileContents) + throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { String url = this.host + "/api/create"; String jsonData = new CustomModelFileContentsRequest(modelName, modelFileContents).toString(); - HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-Type", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build(); + HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json") + .header("Content-Type", "application/json") + .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build(); HttpClient client = HttpClient.newHttpClient(); HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); int statusCode = response.statusCode(); @@ -492,7 +556,8 
@@ public class OllamaAPI { /** * Create a custom model. Read more about custom model creation here. + * href= + * "https://github.com/ollama/ollama/blob/main/docs/api.md#create-a-model">here. * * @param customModelRequest custom model spec * @throws OllamaBaseException if the response indicates an error status @@ -500,10 +565,13 @@ public class OllamaAPI { * @throws InterruptedException if the operation is interrupted * @throws URISyntaxException if the URI for the request is malformed */ - public void createModel(CustomModelRequest customModelRequest) throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { + public void createModel(CustomModelRequest customModelRequest) + throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { String url = this.host + "/api/create"; String jsonData = customModelRequest.toString(); - HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-Type", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build(); + HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json") + .header("Content-Type", "application/json") + .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build(); HttpClient client = HttpClient.newHttpClient(); HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); int statusCode = response.statusCode(); @@ -523,16 +591,20 @@ public class OllamaAPI { * Delete a model from Ollama server. * * @param modelName the name of the model to be deleted. - * @param ignoreIfNotPresent ignore errors if the specified model is not present on Ollama server. + * @param ignoreIfNotPresent ignore errors if the specified model is not present + * on Ollama server. 
* @throws OllamaBaseException if the response indicates an error status * @throws IOException if an I/O error occurs during the HTTP request * @throws InterruptedException if the operation is interrupted * @throws URISyntaxException if the URI for the request is malformed */ - public void deleteModel(String modelName, boolean ignoreIfNotPresent) throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { + public void deleteModel(String modelName, boolean ignoreIfNotPresent) + throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { String url = this.host + "/api/delete"; String jsonData = new ModelRequest(modelName).toString(); - HttpRequest request = getRequestBuilderDefault(new URI(url)).method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).header("Accept", "application/json").header("Content-type", "application/json").build(); + HttpRequest request = getRequestBuilderDefault(new URI(url)) + .method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)) + .header("Accept", "application/json").header("Content-type", "application/json").build(); HttpClient client = HttpClient.newHttpClient(); HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); int statusCode = response.statusCode(); @@ -557,7 +629,8 @@ public class OllamaAPI { * @deprecated Use {@link #embed(String, List)} instead. */ @Deprecated - public List generateEmbeddings(String model, String prompt) throws IOException, InterruptedException, OllamaBaseException { + public List generateEmbeddings(String model, String prompt) + throws IOException, InterruptedException, OllamaBaseException { return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt)); } @@ -572,17 +645,20 @@ public class OllamaAPI { * @deprecated Use {@link #embed(OllamaEmbedRequestModel)} instead. 
*/ @Deprecated - public List generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException { + public List generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) + throws IOException, InterruptedException, OllamaBaseException { URI uri = URI.create(this.host + "/api/embeddings"); String jsonData = modelRequest.toString(); HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest.Builder requestBuilder = getRequestBuilderDefault(uri).header("Accept", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData)); + HttpRequest.Builder requestBuilder = getRequestBuilderDefault(uri).header("Accept", "application/json") + .POST(HttpRequest.BodyPublishers.ofString(jsonData)); HttpRequest request = requestBuilder.build(); HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); int statusCode = response.statusCode(); String responseBody = response.body(); if (statusCode == 200) { - OllamaEmbeddingResponseModel embeddingResponse = Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class); + OllamaEmbeddingResponseModel embeddingResponse = Utils.getObjectMapper().readValue(responseBody, + OllamaEmbeddingResponseModel.class); return embeddingResponse.getEmbedding(); } else { throw new OllamaBaseException(statusCode + " - " + responseBody); @@ -599,7 +675,8 @@ public class OllamaAPI { * @throws IOException if an I/O error occurs during the HTTP request * @throws InterruptedException if the operation is interrupted */ - public OllamaEmbedResponseModel embed(String model, List inputs) throws IOException, InterruptedException, OllamaBaseException { + public OllamaEmbedResponseModel embed(String model, List inputs) + throws IOException, InterruptedException, OllamaBaseException { return embed(new OllamaEmbedRequestModel(model, inputs)); } @@ -612,12 +689,14 @@ public class OllamaAPI { * @throws IOException if an I/O error occurs during 
the HTTP request * @throws InterruptedException if the operation is interrupted */ - public OllamaEmbedResponseModel embed(OllamaEmbedRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException { + public OllamaEmbedResponseModel embed(OllamaEmbedRequestModel modelRequest) + throws IOException, InterruptedException, OllamaBaseException { URI uri = URI.create(this.host + "/api/embed"); String jsonData = Utils.getObjectMapper().writeValueAsString(modelRequest); HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest request = HttpRequest.newBuilder(uri).header("Accept", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData)).build(); + HttpRequest request = HttpRequest.newBuilder(uri).header("Accept", "application/json") + .POST(HttpRequest.BodyPublishers.ofString(jsonData)).build(); HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); int statusCode = response.statusCode(); @@ -631,21 +710,26 @@ public class OllamaAPI { } /** - * Generate response for a question to a model running on Ollama server. This is a sync/blocking + * Generate response for a question to a model running on Ollama server. This is + * a sync/blocking * call. * * @param model the ollama model to ask the question to * @param prompt the prompt/question text * @param options the Options object - More + * href= + * "https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More * details on the options - * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. + * @param streamHandler optional callback consumer that will be applied every + * time a streamed response is received. If not set, the + * stream parameter of the request is set to false. 
* @return OllamaResult that includes response text and time taken for response * @throws OllamaBaseException if the response indicates an error status * @throws IOException if an I/O error occurs during the HTTP request * @throws InterruptedException if the operation is interrupted */ - public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { + public OllamaResult generate(String model, String prompt, boolean raw, Options options, + OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt); ollamaRequestModel.setRaw(raw); ollamaRequestModel.setOptions(options.getOptionsMap()); @@ -653,36 +737,90 @@ public class OllamaAPI { } /** - * Generates response using the specified AI model and prompt (in blocking mode). + * Generates structured output from the specified AI model and prompt. + * + * @param model The name or identifier of the AI model to use for generating + * the response. + * @param prompt The input text or prompt to provide to the AI model. + * @param format A map containing the format specification for the structured output. + * @return An instance of {@link OllamaStructuredResult} containing the structured response. + * @throws OllamaBaseException if the response indicates an error status. + * @throws IOException if an I/O error occurs during the HTTP request. + * @throws InterruptedException if the operation is interrupted. 
+ */ + public OllamaStructuredResult generate(String model, String prompt, Map format) + throws OllamaBaseException, IOException, InterruptedException { + URI uri = URI.create(this.host + "/api/generate"); + + Map requestBody = new HashMap<>(); + requestBody.put("model", model); + requestBody.put("prompt", prompt); + requestBody.put("stream", false); + requestBody.put("format", format); + + String jsonData = Utils.getObjectMapper().writeValueAsString(requestBody); + HttpClient httpClient = HttpClient.newHttpClient(); + + HttpRequest request = HttpRequest.newBuilder(uri) + .header("Content-Type", "application/json") + .POST(HttpRequest.BodyPublishers.ofString(jsonData)) + .build(); + + HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); + int statusCode = response.statusCode(); + String responseBody = response.body(); + + if (statusCode == 200) { + return Utils.getObjectMapper().readValue(responseBody, OllamaStructuredResult.class); + } else { + throw new OllamaBaseException(statusCode + " - " + responseBody); + } + } + + /** + * Generates response using the specified AI model and prompt (in blocking + * mode). *

* Uses {@link #generate(String, String, boolean, Options, OllamaStreamHandler)} * - * @param model The name or identifier of the AI model to use for generating the response. + * @param model The name or identifier of the AI model to use for generating + * the response. * @param prompt The input text or prompt to provide to the AI model. - * @param raw In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the raw parameter to disable templating. Also note that raw mode will not return a context. - * @param options Additional options or configurations to use when generating the response. + * @param raw In some cases, you may wish to bypass the templating system + * and provide a full prompt. In this case, you can use the raw + * parameter to disable templating. Also note that raw mode will + * not return a context. + * @param options Additional options or configurations to use when generating + * the response. * @return {@link OllamaResult} * @throws OllamaBaseException if the response indicates an error status * @throws IOException if an I/O error occurs during the HTTP request * @throws InterruptedException if the operation is interrupted */ - public OllamaResult generate(String model, String prompt, boolean raw, Options options) throws OllamaBaseException, IOException, InterruptedException { + public OllamaResult generate(String model, String prompt, boolean raw, Options options) + throws OllamaBaseException, IOException, InterruptedException { return generate(model, prompt, raw, options, null); } /** - * Generates response using the specified AI model and prompt (in blocking mode), and then invokes a set of tools + * Generates response using the specified AI model and prompt (in blocking + * mode), and then invokes a set of tools * on the generated response. * - * @param model The name or identifier of the AI model to use for generating the response. 
+ * @param model The name or identifier of the AI model to use for generating + * the response. * @param prompt The input text or prompt to provide to the AI model. - * @param options Additional options or configurations to use when generating the response. - * @return {@link OllamaToolsResult} An OllamaToolsResult object containing the response from the AI model and the results of invoking the tools on that output. + * @param options Additional options or configurations to use when generating + * the response. + * @return {@link OllamaToolsResult} An OllamaToolsResult object containing the + * response from the AI model and the results of invoking the tools on + * that output. * @throws OllamaBaseException if the response indicates an error status * @throws IOException if an I/O error occurs during the HTTP request * @throws InterruptedException if the operation is interrupted */ - public OllamaToolsResult generateWithTools(String model, String prompt, Options options) throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException { + public OllamaToolsResult generateWithTools(String model, String prompt, Options options) + throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException { boolean raw = true; OllamaToolsResult toolResult = new OllamaToolsResult(); Map toolResults = new HashMap<>(); @@ -717,8 +855,7 @@ public class OllamaAPI { } toolFunctionCallSpecs = objectMapper.readValue( toolsResponse, - objectMapper.getTypeFactory().constructCollectionType(List.class, ToolFunctionCallSpec.class) - ); + objectMapper.getTypeFactory().constructCollectionType(List.class, ToolFunctionCallSpec.class)); } for (ToolFunctionCallSpec toolFunctionCallSpec : toolFunctionCallSpecs) { toolResults.put(toolFunctionCallSpec, invokeTool(toolFunctionCallSpec)); @@ -728,8 +865,10 @@ public class OllamaAPI { } /** - * Generate response for a question to a model running on Ollama server and get a callback handle - * that can be used to 
check for status and get the response from the model later. This would be + * Generate response for a question to a model running on Ollama server and get + * a callback handle + * that can be used to check for status and get the response from the model + * later. This would be * an async/non-blocking call. * * @param model the ollama model to ask the question to @@ -740,28 +879,34 @@ public class OllamaAPI { OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt); ollamaRequestModel.setRaw(raw); URI uri = URI.create(this.host + "/api/generate"); - OllamaAsyncResultStreamer ollamaAsyncResultStreamer = new OllamaAsyncResultStreamer(getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds); + OllamaAsyncResultStreamer ollamaAsyncResultStreamer = new OllamaAsyncResultStreamer( + getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds); ollamaAsyncResultStreamer.start(); return ollamaAsyncResultStreamer; } /** - * With one or more image files, ask a question to a model running on Ollama server. This is a + * With one or more image files, ask a question to a model running on Ollama + * server. This is a * sync/blocking call. * * @param model the ollama model to ask the question to * @param prompt the prompt/question text * @param imageFiles the list of image files to use for the question * @param options the Options object - More + * href= + * "https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More * details on the options - * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. + * @param streamHandler optional callback consumer that will be applied every + * time a streamed response is received. If not set, the + * stream parameter of the request is set to false. 
* @return OllamaResult that includes response text and time taken for response * @throws OllamaBaseException if the response indicates an error status * @throws IOException if an I/O error occurs during the HTTP request * @throws InterruptedException if the operation is interrupted */ - public OllamaResult generateWithImageFiles(String model, String prompt, List imageFiles, Options options, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { + public OllamaResult generateWithImageFiles(String model, String prompt, List imageFiles, Options options, + OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { List images = new ArrayList<>(); for (File imageFile : imageFiles) { images.add(encodeFileToBase64(imageFile)); @@ -774,34 +919,42 @@ public class OllamaAPI { /** * Convenience method to call Ollama API without streaming responses. *

- * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)} + * Uses + * {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)} * * @throws OllamaBaseException if the response indicates an error status * @throws IOException if an I/O error occurs during the HTTP request * @throws InterruptedException if the operation is interrupted */ - public OllamaResult generateWithImageFiles(String model, String prompt, List imageFiles, Options options) throws OllamaBaseException, IOException, InterruptedException { + public OllamaResult generateWithImageFiles(String model, String prompt, List imageFiles, Options options) + throws OllamaBaseException, IOException, InterruptedException { return generateWithImageFiles(model, prompt, imageFiles, options, null); } /** - * With one or more image URLs, ask a question to a model running on Ollama server. This is a + * With one or more image URLs, ask a question to a model running on Ollama + * server. This is a * sync/blocking call. * * @param model the ollama model to ask the question to * @param prompt the prompt/question text * @param imageURLs the list of image URLs to use for the question * @param options the Options object - More + * href= + * "https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More * details on the options - * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. + * @param streamHandler optional callback consumer that will be applied every + * time a streamed response is received. If not set, the + * stream parameter of the request is set to false. 
* @return OllamaResult that includes response text and time taken for response * @throws OllamaBaseException if the response indicates an error status * @throws IOException if an I/O error occurs during the HTTP request * @throws InterruptedException if the operation is interrupted * @throws URISyntaxException if the URI for the request is malformed */ - public OllamaResult generateWithImageURLs(String model, String prompt, List imageURLs, Options options, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { + public OllamaResult generateWithImageURLs(String model, String prompt, List imageURLs, Options options, + OllamaStreamHandler streamHandler) + throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { List images = new ArrayList<>(); for (String imageURL : imageURLs) { images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL))); @@ -814,38 +967,45 @@ public class OllamaAPI { /** * Convenience method to call Ollama API without streaming responses. *

- * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)} + * Uses + * {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)} * * @throws OllamaBaseException if the response indicates an error status * @throws IOException if an I/O error occurs during the HTTP request * @throws InterruptedException if the operation is interrupted * @throws URISyntaxException if the URI for the request is malformed */ - public OllamaResult generateWithImageURLs(String model, String prompt, List imageURLs, Options options) throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { + public OllamaResult generateWithImageURLs(String model, String prompt, List imageURLs, Options options) + throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { return generateWithImageURLs(model, prompt, imageURLs, options, null); } /** - * Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the api + * Ask a question to a model based on a given message stack (i.e. a chat + * history). Creates a synchronous call to the api * 'api/chat'. * * @param model the ollama model to ask the question to * @param messages chat history / message stack to send to the model - * @return {@link OllamaChatResult} containing the api response and the message history including the newly aqcuired assistant response. + * @return {@link OllamaChatResult} containing the api response and the message + * history including the newly acquired assistant response. 
* @throws OllamaBaseException any response code than 200 has been returned * @throws IOException in case the responseStream can not be read - * @throws InterruptedException in case the server is not reachable or network issues happen + * @throws InterruptedException in case the server is not reachable or network + * issues happen * @throws OllamaBaseException if the response indicates an error status * @throws IOException if an I/O error occurs during the HTTP request * @throws InterruptedException if the operation is interrupted */ - public OllamaChatResult chat(String model, List messages) throws OllamaBaseException, IOException, InterruptedException { + public OllamaChatResult chat(String model, List messages) + throws OllamaBaseException, IOException, InterruptedException { OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model); return chat(builder.withMessages(messages).build()); } /** - * Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}. + * Ask a question to a model using an {@link OllamaChatRequest}. This can be + * constructed using an {@link OllamaChatRequestBuilder}. *

* Hint: the OllamaChatRequestModel#getStream() property is not implemented. * @@ -853,55 +1013,69 @@ public class OllamaAPI { * @return {@link OllamaChatResult} * @throws OllamaBaseException any response code than 200 has been returned * @throws IOException in case the responseStream can not be read - * @throws InterruptedException in case the server is not reachable or network issues happen + * @throws InterruptedException in case the server is not reachable or network + * issues happen * @throws OllamaBaseException if the response indicates an error status * @throws IOException if an I/O error occurs during the HTTP request * @throws InterruptedException if the operation is interrupted */ - public OllamaChatResult chat(OllamaChatRequest request) throws OllamaBaseException, IOException, InterruptedException { + public OllamaChatResult chat(OllamaChatRequest request) + throws OllamaBaseException, IOException, InterruptedException { return chat(request, null); } /** - * Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}. + * Ask a question to a model using an {@link OllamaChatRequest}. This can be + * constructed using an {@link OllamaChatRequestBuilder}. *

* Hint: the OllamaChatRequestModel#getStream() property is not implemented. * * @param request request object to be sent to the server - * @param streamHandler callback handler to handle the last message from stream (caution: all previous messages from stream will be concatenated) + * @param streamHandler callback handler to handle the last message from stream + * (caution: all previous messages from stream will be + * concatenated) * @return {@link OllamaChatResult} * @throws OllamaBaseException any response code than 200 has been returned * @throws IOException in case the responseStream can not be read - * @throws InterruptedException in case the server is not reachable or network issues happen + * @throws InterruptedException in case the server is not reachable or network + * issues happen * @throws OllamaBaseException if the response indicates an error status * @throws IOException if an I/O error occurs during the HTTP request * @throws InterruptedException if the operation is interrupted */ - public OllamaChatResult chat(OllamaChatRequest request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { + public OllamaChatResult chat(OllamaChatRequest request, OllamaStreamHandler streamHandler) + throws OllamaBaseException, IOException, InterruptedException { return chatStreaming(request, new OllamaChatStreamObserver(streamHandler)); } /** - * Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}. + * Ask a question to a model using an {@link OllamaChatRequest}. This can be + * constructed using an {@link OllamaChatRequestBuilder}. *

* Hint: the OllamaChatRequestModel#getStream() property is not implemented. * * @param request request object to be sent to the server - * @param tokenHandler callback handler to handle the last token from stream (caution: all previous messages from stream will be concatenated) + * @param tokenHandler callback handler to handle the last token from stream + * (caution: all previous messages from stream will be + * concatenated) * @return {@link OllamaChatResult} * @throws OllamaBaseException any response code than 200 has been returned * @throws IOException in case the responseStream can not be read - * @throws InterruptedException in case the server is not reachable or network issues happen + * @throws InterruptedException in case the server is not reachable or network + * issues happen * @throws OllamaBaseException if the response indicates an error status * @throws IOException if an I/O error occurs during the HTTP request * @throws InterruptedException if the operation is interrupted */ - public OllamaChatResult chatStreaming(OllamaChatRequest request, OllamaTokenHandler tokenHandler) throws OllamaBaseException, IOException, InterruptedException { - OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, auth, requestTimeoutSeconds, verbose); + public OllamaChatResult chatStreaming(OllamaChatRequest request, OllamaTokenHandler tokenHandler) + throws OllamaBaseException, IOException, InterruptedException { + OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, auth, requestTimeoutSeconds, + verbose); OllamaChatResult result; // add all registered tools to Request - request.setTools(toolRegistry.getRegisteredSpecs().stream().map(Tools.ToolSpecification::getToolPrompt).collect(Collectors.toList())); + request.setTools(toolRegistry.getRegisteredSpecs().stream().map(Tools.ToolSpecification::getToolPrompt) + .collect(Collectors.toList())); if (tokenHandler != null) { request.setStream(true); @@ -919,7 +1093,8 @@ public class 
OllamaAPI { ToolFunction toolFunction = toolRegistry.getToolFunction(toolName); Map arguments = toolCall.getFunction().getArguments(); Object res = toolFunction.apply(arguments); - request.getMessages().add(new OllamaChatMessage(OllamaChatMessageRole.TOOL, "[TOOL_RESULTS]" + toolName + "(" + arguments.keySet() + ") : " + res + "[/TOOL_RESULTS]")); + request.getMessages().add(new OllamaChatMessage(OllamaChatMessageRole.TOOL, + "[TOOL_RESULTS]" + toolName + "(" + arguments.keySet() + ") : " + res + "[/TOOL_RESULTS]")); } if (tokenHandler != null) { @@ -935,9 +1110,11 @@ public class OllamaAPI { } /** - * Registers a single tool in the tool registry using the provided tool specification. + * Registers a single tool in the tool registry using the provided tool + * specification. * - * @param toolSpecification the specification of the tool to register. It contains the + * @param toolSpecification the specification of the tool to register. It + * contains the * tool's function name and other relevant information. */ public void registerTool(Tools.ToolSpecification toolSpecification) { @@ -948,11 +1125,14 @@ public class OllamaAPI { } /** - * Registers multiple tools in the tool registry using a list of tool specifications. + * Registers multiple tools in the tool registry using a list of tool + * specifications. * Iterates over the list and adds each tool specification to the registry. * - * @param toolSpecifications a list of tool specifications to register. Each specification - * contains information about a tool, such as its function name. + * @param toolSpecifications a list of tool specifications to register. Each + * specification + * contains information about a tool, such as its + * function name. 
*/ public void registerTools(List toolSpecifications) { for (Tools.ToolSpecification toolSpecification : toolSpecifications) { @@ -961,12 +1141,16 @@ public class OllamaAPI { } /** - * Registers tools based on the annotations found on the methods of the caller's class and its providers. - * This method scans the caller's class for the {@link OllamaToolService} annotation and recursively registers + * Registers tools based on the annotations found on the methods of the caller's + * class and its providers. + * This method scans the caller's class for the {@link OllamaToolService} + * annotation and recursively registers * annotated tools from all the providers specified in the annotation. * - * @throws IllegalStateException if the caller's class is not annotated with {@link OllamaToolService}. - * @throws RuntimeException if any reflection-based instantiation or invocation fails. + * @throws IllegalStateException if the caller's class is not annotated with + * {@link OllamaToolService}. + * @throws RuntimeException if any reflection-based instantiation or + * invocation fails. */ public void registerAnnotatedTools() { try { @@ -986,19 +1170,24 @@ public class OllamaAPI { for (Class provider : providers) { registerAnnotatedTools(provider.getDeclaredConstructor().newInstance()); } - } catch (InstantiationException | NoSuchMethodException | IllegalAccessException | - InvocationTargetException e) { + } catch (InstantiationException | NoSuchMethodException | IllegalAccessException + | InvocationTargetException e) { throw new RuntimeException(e); } } /** - * Registers tools based on the annotations found on the methods of the provided object. - * This method scans the methods of the given object and registers tools using the {@link ToolSpec} annotation - * and associated {@link ToolProperty} annotations. It constructs tool specifications and stores them in a tool registry. + * Registers tools based on the annotations found on the methods of the provided + * object. 
+ * This method scans the methods of the given object and registers tools using + * the {@link ToolSpec} annotation + * and associated {@link ToolProperty} annotations. It constructs tool + * specifications and stores them in a tool registry. * - * @param object the object whose methods are to be inspected for annotated tools. - * @throws RuntimeException if any reflection-based instantiation or invocation fails. + * @param object the object whose methods are to be inspected for annotated + * tools. + * @throws RuntimeException if any reflection-based instantiation or invocation + * fails. */ public void registerAnnotatedTools(Object object) { Class objectClass = object.getClass(); @@ -1022,12 +1211,22 @@ public class OllamaAPI { } String propName = !toolPropertyAnn.name().isBlank() ? toolPropertyAnn.name() : parameter.getName(); methodParams.put(propName, propType); - propsBuilder.withProperty(propName, Tools.PromptFuncDefinition.Property.builder().type(propType).description(toolPropertyAnn.desc()).required(toolPropertyAnn.required()).build()); + propsBuilder.withProperty(propName, Tools.PromptFuncDefinition.Property.builder().type(propType) + .description(toolPropertyAnn.desc()).required(toolPropertyAnn.required()).build()); } final Map params = propsBuilder.build(); - List reqProps = params.entrySet().stream().filter(e -> e.getValue().isRequired()).map(Map.Entry::getKey).collect(Collectors.toList()); + List reqProps = params.entrySet().stream().filter(e -> e.getValue().isRequired()) + .map(Map.Entry::getKey).collect(Collectors.toList()); - Tools.ToolSpecification toolSpecification = 
Tools.ToolSpecification.builder().functionName(operationName).functionDescription(operationDesc).toolPrompt(Tools.PromptFuncDefinition.builder().type("function").function(Tools.PromptFuncDefinition.PromptFuncSpec.builder().name(operationName).description(operationDesc).parameters(Tools.PromptFuncDefinition.Parameters.builder().type("object").properties(params).required(reqProps).build()).build()).build()).build(); + Tools.ToolSpecification toolSpecification = Tools.ToolSpecification.builder().functionName(operationName) + .functionDescription(operationDesc) + .toolPrompt(Tools.PromptFuncDefinition.builder().type("function") + .function(Tools.PromptFuncDefinition.PromptFuncSpec.builder().name(operationName) + .description(operationDesc).parameters(Tools.PromptFuncDefinition.Parameters + .builder().type("object").properties(params).required(reqProps).build()) + .build()) + .build()) + .build(); ReflectionalToolFunction reflectionalToolFunction = new ReflectionalToolFunction(object, m, methodParams); toolSpecification.setToolFunction(reflectionalToolFunction); @@ -1060,13 +1259,13 @@ public class OllamaAPI { * * @param roleName the name of the role to retrieve * @return the OllamaChatMessageRole associated with the given name - * @throws RoleNotFoundException if the role with the specified name does not exist + * @throws RoleNotFoundException if the role with the specified name does not + * exist */ public OllamaChatMessageRole getRole(String roleName) throws RoleNotFoundException { return OllamaChatMessageRole.getRole(roleName); } - // technical private methods // /** @@ -1092,18 +1291,26 @@ public class OllamaAPI { /** * Generates a request for the Ollama API and returns the result. - * This method synchronously calls the Ollama API. If a stream handler is provided, - * the request will be streamed; otherwise, a regular synchronous request will be made. + * This method synchronously calls the Ollama API. 
If a stream handler is + * provided, + * the request will be streamed; otherwise, a regular synchronous request will + * be made. * - * @param ollamaRequestModel the request model containing necessary parameters for the Ollama API request. - * @param streamHandler the stream handler to process streaming responses, or null for non-streaming requests. + * @param ollamaRequestModel the request model containing necessary parameters + * for the Ollama API request. + * @param streamHandler the stream handler to process streaming responses, + * or null for non-streaming requests. * @return the result of the Ollama API request. - * @throws OllamaBaseException if the request fails due to an issue with the Ollama API. - * @throws IOException if an I/O error occurs during the request process. + * @throws OllamaBaseException if the request fails due to an issue with the + * Ollama API. + * @throws IOException if an I/O error occurs during the request + * process. * @throws InterruptedException if the thread is interrupted during the request. */ - private OllamaResult generateSyncForOllamaRequestModel(OllamaGenerateRequest ollamaRequestModel, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { - OllamaGenerateEndpointCaller requestCaller = new OllamaGenerateEndpointCaller(host, auth, requestTimeoutSeconds, verbose); + private OllamaResult generateSyncForOllamaRequestModel(OllamaGenerateRequest ollamaRequestModel, + OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { + OllamaGenerateEndpointCaller requestCaller = new OllamaGenerateEndpointCaller(host, auth, requestTimeoutSeconds, + verbose); OllamaResult result; if (streamHandler != null) { ollamaRequestModel.setStream(true); @@ -1114,7 +1321,6 @@ public class OllamaAPI { return result; } - /** * Get default request builder. 
* @@ -1122,7 +1328,8 @@ public class OllamaAPI { * @return HttpRequest.Builder */ private HttpRequest.Builder getRequestBuilderDefault(URI uri) { - HttpRequest.Builder requestBuilder = HttpRequest.newBuilder(uri).header("Content-Type", "application/json").timeout(Duration.ofSeconds(requestTimeoutSeconds)); + HttpRequest.Builder requestBuilder = HttpRequest.newBuilder(uri).header("Content-Type", "application/json") + .timeout(Duration.ofSeconds(requestTimeoutSeconds)); if (isBasicAuthCredentialsSet()) { requestBuilder.header("Authorization", auth.getAuthHeaderValue()); } @@ -1147,7 +1354,8 @@ public class OllamaAPI { logger.debug("Invoking function {} with arguments {}", methodName, arguments); } if (function == null) { - throw new ToolNotFoundException("No such tool: " + methodName + ". Please register the tool before invoking it."); + throw new ToolNotFoundException( + "No such tool: " + methodName + ". Please register the tool before invoking it."); } return function.apply(arguments); } catch (Exception e) { diff --git a/src/main/java/io/github/ollama4j/models/response/OllamaStructuredResult.java b/src/main/java/io/github/ollama4j/models/response/OllamaStructuredResult.java new file mode 100644 index 0000000..42b8e4e --- /dev/null +++ b/src/main/java/io/github/ollama4j/models/response/OllamaStructuredResult.java @@ -0,0 +1,22 @@ +package io.github.ollama4j.models.response; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Data; + +/** + * Structured response for Ollama API + */ +@Data +@JsonIgnoreProperties(ignoreUnknown = true) +public class OllamaStructuredResult { + + @JsonProperty("response") + private String response; + + @JsonProperty("httpStatusCode") + private int httpStatusCode; + + @JsonProperty("responseTime") + private long responseTime; +} diff --git a/src/test/java/io/github/ollama4j/integrationtests/OllamaAPIIntegrationTest.java 
b/src/test/java/io/github/ollama4j/integrationtests/OllamaAPIIntegrationTest.java index cbdecbe..1d4c864 100644 --- a/src/test/java/io/github/ollama4j/integrationtests/OllamaAPIIntegrationTest.java +++ b/src/test/java/io/github/ollama4j/integrationtests/OllamaAPIIntegrationTest.java @@ -4,10 +4,7 @@ import io.github.ollama4j.OllamaAPI; import io.github.ollama4j.exceptions.OllamaBaseException; import io.github.ollama4j.models.chat.*; import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel; -import io.github.ollama4j.models.response.LibraryModel; -import io.github.ollama4j.models.response.Model; -import io.github.ollama4j.models.response.ModelDetail; -import io.github.ollama4j.models.response.OllamaResult; +import io.github.ollama4j.models.response.*; import io.github.ollama4j.samples.AnnotatedTool; import io.github.ollama4j.tools.OllamaToolCallsFunction; import io.github.ollama4j.tools.ToolFunction; @@ -23,12 +20,15 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testcontainers.ollama.OllamaContainer; +import com.fasterxml.jackson.core.type.TypeReference; + import java.io.File; import java.io.IOException; import java.net.ConnectException; import java.net.URISyntaxException; import java.util.*; +import static io.github.ollama4j.utils.Utils.getObjectMapper; import static org.junit.jupiter.api.Assertions.*; @OllamaToolService(providers = {AnnotatedTool.class}) @@ -126,32 +126,60 @@ public class OllamaAPIIntegrationTest { void testAskModelWithDefaultOptions() throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { String chatModel = "qwen2.5:0.5b"; api.pullModel(chatModel); - OllamaResult result = - api.generate( - chatModel, - "What is the capital of France? And what's France's connection with Mona Lisa?", - false, - new OptionsBuilder().build()); + OllamaResult result = api.generate(chatModel, "What is the capital of France? 
And what's France's connection with Mona Lisa?", false, new OptionsBuilder().build()); assertNotNull(result); assertNotNull(result.getResponse()); assertFalse(result.getResponse().isEmpty()); } + @Test + @Order(6) + void testAskModelWithStructuredOutput() throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { + String chatModel = "llama3.1:8b"; + chatModel = "qwen2.5:0.5b"; + api.pullModel(chatModel); + + String prompt = "Ollama is 22 years old and is busy saving the world. Respond using JSON"; + Map format = new HashMap<>(); + format.put("type", "object"); + format.put("properties", new HashMap() {{ + put("age", new HashMap() {{ + put("type", "integer"); + }}); + put("available", new HashMap() {{ + put("type", "boolean"); + }}); + }}); + format.put("required", Arrays.asList("age", "available")); + + OllamaStructuredResult result = api.generate(chatModel, prompt, format); + + assertNotNull(result); + assertNotNull(result.getResponse()); + assertFalse(result.getResponse().isEmpty()); + + Map actualResponse = getObjectMapper().readValue(result.getResponse(), new TypeReference<>() { + }); + + String expectedResponseJson = "{\n \"age\": 22,\n \"available\": true\n}"; + Map expectedResponse = getObjectMapper().readValue(expectedResponseJson, new TypeReference>() { + }); + assertEquals(actualResponse.get("age").toString(), expectedResponse.get("age").toString()); + assertEquals(actualResponse.get("available").toString(), expectedResponse.get("available").toString()); + } + @Test @Order(7) void testAskModelWithDefaultOptionsStreamed() throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { String chatModel = "qwen2.5:0.5b"; api.pullModel(chatModel); StringBuffer sb = new StringBuffer(); - OllamaResult result = api.generate(chatModel, - "What is the capital of France? 
And what's France's connection with Mona Lisa?", - false, - new OptionsBuilder().build(), (s) -> { - LOG.info(s); - String substring = s.substring(sb.toString().length(), s.length()); - LOG.info(substring); - sb.append(substring); - }); + OllamaResult result = api.generate(chatModel, "What is the capital of France? And what's France's connection with Mona Lisa?", false, new OptionsBuilder().build(), (s) -> { + LOG.info(s); + String substring = s.substring(sb.toString().length(), s.length()); + LOG.info(substring); + sb.append(substring); + }); assertNotNull(result); assertNotNull(result.getResponse()); @@ -166,11 +194,8 @@ public class OllamaAPIIntegrationTest { api.pullModel(chatModel); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(chatModel); - OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a helpful assistant who can generate random person's first and last names in the format [First name, Last name].") - .build(); - requestModel = builder.withMessages(requestModel.getMessages()) - .withMessage(OllamaChatMessageRole.USER, "Give me a cool name") - .withOptions(new OptionsBuilder().setTemperature(0.5f).build()).build(); + OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a helpful assistant who can generate random person's first and last names in the format [First name, Last name].").build(); + requestModel = builder.withMessages(requestModel.getMessages()).withMessage(OllamaChatMessageRole.USER, "Give me a cool name").withOptions(new OptionsBuilder().setTemperature(0.5f).build()).build(); OllamaChatResult chatResult = api.chat(requestModel); assertNotNull(chatResult); @@ -184,12 +209,7 @@ public class OllamaAPIIntegrationTest { String chatModel = "llama3.2:1b"; api.pullModel(chatModel); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(chatModel); - OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, 
- "You are a silent bot that only says 'Shush'. Do not say anything else under any circumstances!") - .withMessage(OllamaChatMessageRole.USER, - "What's something that's brown and sticky?") - .withOptions(new OptionsBuilder().setTemperature(0.8f).build()) - .build(); + OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'Shush'. Do not say anything else under any circumstances!").withMessage(OllamaChatMessageRole.USER, "What's something that's brown and sticky?").withOptions(new OptionsBuilder().setTemperature(0.8f).build()).build(); OllamaChatResult chatResult = api.chat(requestModel); assertNotNull(chatResult); @@ -208,36 +228,23 @@ public class OllamaAPIIntegrationTest { OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(chatModel); // Create the initial user question - OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is 1+1? Answer only in numbers.") - .build(); + OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is 1+1? 
Answer only in numbers.").build(); // Start conversation with model OllamaChatResult chatResult = api.chat(requestModel); - assertTrue( - chatResult.getChatHistory().stream() - .anyMatch(chat -> chat.getContent().contains("2")), - "Expected chat history to contain '2'" - ); + assertTrue(chatResult.getChatHistory().stream().anyMatch(chat -> chat.getContent().contains("2")), "Expected chat history to contain '2'"); // Create the next user question: second largest city - requestModel = builder.withMessages(chatResult.getChatHistory()) - .withMessage(OllamaChatMessageRole.USER, "And what is its squared value?") - .build(); + requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "And what is its squared value?").build(); // Continue conversation with model chatResult = api.chat(requestModel); - assertTrue( - chatResult.getChatHistory().stream() - .anyMatch(chat -> chat.getContent().contains("4")), - "Expected chat history to contain '4'" - ); + assertTrue(chatResult.getChatHistory().stream().anyMatch(chat -> chat.getContent().contains("4")), "Expected chat history to contain '4'"); // Create the next user question: the third question - requestModel = builder.withMessages(chatResult.getChatHistory()) - .withMessage(OllamaChatMessageRole.USER, "What is the largest value between 2, 4 and 6?") - .build(); + requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "What is the largest value between 2, 4 and 6?").build(); // Continue conversation with the model for the third question chatResult = api.chat(requestModel); @@ -255,38 +262,33 @@ public class OllamaAPIIntegrationTest { api.pullModel(imageModel); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(imageModel); - OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", Collections.emptyList(), - 
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg") - .build(); + OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", Collections.emptyList(), "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg").build(); api.registerAnnotatedTools(new OllamaAPIIntegrationTest()); OllamaChatResult chatResult = api.chat(requestModel); assertNotNull(chatResult); } + @Test @Order(10) void testChatWithImageFromFileWithHistoryRecognition() throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { String imageModel = "moondream"; api.pullModel(imageModel); - OllamaChatRequestBuilder builder = - OllamaChatRequestBuilder.getInstance(imageModel); - OllamaChatRequest requestModel = - builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", Collections.emptyList(), - List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); + OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(imageModel); + OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", Collections.emptyList(), List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); OllamaChatResult chatResult = api.chat(requestModel); assertNotNull(chatResult); assertNotNull(chatResult.getResponseModel()); builder.reset(); - requestModel = - builder.withMessages(chatResult.getChatHistory()) - .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build(); + requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build(); chatResult = api.chat(requestModel); assertNotNull(chatResult); assertNotNull(chatResult.getResponseModel()); } + @Test @Order(11) void testChatWithExplicitToolDefinition() throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { @@ -294,41 +296,14 @@ 
public class OllamaAPIIntegrationTest { api.pullModel(chatModel); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(chatModel); - final Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder() - .functionName("get-employee-details") - .functionDescription("Get employee details from the database") - .toolPrompt( - Tools.PromptFuncDefinition.builder().type("function").function( - Tools.PromptFuncDefinition.PromptFuncSpec.builder() - .name("get-employee-details") - .description("Get employee details from the database") - .parameters( - Tools.PromptFuncDefinition.Parameters.builder() - .type("object") - .properties( - new Tools.PropsBuilder() - .withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build()) - .withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build()) - .withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 
9911002233").required(true).build()) - .build() - ) - .required(List.of("employee-name")) - .build() - ).build() - ).build() - ) - .toolFunction(arguments -> { - // perform DB operations here - return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name"), arguments.get("employee-address"), arguments.get("employee-phone")); - }) - .build(); + final Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder().functionName("get-employee-details").functionDescription("Get employee details from the database").toolPrompt(Tools.PromptFuncDefinition.builder().type("function").function(Tools.PromptFuncDefinition.PromptFuncSpec.builder().name("get-employee-details").description("Get employee details from the database").parameters(Tools.PromptFuncDefinition.Parameters.builder().type("object").properties(new Tools.PropsBuilder().withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build()).withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build()).withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 
9911002233").required(true).build()).build()).required(List.of("employee-name")).build()).build()).build()).toolFunction(arguments -> { + // perform DB operations here + return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name"), arguments.get("employee-address"), arguments.get("employee-phone")); + }).build(); api.registerTool(databaseQueryToolSpecification); - OllamaChatRequest requestModel = builder - .withMessage(OllamaChatMessageRole.USER, - "Give me the ID of the employee named 'Rahul Kumar'?") - .build(); + OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "Give me the ID of the employee named 'Rahul Kumar'?").build(); OllamaChatResult chatResult = api.chat(requestModel); assertNotNull(chatResult); @@ -358,10 +333,7 @@ public class OllamaAPIIntegrationTest { api.registerAnnotatedTools(); - OllamaChatRequest requestModel = builder - .withMessage(OllamaChatMessageRole.USER, - "Compute the most important constant in the world using 5 digits") - .build(); + OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "Compute the most important constant in the world using 5 digits").build(); OllamaChatResult chatResult = api.chat(requestModel); assertNotNull(chatResult); @@ -390,11 +362,7 @@ public class OllamaAPIIntegrationTest { api.registerAnnotatedTools(new AnnotatedTool()); - OllamaChatRequest requestModel = builder - .withMessage(OllamaChatMessageRole.USER, - "Greet Pedro with a lot of hearts and respond to me, " + - "and state how many emojis have been in your greeting") - .build(); + OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "Greet Pedro with a lot of hearts and respond to me, " + "and state how many emojis have been in your greeting").build(); OllamaChatResult chatResult = api.chat(requestModel); assertNotNull(chatResult); @@ -423,44 +391,17 @@ public class OllamaAPIIntegrationTest { String 
chatModel = "llama3.2:1b"; api.pullModel(chatModel); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(chatModel); - final Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder() - .functionName("get-employee-details") - .functionDescription("Get employee details from the database") - .toolPrompt( - Tools.PromptFuncDefinition.builder().type("function").function( - Tools.PromptFuncDefinition.PromptFuncSpec.builder() - .name("get-employee-details") - .description("Get employee details from the database") - .parameters( - Tools.PromptFuncDefinition.Parameters.builder() - .type("object") - .properties( - new Tools.PropsBuilder() - .withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build()) - .withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build()) - .withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 
9911002233").required(true).build()) - .build() - ) - .required(List.of("employee-name")) - .build() - ).build() - ).build() - ) - .toolFunction(new ToolFunction() { - @Override - public Object apply(Map arguments) { - // perform DB operations here - return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name"), arguments.get("employee-address"), arguments.get("employee-phone")); - } - }) - .build(); + final Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder().functionName("get-employee-details").functionDescription("Get employee details from the database").toolPrompt(Tools.PromptFuncDefinition.builder().type("function").function(Tools.PromptFuncDefinition.PromptFuncSpec.builder().name("get-employee-details").description("Get employee details from the database").parameters(Tools.PromptFuncDefinition.Parameters.builder().type("object").properties(new Tools.PropsBuilder().withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build()).withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build()).withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 
9911002233").required(true).build()).build()).required(List.of("employee-name")).build()).build()).build()).toolFunction(new ToolFunction() { + @Override + public Object apply(Map arguments) { + // perform DB operations here + return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name"), arguments.get("employee-address"), arguments.get("employee-phone")); + } + }).build(); api.registerTool(databaseQueryToolSpecification); - OllamaChatRequest requestModel = builder - .withMessage(OllamaChatMessageRole.USER, - "Give me the ID of the employee named 'Rahul Kumar'?") - .build(); + OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "Give me the ID of the employee named 'Rahul Kumar'?").build(); StringBuffer sb = new StringBuffer(); @@ -483,9 +424,7 @@ public class OllamaAPIIntegrationTest { String chatModel = "llama3.2:1b"; api.pullModel(chatModel); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(chatModel); - OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, - "What is the capital of France? And what's France's connection with Mona Lisa?") - .build(); + OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France? 
And what's France's connection with Mona Lisa?").build(); StringBuffer sb = new StringBuffer(); @@ -509,13 +448,7 @@ public class OllamaAPIIntegrationTest { String imageModel = "llava"; api.pullModel(imageModel); - OllamaResult result = - api.generateWithImageURLs( - imageModel, - "What is in this image?", - List.of( - "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"), - new OptionsBuilder().build()); + OllamaResult result = api.generateWithImageURLs(imageModel, "What is in this image?", List.of("https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"), new OptionsBuilder().build()); assertNotNull(result); assertNotNull(result.getResponse()); assertFalse(result.getResponse().isEmpty()); @@ -528,12 +461,7 @@ public class OllamaAPIIntegrationTest { api.pullModel(imageModel); File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg"); try { - OllamaResult result = - api.generateWithImageFiles( - imageModel, - "What is in this image?", - List.of(imageFile), - new OptionsBuilder().build()); + OllamaResult result = api.generateWithImageFiles(imageModel, "What is in this image?", List.of(imageFile), new OptionsBuilder().build()); assertNotNull(result); assertNotNull(result.getResponse()); assertFalse(result.getResponse().isEmpty()); @@ -543,7 +471,6 @@ public class OllamaAPIIntegrationTest { } - @Test @Order(20) void testAskModelWithOptionsAndImageFilesStreamed() throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { @@ -554,13 +481,12 @@ public class OllamaAPIIntegrationTest { StringBuffer sb = new StringBuffer(); - OllamaResult result = api.generateWithImageFiles(imageModel, - "What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> { - LOG.info(s); - String substring = s.substring(sb.toString().length(), s.length()); - LOG.info(substring); - sb.append(substring); - }); + OllamaResult result = api.generateWithImageFiles(imageModel, 
"What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> { + LOG.info(s); + String substring = s.substring(sb.toString().length(), s.length()); + LOG.info(substring); + sb.append(substring); + }); assertNotNull(result); assertNotNull(result.getResponse()); assertFalse(result.getResponse().isEmpty());