From f114181fe24278ec3333290fc41e17db01847194 Mon Sep 17 00:00:00 2001
From: amithkoujalgi
Date: Mon, 29 Sep 2025 09:31:32 +0530
Subject: [PATCH] Update documentation and refactor code to replace
 `OllamaAPI` with `Ollama`

- Replaced all instances of `OllamaAPI` with `Ollama` in documentation and code examples for consistency.
- Moved the Docusaurus `onBrokenMarkdownLinks` setting into the `markdown.hooks` configuration.
- Updated integration tests and example code snippets to reflect the new class structure.
---
 docs/METRICS.md                                       |  1 -
 docs/blog/2025-03-08-blog/index.md                    |  1 -
 docs/docs/apis-extras/options-builder.md              | 11 ++++++++++-
 docs/docs/apis-extras/ping.md                         |  1 -
 docs/docs/apis-extras/prompt-builder.md               |  1 -
 docs/docs/apis-extras/timeouts.md                     |  1 -
 docs/docs/apis-generate/chat-with-tools.md            |  8 ++++----
 docs/docs/apis-generate/custom-roles.md               |  2 --
 docs/docs/intro.md                                    |  6 ++----
 docs/docusaurus.config.js                             |  4 +++-
 .../integrationtests/OllamaIntegrationTest.java       |  4 ++--
 .../io/github/ollama4j/integrationtests/WithAuth.java |  8 ++++----
 12 files changed, 25 insertions(+), 23 deletions(-)

diff --git a/docs/METRICS.md b/docs/METRICS.md
index 83dade6..7072810 100644
--- a/docs/METRICS.md
+++ b/docs/METRICS.md
@@ -18,7 +18,6 @@ The metrics integration provides the following metrics:
 
 ```java
 import io.github.ollama4j.Ollama;
-import io.github.ollama4j.OllamaAPI;
 
 // Create API instance with metrics enabled
 Ollama ollama = new Ollama();
diff --git a/docs/blog/2025-03-08-blog/index.md b/docs/blog/2025-03-08-blog/index.md
index 637b96b..f520696 100644
--- a/docs/blog/2025-03-08-blog/index.md
+++ b/docs/blog/2025-03-08-blog/index.md
@@ -337,7 +337,6 @@ import com.couchbase.client.java.Scope;
 import com.couchbase.client.java.json.JsonObject;
 import com.couchbase.client.java.query.QueryResult;
 import io.github.ollama4j.Ollama;
-import io.github.ollama4j.OllamaAPI;
 import io.github.ollama4j.exceptions.OllamaException;
 import io.github.ollama4j.exceptions.ToolInvocationException;
 import io.github.ollama4j.tools.OllamaToolsResult;
diff --git a/docs/docs/apis-extras/options-builder.md b/docs/docs/apis-extras/options-builder.md
index a838950..7873d7a 100644
--- a/docs/docs/apis-extras/options-builder.md
+++ b/docs/docs/apis-extras/options-builder.md
@@ -65,7 +65,7 @@ public class Main {
 
         String host = "http://localhost:11434/";
 
-        OllamaAPI ollama = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);
 
         Options options =
                 new OptionsBuilder()
@@ -74,6 +74,15 @@ public class Main {
                         .setNumGpu(2)
                         .setTemperature(1.5f)
                         .build();
+
+        OllamaResult result =
+                ollama.generate(
+                        OllamaGenerateRequestBuilder.builder()
+                                .withModel(model)
+                                .withPrompt("Who are you?")
+                                .withOptions(options)
+                                .build(),
+                        null);
     }
 }
 ```
diff --git a/docs/docs/apis-extras/ping.md b/docs/docs/apis-extras/ping.md
index 34153b1..d53db0e 100644
--- a/docs/docs/apis-extras/ping.md
+++ b/docs/docs/apis-extras/ping.md
@@ -8,7 +8,6 @@ This API lets you check the reachability of Ollama server.
 
 ```java
 import io.github.ollama4j.Ollama;
-import io.github.ollama4j.OllamaAPI;
 
 public class Main {
 
diff --git a/docs/docs/apis-extras/prompt-builder.md b/docs/docs/apis-extras/prompt-builder.md
index 9101e54..bcfa604 100644
--- a/docs/docs/apis-extras/prompt-builder.md
+++ b/docs/docs/apis-extras/prompt-builder.md
@@ -9,7 +9,6 @@ inferences.
 
 ```java
 import io.github.ollama4j.Ollama;
-import io.github.ollama4j.OllamaAPI;
 import io.github.ollama4j.models.response.OllamaResult;
 import io.github.ollama4j.types.OllamaModelType;
 import io.github.ollama4j.utils.OptionsBuilder;
diff --git a/docs/docs/apis-extras/timeouts.md b/docs/docs/apis-extras/timeouts.md
index eb3354d..ae1204f 100644
--- a/docs/docs/apis-extras/timeouts.md
+++ b/docs/docs/apis-extras/timeouts.md
@@ -10,7 +10,6 @@ This API lets you set the request timeout for the Ollama client.
 
 ```java
 import io.github.ollama4j.Ollama;
-import io.github.ollama4j.OllamaAPI;
 
 public class Main {
 
diff --git a/docs/docs/apis-generate/chat-with-tools.md b/docs/docs/apis-generate/chat-with-tools.md
index eca5e15..31f91bd 100644
--- a/docs/docs/apis-generate/chat-with-tools.md
+++ b/docs/docs/apis-generate/chat-with-tools.md
@@ -16,7 +16,7 @@ experience.
 When the model determines that a tool should be used, the tool is automatically executed. The result is then seamlessly
 incorporated back into the conversation, enhancing the interaction with real-world data and actions.
 
-The following example demonstrates usage of a simple tool, registered with the `OllamaAPI`, and then used within a chat
+The following example demonstrates usage of a simple tool, registered with the `Ollama` client, and then used within a chat
 session. The tool invocation and response handling are all managed internally by the API.
 
 
@@ -33,7 +33,7 @@ This tool calling can also be done using the streaming API.
 
 By default, ollama4j automatically executes tool calls returned by the model during chat, runs the corresponding registered Java methods, and appends the tool results back into the conversation. For some applications, you may want to intercept tool calls and decide yourself when and how to execute them (for example, to queue them, to show a confirmation UI to the user, to run them in a sandbox, or to perform multi‑step orchestration).
 
-To enable this behavior, set the useTools flag to true on your OllamaAPI instance. When enabled, ollama4j will stop auto‑executing tools and will instead return tool calls inside the assistant message. You can then inspect the tool calls and execute them manually.
+To enable this behavior, set the `useTools` flag to `true` on your `Ollama` instance. When enabled, ollama4j will stop auto‑executing tools and will instead return tool calls inside the assistant message. You can then inspect the tool calls and execute them manually.
 
 Notes:
 
@@ -57,10 +57,10 @@ To use a method as a tool within a chat call, follow these steps:
     * `java.lang.Boolean`
     * `java.math.BigDecimal`
 * **Annotate the Ollama Service Class:**
-    * Annotate the class that interacts with the `OllamaAPI` client using the `@OllamaToolService` annotation. Reference
+    * Annotate the class that interacts with the `Ollama` client using the `@OllamaToolService` annotation. Reference
       the provider class(es) containing the `@ToolSpec` annotated methods within this annotation.
 * **Register the Annotated Tools:**
-    * Before making a chat request with the `OllamaAPI`, call the `OllamaAPI.registerAnnotatedTools()` method. This
+    * Before making a chat request with the `Ollama` client, call the `Ollama.registerAnnotatedTools()` method. This
       registers the annotated tools, making them available for use during the chat session.
 
 Let's try an example. Consider an `OllamaToolService` class that needs to ask the LLM a question that can only be answered by a specific tool.
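The chat-with-tools hunks above document a manual tool-execution mode: with `useTools` enabled, ollama4j returns tool calls inside the assistant message instead of auto-executing them. A minimal sketch of that flow follows, assuming a `setUseTools(boolean)` setter on `Ollama` and the `OllamaChatRequestBuilder`/`OllamaChatResult`/`OllamaChatToolCalls` chat types; these names are inferred from the surrounding documentation and may differ from the actual ollama4j API.

```java
import io.github.ollama4j.Ollama;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.models.chat.OllamaChatToolCalls;

public class ManualToolCallSketch {

    public static void main(String[] args) throws Exception {
        Ollama ollama = new Ollama("http://localhost:11434/");

        // Assumed setter: disables auto-execution so tool calls are returned
        // inside the assistant message rather than being run immediately.
        ollama.setUseTools(true);

        OllamaChatRequest request =
                OllamaChatRequestBuilder.builder()
                        .withModel("mistral:7b")
                        .withMessage(OllamaChatMessageRole.USER, "What is the weather in Bengaluru?")
                        .build();

        OllamaChatResult result = ollama.chat(request, null);

        // Inspect the returned tool calls and decide when and how to execute
        // them (queue them, confirm with the user, run them in a sandbox, ...).
        for (OllamaChatToolCalls call : result.getResponseModel().getMessage().getToolCalls()) {
            System.out.printf(
                    "Model requested tool %s with arguments %s%n",
                    call.getFunction().getName(), call.getFunction().getArguments());
        }
    }
}
```

Keeping execution in application code is what enables the queue/confirmation-UI/sandbox use cases the docs list; the default auto-execution path remains available by simply leaving `useTools` off.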
diff --git a/docs/docs/apis-generate/custom-roles.md b/docs/docs/apis-generate/custom-roles.md
index 9858792..bb9e6de 100644
--- a/docs/docs/apis-generate/custom-roles.md
+++ b/docs/docs/apis-generate/custom-roles.md
@@ -17,7 +17,6 @@ _Base roles are `SYSTEM`, `USER`, `ASSISTANT`, `TOOL`._
 
 ```java
 import io.github.ollama4j.Ollama;
-import io.github.ollama4j.OllamaAPI;
 import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 
 public class Main {
@@ -52,7 +51,6 @@ public class Main {
 
 ```java
 import io.github.ollama4j.Ollama;
-import io.github.ollama4j.OllamaAPI;
 import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 
 public class Main {
diff --git a/docs/docs/intro.md b/docs/docs/intro.md
index f237a31..80749ff 100644
--- a/docs/docs/intro.md
+++ b/docs/docs/intro.md
@@ -113,9 +113,8 @@ Create a new Java class in your project and add this code.
 
 ```java
 import io.github.ollama4j.Ollama;
-import io.github.ollama4j.OllamaAPI;
 
-public class OllamaAPITest {
+public class OllamaTest {
 
     public static void main(String[] args) {
         Ollama ollama = new Ollama();
@@ -132,9 +131,8 @@ Specify a different Ollama host that you want to connect to.
 
 ```java
 import io.github.ollama4j.Ollama;
-import io.github.ollama4j.OllamaAPI;
 
-public class OllamaAPITest {
+public class OllamaTest {
 
     public static void main(String[] args) {
         String host = "http://localhost:11434/";
diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js
index 8d3eddc..a7363ad 100644
--- a/docs/docusaurus.config.js
+++ b/docs/docusaurus.config.js
@@ -24,7 +24,6 @@ const config = {
   projectName: 'ollama4j', // Usually your repo name.
 
   onBrokenLinks: 'throw',
-  onBrokenMarkdownLinks: 'warn',
 
   // Even if you don't use internationalization, you can use this field to set
   // useful metadata like html lang. For example, if your site is Chinese, you
@@ -175,6 +174,9 @@ const config = {
     }),
   markdown: {
     mermaid: true,
+    hooks: {
+      onBrokenMarkdownLinks: 'warn'
+    }
   },
   themes: ['@docusaurus/theme-mermaid']
 };
diff --git a/src/test/java/io/github/ollama4j/integrationtests/OllamaIntegrationTest.java b/src/test/java/io/github/ollama4j/integrationtests/OllamaIntegrationTest.java
index 53e0ef5..7e8ea90 100644
--- a/src/test/java/io/github/ollama4j/integrationtests/OllamaIntegrationTest.java
+++ b/src/test/java/io/github/ollama4j/integrationtests/OllamaIntegrationTest.java
@@ -58,9 +58,9 @@ class OllamaIntegrationTest {
     private static final String TOOLS_MODEL = "mistral:7b";
 
     /**
-     * Initializes the OllamaAPI instance for integration tests.
+     * Initializes the Ollama instance for integration tests.
      *
-     * <p>This method sets up the OllamaAPI client, either using an external Ollama host (if
+     * <p>This method sets up the Ollama client, either using an external Ollama host (if
      * environment variables are set) or by starting a Testcontainers-based Ollama instance. It also
      * configures request timeout and model pull retry settings.
      */
diff --git a/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java b/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java
index fe22ea3..2d8a66c 100644
--- a/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java
+++ b/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java
@@ -88,7 +88,7 @@ public class WithAuth {
                         + "→ Proxy URL: {}",
                 ollamaUrl,
                 nginxUrl);
-        LOG.info("OllamaAPI initialized with bearer auth token: {}", BEARER_AUTH_TOKEN);
+        LOG.info("Ollama initialized with bearer auth token: {}", BEARER_AUTH_TOKEN);
     }
 
     private static OllamaContainer createOllamaContainer() {
@@ -155,9 +155,9 @@ public class WithAuth {
         try {
             assertTrue(
                     api.ping(),
-                    "Expected OllamaAPI to successfully ping through NGINX with valid auth token.");
+                    "Expected Ollama to successfully ping through NGINX with valid auth token.");
         } catch (Exception e) {
-            fail("Exception occurred while pinging OllamaAPI through NGINX: " + e.getMessage(), e);
+            fail("Exception occurred while pinging Ollama through NGINX: " + e.getMessage(), e);
         }
     }
 
@@ -168,7 +168,7 @@ public class WithAuth {
         try {
             assertFalse(
                     api.ping(),
-                    "Expected OllamaAPI ping to fail through NGINX with an invalid auth token.");
+                    "Expected Ollama ping to fail through NGINX with an invalid auth token.");
         } catch (Exception e) {
             // If an exception is thrown, that's also an expected failure for a wrong token
             // (e.g., OllamaBaseException or IOException)
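Net effect of the patch: `OllamaAPI` becomes `Ollama` everywhere, and generate calls move to the request-builder style shown in the options-builder hunk. A before/after sketch follows; the builder calls and `Options` setup are copied from the hunks above, while the `OllamaGenerateRequestBuilder` import path and the trailing `null` argument (read here as "no streaming handler") are assumptions.

```java
import io.github.ollama4j.Ollama;
import io.github.ollama4j.models.request.OllamaGenerateRequestBuilder; // assumed package
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.OptionsBuilder;

public class MigrationSketch {

    public static void main(String[] args) throws Exception {
        // Before this patch: OllamaAPI ollama = new OllamaAPI("http://localhost:11434/");
        Ollama ollama = new Ollama("http://localhost:11434/");

        Options options =
                new OptionsBuilder()
                        .setNumGpu(2)
                        .setTemperature(1.5f)
                        .build();

        OllamaResult result =
                ollama.generate(
                        OllamaGenerateRequestBuilder.builder()
                                .withModel("mistral:7b") // example model, as in the integration tests
                                .withPrompt("Who are you?")
                                .withOptions(options)
                                .build(),
                        null); // assumed: no streaming handler

        System.out.println(result.getResponse());
    }
}
```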