diff --git a/docs/METRICS.md b/docs/METRICS.md
index 83dade6..7072810 100644
--- a/docs/METRICS.md
+++ b/docs/METRICS.md
@@ -18,7 +18,6 @@ The metrics integration provides the following metrics:
```java
import io.github.ollama4j.Ollama;
-import io.github.ollama4j.OllamaAPI;
// Create API instance with metrics enabled
Ollama ollama = new Ollama();
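// Enabling metrics is a separate step from constructing the client. A minimal
// sketch follows; the setter name below is an assumption based on the metrics
// integration and should be verified against the current Ollama class:
ollama.setMetricsEnabled(true);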
diff --git a/docs/blog/2025-03-08-blog/index.md b/docs/blog/2025-03-08-blog/index.md
index 637b96b..f520696 100644
--- a/docs/blog/2025-03-08-blog/index.md
+++ b/docs/blog/2025-03-08-blog/index.md
@@ -337,7 +337,6 @@ import com.couchbase.client.java.Scope;
import com.couchbase.client.java.json.JsonObject;
import com.couchbase.client.java.query.QueryResult;
import io.github.ollama4j.Ollama;
-import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaException;
import io.github.ollama4j.exceptions.ToolInvocationException;
import io.github.ollama4j.tools.OllamaToolsResult;
diff --git a/docs/docs/apis-extras/options-builder.md b/docs/docs/apis-extras/options-builder.md
index a838950..7873d7a 100644
--- a/docs/docs/apis-extras/options-builder.md
+++ b/docs/docs/apis-extras/options-builder.md
@@ -65,7 +65,7 @@ public class Main {
String host = "http://localhost:11434/";
- OllamaAPI ollama = new OllamaAPI(host);
+ Ollama ollama = new Ollama(host);
Options options =
new OptionsBuilder()
@@ -74,6 +74,15 @@ public class Main {
.setNumGpu(2)
.setTemperature(1.5f)
.build();
+
+ OllamaResult result =
+ ollama.generate(
+ OllamaGenerateRequestBuilder.builder()
+ .withModel(model)
+ .withPrompt("Who are you?")
+ .withOptions(options)
+ .build(),
+ null);
}
}
```
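For reference, the added hunk reads as follows when assembled into a self-contained sketch. The `model` variable and the imports sit outside the hunk, so the declarations and import paths shown here are assumptions:

```java
import io.github.ollama4j.Ollama;
import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder; // path assumed
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.OptionsBuilder;

public class Main {
    public static void main(String[] args) throws Exception {
        Ollama ollama = new Ollama("http://localhost:11434/");
        String model = "llama3"; // assumed; defined outside the hunk

        Options options =
                new OptionsBuilder()
                        .setNumGpu(2)
                        .setTemperature(1.5f)
                        .build();

        // Second argument is the stream handler; null means no streaming.
        OllamaResult result =
                ollama.generate(
                        OllamaGenerateRequestBuilder.builder()
                                .withModel(model)
                                .withPrompt("Who are you?")
                                .withOptions(options)
                                .build(),
                        null);
        System.out.println(result.getResponse());
    }
}
```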
diff --git a/docs/docs/apis-extras/ping.md b/docs/docs/apis-extras/ping.md
index 34153b1..d53db0e 100644
--- a/docs/docs/apis-extras/ping.md
+++ b/docs/docs/apis-extras/ping.md
@@ -8,7 +8,6 @@ This API lets you check the reachability of the Ollama server.
```java
import io.github.ollama4j.Ollama;
-import io.github.ollama4j.OllamaAPI;
public class Main {
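    // Minimal sketch, assuming an Ollama server on the default local port.
    // ping() returns true when the server is reachable.
    public static void main(String[] args) throws Exception {
        Ollama ollama = new Ollama("http://localhost:11434/");
        System.out.println("Ollama server reachable: " + ollama.ping());
    }
}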
diff --git a/docs/docs/apis-extras/prompt-builder.md b/docs/docs/apis-extras/prompt-builder.md
index 9101e54..bcfa604 100644
--- a/docs/docs/apis-extras/prompt-builder.md
+++ b/docs/docs/apis-extras/prompt-builder.md
@@ -9,7 +9,6 @@ inferences.
```java
import io.github.ollama4j.Ollama;
-import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
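// Minimal usage sketch. PromptBuilder's method names (addLine/build) and its
// import are assumptions, elided from the hunk above; the built prompt can be
// passed to ollama.generate(...) as in the options-builder example.
public class Main {
    public static void main(String[] args) {
        String prompt = new PromptBuilder()
                .addLine("You are an expert coder and understand different programming languages.")
                .addLine("Given a word, return only its reverse.")
                .build();
        System.out.println(prompt);
    }
}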
diff --git a/docs/docs/apis-extras/timeouts.md b/docs/docs/apis-extras/timeouts.md
index eb3354d..ae1204f 100644
--- a/docs/docs/apis-extras/timeouts.md
+++ b/docs/docs/apis-extras/timeouts.md
@@ -10,7 +10,6 @@ This API lets you set the request timeout for the Ollama client.
```java
import io.github.ollama4j.Ollama;
-import io.github.ollama4j.OllamaAPI;
public class Main {
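    // Minimal sketch: setRequestTimeoutSeconds existed on OllamaAPI and is
    // assumed to carry over to the Ollama class unchanged.
    public static void main(String[] args) {
        Ollama ollama = new Ollama("http://localhost:11434/");
        ollama.setRequestTimeoutSeconds(10); // fail requests after 10 seconds
    }
}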
diff --git a/docs/docs/apis-generate/chat-with-tools.md b/docs/docs/apis-generate/chat-with-tools.md
index eca5e15..31f91bd 100644
--- a/docs/docs/apis-generate/chat-with-tools.md
+++ b/docs/docs/apis-generate/chat-with-tools.md
@@ -16,7 +16,7 @@ experience.
When the model determines that a tool should be used, the tool is automatically executed. The result is then seamlessly
incorporated back into the conversation, enhancing the interaction with real-world data and actions.
-The following example demonstrates usage of a simple tool, registered with the `OllamaAPI`, and then used within a chat
+The following example demonstrates usage of a simple tool, registered with the `Ollama` client, and then used within a chat
session. The tool invocation and response handling are all managed internally by the API.
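
As a rough sketch of the registration step described above (the builder and method names here are assumptions and should be checked against the current tools API):

```java
import io.github.ollama4j.Ollama;
import io.github.ollama4j.tools.Tools;

public class Main {
    public static void main(String[] args) throws Exception {
        Ollama ollama = new Ollama("http://localhost:11434/");

        // Hypothetical tool: returns a canned fuel price for any arguments.
        Tools.ToolSpecification fuelPriceTool =
                Tools.ToolSpecification.builder()
                        .functionName("current-fuel-price")
                        .functionDescription("Get the current fuel price for a location")
                        .toolFunction(arguments -> "Fuel price: 1.00 EUR/L")
                        .build();

        // Once registered, a tool-capable model can invoke the tool during a
        // chat; its result is folded back into the conversation automatically.
        ollama.registerTool(fuelPriceTool);
    }
}
```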
-     * This method sets up the OllamaAPI client, either using an external Ollama host (if
+     * This method sets up the Ollama client, either using an external Ollama host (if
      * environment variables are set) or by starting a Testcontainers-based Ollama instance. It also
      * configures request timeout and model pull retry settings.
      */
diff --git a/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java b/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java
index fe22ea3..2d8a66c 100644
--- a/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java
+++ b/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java
@@ -88,7 +88,7 @@ public class WithAuth {
                         + "→ Proxy URL: {}",
                 ollamaUrl,
                 nginxUrl);
-        LOG.info("OllamaAPI initialized with bearer auth token: {}", BEARER_AUTH_TOKEN);
+        LOG.info("Ollama initialized with bearer auth token: {}", BEARER_AUTH_TOKEN);
     }

     private static OllamaContainer createOllamaContainer() {
@@ -155,9 +155,9 @@ public class WithAuth {
         try {
             assertTrue(
                     api.ping(),
-                    "Expected OllamaAPI to successfully ping through NGINX with valid auth token.");
+                    "Expected Ollama to successfully ping through NGINX with valid auth token.");
         } catch (Exception e) {
-            fail("Exception occurred while pinging OllamaAPI through NGINX: " + e.getMessage(), e);
+            fail("Exception occurred while pinging Ollama through NGINX: " + e.getMessage(), e);
         }
     }

@@ -168,7 +168,7 @@ public class WithAuth {
         try {
             assertFalse(
                     api.ping(),
-                    "Expected OllamaAPI ping to fail through NGINX with an invalid auth token.");
+                    "Expected Ollama ping to fail through NGINX with an invalid auth token.");
         } catch (Exception e) {
             // If an exception is thrown, that's also an expected failure for a wrong token
             // (e.g., OllamaBaseException or IOException)
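
The restored javadoc above describes a host-selection fallback; a condensed sketch of that pattern follows (variable and environment-variable names here are illustrative, not the test's actual code):

```java
import io.github.ollama4j.Ollama;
import org.testcontainers.ollama.OllamaContainer;

public class Setup {
    static Ollama createClient() {
        // Use an external Ollama host when configured, otherwise fall back
        // to a Testcontainers-managed instance (image tag is illustrative).
        String host = System.getenv("OLLAMA_HOST");
        Ollama ollama;
        if (host != null && !host.isEmpty()) {
            ollama = new Ollama(host);
        } else {
            OllamaContainer container = new OllamaContainer("ollama/ollama:latest");
            container.start();
            ollama = new Ollama(container.getEndpoint());
        }
        ollama.setRequestTimeoutSeconds(120); // request timeout per the javadoc
        return ollama;
    }
}
```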