Refactor OllamaAPI to Ollama class and update documentation
- Replaced instances of `OllamaAPI` with `Ollama` across the codebase for consistency.
- Updated example code snippets in documentation to reflect the new class name.
- Enhanced metrics collection setup in the documentation.
- Added integration tests for the new `Ollama` class to ensure functionality remains intact.
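As a quick illustration of the rename described above, here is a minimal sketch of client setup with the new entry-point class, mirroring the updated documentation snippets in this diff (the host URL and timeout value are just example values):

```java
import io.github.ollama4j.Ollama;

public class Main {

    public static void main(String[] args) {
        String host = "http://localhost:11434/";

        // New entry-point class; previously: OllamaAPI ollamaAPI = new OllamaAPI(host);
        Ollama ollama = new Ollama(host);

        // Same configuration methods as before, now called on Ollama
        ollama.setRequestTimeoutSeconds(10);

        // Check that the Ollama server is reachable
        boolean reachable = ollama.ping();
        System.out.println("Is Ollama server running: " + reachable);
    }
}
```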
@@ -10,7 +10,7 @@ Ollama server would be setup behind a gateway/reverse proxy with basic auth.
 After configuring basic authentication, all subsequent requests will include the Basic Auth header.
 
 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;
 
 public class Main {
 
@@ -18,9 +18,9 @@ public class Main {
 
         String host = "http://localhost:11434/";
 
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);
 
-        ollamaAPI.setBasicAuth("username", "password");
+        ollama.setBasicAuth("username", "password");
     }
 }
 ```
@@ -10,7 +10,7 @@ Ollama server would be setup behind a gateway/reverse proxy with bearer auth.
 After configuring bearer authentication, all subsequent requests will include the Bearer Auth header.
 
 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;
 
 public class Main {
 
@@ -18,9 +18,9 @@ public class Main {
 
         String host = "http://localhost:11434/";
 
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);
 
-        ollamaAPI.setBearerAuth("YOUR-TOKEN");
+        ollama.setBearerAuth("YOUR-TOKEN");
    }
 }
 ```
@@ -36,7 +36,7 @@ from [javadoc](https://ollama4j.github.io/ollama4j/apidocs/io/github/ollama4j/ol
 ## Build an empty `Options` object
 
 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.utils.Options;
 import io.github.ollama4j.utils.OptionsBuilder;
 
@@ -46,7 +46,7 @@ public class Main {
 
         String host = "http://localhost:11434/";
 
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);
 
         Options options = new OptionsBuilder().build();
     }
@@ -65,7 +65,7 @@ public class Main {
 
         String host = "http://localhost:11434/";
 
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        OllamaAPI ollama = new OllamaAPI(host);
 
         Options options =
                 new OptionsBuilder()
@@ -7,6 +7,7 @@ sidebar_position: 5
 This API lets you check the reachability of Ollama server.
 
 ```java
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.OllamaAPI;
 
 public class Main {
@@ -14,9 +15,9 @@ public class Main {
     public static void main(String[] args) {
         String host = "http://localhost:11434/";
 
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);
 
-        ollamaAPI.ping();
+        ollama.ping();
     }
 }
 ```
@@ -8,6 +8,7 @@ This is designed for prompt engineering. It allows you to easily build the promp
 inferences.
 
 ```java
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.OllamaAPI;
 import io.github.ollama4j.models.response.OllamaResult;
 import io.github.ollama4j.types.OllamaModelType;
@@ -18,8 +19,8 @@ public class Main {
     public static void main(String[] args) throws Exception {
 
         String host = "http://localhost:11434/";
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
-        ollamaAPI.setRequestTimeoutSeconds(10);
+        Ollama ollama = new Ollama(host);
+        ollama.setRequestTimeoutSeconds(10);
 
         String model = OllamaModelType.PHI;
 
@@ -43,7 +44,7 @@ public class Main {
                 .add("How do I read a file in Go and print its contents to stdout?");
 
         boolean raw = false;
-        OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), raw, new OptionsBuilder().build());
+        OllamaResult response = ollama.generate(model, promptBuilder.build(), raw, new OptionsBuilder().build());
         System.out.println(response.getResponse());
     }
 }
@@ -11,15 +11,15 @@ This API corresponds to the [PS](https://github.com/ollama/ollama/blob/main/docs
 ```java
 package io.github.ollama4j.localtests;
 
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.models.ps.ModelProcessesResult;
 
 public class Main {
     public static void main(String[] args) {
 
-        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
+        Ollama ollama = new Ollama("http://localhost:11434");
 
-        ModelProcessesResult response = ollamaAPI.ps();
+        ModelProcessesResult response = ollama.ps();
 
         System.out.println(response);
     }
@@ -9,17 +9,18 @@ sidebar_position: 2
 This API lets you set the request timeout for the Ollama client.
 
 ```java
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.OllamaAPI;
 
 public class Main {
 
-    public static void main(String[] args) {
+    public static void main(String[] args) {
 
-        String host = "http://localhost:11434/";
+        String host = "http://localhost:11434/";
 
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);
 
-        ollamaAPI.setRequestTimeoutSeconds(10);
-    }
+        ollama.setRequestTimeoutSeconds(10);
+    }
 }
 ```
@@ -16,6 +16,7 @@ _Base roles are `SYSTEM`, `USER`, `ASSISTANT`, `TOOL`._
 #### Add new role
 
 ```java
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.OllamaAPI;
 import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 
@@ -23,9 +24,9 @@ public class Main {
 
     public static void main(String[] args) {
         String host = "http://localhost:11434/";
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);
 
-        OllamaChatMessageRole customRole = ollamaAPI.addCustomRole("custom-role");
+        OllamaChatMessageRole customRole = ollama.addCustomRole("custom-role");
     }
 }
 ```
@@ -33,16 +34,16 @@ public class Main {
 #### List roles
 
 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 
 public class Main {
 
     public static void main(String[] args) {
         String host = "http://localhost:11434/";
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);
 
-        List<OllamaChatMessageRole> roles = ollamaAPI.listRoles();
+        List<OllamaChatMessageRole> roles = ollama.listRoles();
     }
 }
 ```
@@ -50,6 +51,7 @@ public class Main {
 #### Get role
 
 ```java
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.OllamaAPI;
 import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 
@@ -57,9 +59,9 @@ public class Main {
 
     public static void main(String[] args) {
         String host = "http://localhost:11434/";
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);
 
-        List<OllamaChatMessageRole> roles = ollamaAPI.getRole("custom-role");
+        List<OllamaChatMessageRole> roles = ollama.getRole("custom-role");
     }
 }
 ```
@@ -112,14 +112,15 @@ or use other suitable implementations.
 Create a new Java class in your project and add this code.
 
 ```java
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.OllamaAPI;
 
 public class OllamaAPITest {
 
     public static void main(String[] args) {
-        OllamaAPI ollamaAPI = new OllamaAPI();
+        Ollama ollama = new Ollama();
 
-        boolean isOllamaServerReachable = ollamaAPI.ping();
+        boolean isOllamaServerReachable = ollama.ping();
 
         System.out.println("Is Ollama server running: " + isOllamaServerReachable);
     }
@@ -130,6 +131,7 @@ This uses the default Ollama host as `http://localhost:11434`.
 Specify a different Ollama host that you want to connect to.
 
 ```java
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.OllamaAPI;
 
 public class OllamaAPITest {
@@ -137,9 +139,9 @@ public class OllamaAPITest {
     public static void main(String[] args) {
         String host = "http://localhost:11434/";
 
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);
 
-        boolean isOllamaServerReachable = ollamaAPI.ping();
+        boolean isOllamaServerReachable = ollama.ping();
 
         System.out.println("Is Ollama server running: " + isOllamaServerReachable);
     }