Mirror of https://github.com/amithkoujalgi/ollama4j.git (synced 2025-10-13 17:08:57 +02:00)

Merge pull request #194 from ollama4j/refactor

Refactor OllamaAPI to Ollama class and update documentation

Commit 73a0a48eab
.github/workflows/run-tests.yml (vendored): 4 lines changed
@@ -45,10 +45,10 @@ jobs:
           settings-path: ${{ github.workspace }}

       - name: Run unit tests
-        run: mvn clean test -Punit-tests
+        run: make unit-tests

      - name: Run integration tests
-        run: mvn clean verify -Pintegration-tests
+        run: make integration-tests-basic
         env:
           USE_EXTERNAL_OLLAMA_HOST: "true"
           OLLAMA_HOST: "http://localhost:11434"
Makefile: 10 lines changed
@@ -26,12 +26,16 @@ unit-tests: apply-formatting
 	@echo "\033[0;34mRunning unit tests...\033[0m"
 	@mvn clean test -Punit-tests

-integration-tests: apply-formatting
-	@echo "\033[0;34mRunning integration tests (local)...\033[0m"
+integration-tests-all: apply-formatting
+	@echo "\033[0;34mRunning integration tests (local - all)...\033[0m"
 	@export USE_EXTERNAL_OLLAMA_HOST=false && mvn clean verify -Pintegration-tests

+integration-tests-basic: apply-formatting
+	@echo "\033[0;34mRunning integration tests (local - basic)...\033[0m"
+	@export USE_EXTERNAL_OLLAMA_HOST=false && mvn clean verify -Pintegration-tests -Dit.test=WithAuth
+
 integration-tests-remote: apply-formatting
-	@echo "\033[0;34mRunning integration tests (remote)...\033[0m"
+	@echo "\033[0;34mRunning integration tests (remote - all)...\033[0m"
 	@export USE_EXTERNAL_OLLAMA_HOST=true && export OLLAMA_HOST=http://192.168.29.229:11434 && mvn clean verify -Pintegration-tests -Dgpg.skip=true

 doxygen:
@@ -17,11 +17,13 @@ The metrics integration provides the following metrics:
 ### 1. Enable Metrics Collection

 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;

 // Create API instance with metrics enabled
-OllamaAPI ollamaAPI = new OllamaAPI();
-ollamaAPI.setMetricsEnabled(true);
+Ollama ollama = new Ollama();
+ollamaAPI.
+
+setMetricsEnabled(true);
 ```

 ### 2. Start Metrics Server
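For reference, a minimal sketch of what the enable-metrics snippet intends after the rename (the updated example above is split across lines and still references the old `ollamaAPI` variable); this assumes `setMetricsEnabled(boolean)` carries over unchanged to the new `Ollama` class:

```java
import io.github.ollama4j.Ollama;

public class MetricsSetup {
    public static void main(String[] args) {
        // Create the client and turn metrics collection on
        // (method name taken from the surrounding documentation).
        Ollama ollama = new Ollama();
        ollama.setMetricsEnabled(true);
    }
}
```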
@@ -38,11 +40,11 @@ System.out.println("Metrics available at: http://localhost:8080/metrics");

 ```java
 // All API calls are automatically instrumented
-boolean isReachable = ollamaAPI.ping();
+boolean isReachable = ollama.ping();

 Map<String, Object> format = new HashMap<>();
 format.put("type", "json");
-OllamaResult result = ollamaAPI.generateWithFormat(
+OllamaResult result = ollama.generateWithFormat(
     "llama2",
     "Generate a JSON object",
     format
@@ -100,13 +102,13 @@ ollama_tokens_generated_total{model_name="llama2"} 150.0
 ### Enable/Disable Metrics

 ```java
-OllamaAPI ollamaAPI = new OllamaAPI();
+OllamaAPI ollama = new OllamaAPI();

 // Enable metrics collection
-ollamaAPI.setMetricsEnabled(true);
+ollama.setMetricsEnabled(true);

 // Disable metrics collection (default)
-ollamaAPI.setMetricsEnabled(false);
+ollama.setMetricsEnabled(false);
 ```

 ### Custom Metrics Server
@@ -149,14 +151,14 @@ You can create Grafana dashboards using the metrics. Some useful queries:

 - Metrics collection adds minimal overhead (~1-2% in most cases)
 - Metrics are collected asynchronously and don't block API calls
-- You can disable metrics in production if needed: `ollamaAPI.setMetricsEnabled(false)`
+- You can disable metrics in production if needed: `ollama.setMetricsEnabled(false)`
 - The metrics server uses minimal resources

 ## Troubleshooting

 ### Metrics Not Appearing

-1. Ensure metrics are enabled: `ollamaAPI.setMetricsEnabled(true)`
+1. Ensure metrics are enabled: `ollama.setMetricsEnabled(true)`
 2. Check that the metrics server is running: `http://localhost:8080/metrics`
 3. Verify API calls are being made (metrics only appear after API usage)
@@ -336,7 +336,7 @@ import com.couchbase.client.java.ClusterOptions;
 import com.couchbase.client.java.Scope;
 import com.couchbase.client.java.json.JsonObject;
 import com.couchbase.client.java.query.QueryResult;
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.exceptions.OllamaException;
 import io.github.ollama4j.exceptions.ToolInvocationException;
 import io.github.ollama4j.tools.OllamaToolsResult;
@ -356,210 +356,210 @@ import java.util.Map;
|
||||
|
||||
public class CouchbaseToolCallingExample {
|
||||
|
||||
public static void main(String[] args) throws IOException, ToolInvocationException, OllamaException, InterruptedException {
|
||||
String connectionString = Utilities.getFromEnvVar("CB_CLUSTER_URL");
|
||||
String username = Utilities.getFromEnvVar("CB_CLUSTER_USERNAME");
|
||||
String password = Utilities.getFromEnvVar("CB_CLUSTER_PASSWORD");
|
||||
String bucketName = "travel-sample";
|
||||
public static void main(String[] args) throws IOException, ToolInvocationException, OllamaException, InterruptedException {
|
||||
String connectionString = Utilities.getFromEnvVar("CB_CLUSTER_URL");
|
||||
String username = Utilities.getFromEnvVar("CB_CLUSTER_USERNAME");
|
||||
String password = Utilities.getFromEnvVar("CB_CLUSTER_PASSWORD");
|
||||
String bucketName = "travel-sample";
|
||||
|
||||
Cluster cluster = Cluster.connect(
|
||||
connectionString,
|
||||
ClusterOptions.clusterOptions(username, password).environment(env -> {
|
||||
env.applyProfile("wan-development");
|
||||
})
|
||||
);
|
||||
Cluster cluster = Cluster.connect(
|
||||
connectionString,
|
||||
ClusterOptions.clusterOptions(username, password).environment(env -> {
|
||||
env.applyProfile("wan-development");
|
||||
})
|
||||
);
|
||||
|
||||
String host = Utilities.getFromConfig("host");
|
||||
String modelName = Utilities.getFromConfig("tools_model_mistral");
|
||||
String host = Utilities.getFromConfig("host");
|
||||
String modelName = Utilities.getFromConfig("tools_model_mistral");
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
ollamaAPI.setRequestTimeoutSeconds(60);
|
||||
Ollama ollama = new Ollama(host);
|
||||
ollama.setRequestTimeoutSeconds(60);
|
||||
|
||||
Tools.ToolSpecification callSignFinderToolSpec = getCallSignFinderToolSpec(cluster, bucketName);
|
||||
Tools.ToolSpecification callSignUpdaterToolSpec = getCallSignUpdaterToolSpec(cluster, bucketName);
|
||||
Tools.ToolSpecification callSignFinderToolSpec = getCallSignFinderToolSpec(cluster, bucketName);
|
||||
Tools.ToolSpecification callSignUpdaterToolSpec = getCallSignUpdaterToolSpec(cluster, bucketName);
|
||||
|
||||
ollamaAPI.registerTool(callSignFinderToolSpec);
|
||||
ollamaAPI.registerTool(callSignUpdaterToolSpec);
|
||||
ollama.registerTool(callSignFinderToolSpec);
|
||||
ollama.registerTool(callSignUpdaterToolSpec);
|
||||
|
||||
String prompt1 = "What is the call-sign of Astraeus?";
|
||||
for (OllamaToolsResult.ToolResult r : ollamaAPI.generateWithTools(modelName, new Tools.PromptBuilder()
|
||||
.withToolSpecification(callSignFinderToolSpec)
|
||||
.withPrompt(prompt1)
|
||||
.build(), new OptionsBuilder().build()).getToolResults()) {
|
||||
AirlineDetail airlineDetail = (AirlineDetail) r.getResult();
|
||||
System.out.println(String.format("[Result of tool '%s']: Call-sign of %s is '%s'! ✈️", r.getFunctionName(), airlineDetail.getName(), airlineDetail.getCallsign()));
|
||||
}
|
||||
|
||||
String prompt2 = "I want to code name Astraeus as STARBOUND";
|
||||
for (OllamaToolsResult.ToolResult r : ollamaAPI.generateWithTools(modelName, new Tools.PromptBuilder()
|
||||
.withToolSpecification(callSignUpdaterToolSpec)
|
||||
.withPrompt(prompt2)
|
||||
.build(), new OptionsBuilder().build()).getToolResults()) {
|
||||
Boolean updated = (Boolean) r.getResult();
|
||||
System.out.println(String.format("[Result of tool '%s']: Call-sign is %s! ✈️", r.getFunctionName(), updated ? "updated" : "not updated"));
|
||||
}
|
||||
|
||||
String prompt3 = "What is the call-sign of Astraeus?";
|
||||
for (OllamaToolsResult.ToolResult r : ollamaAPI.generateWithTools(modelName, new Tools.PromptBuilder()
|
||||
.withToolSpecification(callSignFinderToolSpec)
|
||||
.withPrompt(prompt3)
|
||||
.build(), new OptionsBuilder().build()).getToolResults()) {
|
||||
AirlineDetail airlineDetail = (AirlineDetail) r.getResult();
|
||||
System.out.println(String.format("[Result of tool '%s']: Call-sign of %s is '%s'! ✈️", r.getFunctionName(), airlineDetail.getName(), airlineDetail.getCallsign()));
|
||||
}
|
||||
String prompt1 = "What is the call-sign of Astraeus?";
|
||||
for (OllamaToolsResult.ToolResult r : ollama.generateWithTools(modelName, new Tools.PromptBuilder()
|
||||
.withToolSpecification(callSignFinderToolSpec)
|
||||
.withPrompt(prompt1)
|
||||
.build(), new OptionsBuilder().build()).getToolResults()) {
|
||||
AirlineDetail airlineDetail = (AirlineDetail) r.getResult();
|
||||
System.out.println(String.format("[Result of tool '%s']: Call-sign of %s is '%s'! ✈️", r.getFunctionName(), airlineDetail.getName(), airlineDetail.getCallsign()));
|
||||
}
|
||||
|
||||
public static Tools.ToolSpecification getCallSignFinderToolSpec(Cluster cluster, String bucketName) {
|
||||
return Tools.ToolSpecification.builder()
|
||||
.functionName("airline-lookup")
|
||||
.functionDescription("You are a tool who finds only the airline name and do not worry about any other parameters. You simply find the airline name and ignore the rest of the parameters. Do not validate airline names as I want to use fake/fictitious airline names as well.")
|
||||
.toolFunction(new AirlineCallsignQueryToolFunction(bucketName, cluster))
|
||||
.toolPrompt(
|
||||
Tools.PromptFuncDefinition.builder()
|
||||
.type("prompt")
|
||||
.function(
|
||||
Tools.PromptFuncDefinition.PromptFuncSpec.builder()
|
||||
.name("get-airline-name")
|
||||
.description("Get the airline name")
|
||||
.parameters(
|
||||
Tools.PromptFuncDefinition.Parameters.builder()
|
||||
.type("object")
|
||||
.properties(
|
||||
Map.of(
|
||||
"airlineName", Tools.PromptFuncDefinition.Property.builder()
|
||||
.type("string")
|
||||
.description("The name of the airline. e.g. Emirates")
|
||||
.required(true)
|
||||
.build()
|
||||
)
|
||||
)
|
||||
.required(java.util.List.of("airline-name"))
|
||||
.build()
|
||||
)
|
||||
.build()
|
||||
)
|
||||
.build()
|
||||
)
|
||||
.build();
|
||||
String prompt2 = "I want to code name Astraeus as STARBOUND";
|
||||
for (OllamaToolsResult.ToolResult r : ollama.generateWithTools(modelName, new Tools.PromptBuilder()
|
||||
.withToolSpecification(callSignUpdaterToolSpec)
|
||||
.withPrompt(prompt2)
|
||||
.build(), new OptionsBuilder().build()).getToolResults()) {
|
||||
Boolean updated = (Boolean) r.getResult();
|
||||
System.out.println(String.format("[Result of tool '%s']: Call-sign is %s! ✈️", r.getFunctionName(), updated ? "updated" : "not updated"));
|
||||
}
|
||||
|
||||
public static Tools.ToolSpecification getCallSignUpdaterToolSpec(Cluster cluster, String bucketName) {
|
||||
return Tools.ToolSpecification.builder()
|
||||
.functionName("airline-update")
|
||||
.functionDescription("You are a tool who finds the airline name and its callsign and do not worry about any validations. You simply find the airline name and its callsign. Do not validate airline names as I want to use fake/fictitious airline names as well.")
|
||||
.toolFunction(new AirlineCallsignUpdateToolFunction(bucketName, cluster))
|
||||
.toolPrompt(
|
||||
Tools.PromptFuncDefinition.builder()
|
||||
.type("prompt")
|
||||
.function(
|
||||
Tools.PromptFuncDefinition.PromptFuncSpec.builder()
|
||||
.name("get-airline-name-and-callsign")
|
||||
.description("Get the airline name and callsign")
|
||||
.parameters(
|
||||
Tools.PromptFuncDefinition.Parameters.builder()
|
||||
.type("object")
|
||||
.properties(
|
||||
Map.of(
|
||||
"airlineName", Tools.PromptFuncDefinition.Property.builder()
|
||||
.type("string")
|
||||
.description("The name of the airline. e.g. Emirates")
|
||||
.required(true)
|
||||
.build(),
|
||||
"airlineCallsign", Tools.PromptFuncDefinition.Property.builder()
|
||||
.type("string")
|
||||
.description("The callsign of the airline. e.g. Maverick")
|
||||
.enumValues(Arrays.asList("petrol", "diesel"))
|
||||
.required(true)
|
||||
.build()
|
||||
)
|
||||
)
|
||||
.required(java.util.List.of("airlineName", "airlineCallsign"))
|
||||
.build()
|
||||
)
|
||||
.build()
|
||||
)
|
||||
.build()
|
||||
)
|
||||
.build();
|
||||
String prompt3 = "What is the call-sign of Astraeus?";
|
||||
for (OllamaToolsResult.ToolResult r : ollama.generateWithTools(modelName, new Tools.PromptBuilder()
|
||||
.withToolSpecification(callSignFinderToolSpec)
|
||||
.withPrompt(prompt3)
|
||||
.build(), new OptionsBuilder().build()).getToolResults()) {
|
||||
AirlineDetail airlineDetail = (AirlineDetail) r.getResult();
|
||||
System.out.println(String.format("[Result of tool '%s']: Call-sign of %s is '%s'! ✈️", r.getFunctionName(), airlineDetail.getName(), airlineDetail.getCallsign()));
|
||||
}
|
||||
}
|
||||
|
||||
public static Tools.ToolSpecification getCallSignFinderToolSpec(Cluster cluster, String bucketName) {
|
||||
return Tools.ToolSpecification.builder()
|
||||
.functionName("airline-lookup")
|
||||
.functionDescription("You are a tool who finds only the airline name and do not worry about any other parameters. You simply find the airline name and ignore the rest of the parameters. Do not validate airline names as I want to use fake/fictitious airline names as well.")
|
||||
.toolFunction(new AirlineCallsignQueryToolFunction(bucketName, cluster))
|
||||
.toolPrompt(
|
||||
Tools.PromptFuncDefinition.builder()
|
||||
.type("prompt")
|
||||
.function(
|
||||
Tools.PromptFuncDefinition.PromptFuncSpec.builder()
|
||||
.name("get-airline-name")
|
||||
.description("Get the airline name")
|
||||
.parameters(
|
||||
Tools.PromptFuncDefinition.Parameters.builder()
|
||||
.type("object")
|
||||
.properties(
|
||||
Map.of(
|
||||
"airlineName", Tools.PromptFuncDefinition.Property.builder()
|
||||
.type("string")
|
||||
.description("The name of the airline. e.g. Emirates")
|
||||
.required(true)
|
||||
.build()
|
||||
)
|
||||
)
|
||||
.required(java.util.List.of("airline-name"))
|
||||
.build()
|
||||
)
|
||||
.build()
|
||||
)
|
||||
.build()
|
||||
)
|
||||
.build();
|
||||
}
|
||||
|
||||
public static Tools.ToolSpecification getCallSignUpdaterToolSpec(Cluster cluster, String bucketName) {
|
||||
return Tools.ToolSpecification.builder()
|
||||
.functionName("airline-update")
|
||||
.functionDescription("You are a tool who finds the airline name and its callsign and do not worry about any validations. You simply find the airline name and its callsign. Do not validate airline names as I want to use fake/fictitious airline names as well.")
|
||||
.toolFunction(new AirlineCallsignUpdateToolFunction(bucketName, cluster))
|
||||
.toolPrompt(
|
||||
Tools.PromptFuncDefinition.builder()
|
||||
.type("prompt")
|
||||
.function(
|
||||
Tools.PromptFuncDefinition.PromptFuncSpec.builder()
|
||||
.name("get-airline-name-and-callsign")
|
||||
.description("Get the airline name and callsign")
|
||||
.parameters(
|
||||
Tools.PromptFuncDefinition.Parameters.builder()
|
||||
.type("object")
|
||||
.properties(
|
||||
Map.of(
|
||||
"airlineName", Tools.PromptFuncDefinition.Property.builder()
|
||||
.type("string")
|
||||
.description("The name of the airline. e.g. Emirates")
|
||||
.required(true)
|
||||
.build(),
|
||||
"airlineCallsign", Tools.PromptFuncDefinition.Property.builder()
|
||||
.type("string")
|
||||
.description("The callsign of the airline. e.g. Maverick")
|
||||
.enumValues(Arrays.asList("petrol", "diesel"))
|
||||
.required(true)
|
||||
.build()
|
||||
)
|
||||
)
|
||||
.required(java.util.List.of("airlineName", "airlineCallsign"))
|
||||
.build()
|
||||
)
|
||||
.build()
|
||||
)
|
||||
.build()
|
||||
)
|
||||
.build();
|
||||
}
|
||||
}
|
||||
|
||||
class AirlineCallsignQueryToolFunction implements ToolFunction {
|
||||
private final String bucketName;
|
||||
private final Cluster cluster;
|
||||
private final String bucketName;
|
||||
private final Cluster cluster;
|
||||
|
||||
public AirlineCallsignQueryToolFunction(String bucketName, Cluster cluster) {
|
||||
this.bucketName = bucketName;
|
||||
this.cluster = cluster;
|
||||
}
|
||||
public AirlineCallsignQueryToolFunction(String bucketName, Cluster cluster) {
|
||||
this.bucketName = bucketName;
|
||||
this.cluster = cluster;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AirlineDetail apply(Map<String, Object> arguments) {
|
||||
String airlineName = arguments.get("airlineName").toString();
|
||||
@Override
|
||||
public AirlineDetail apply(Map<String, Object> arguments) {
|
||||
String airlineName = arguments.get("airlineName").toString();
|
||||
|
||||
Bucket bucket = cluster.bucket(bucketName);
|
||||
bucket.waitUntilReady(Duration.ofSeconds(10));
|
||||
Bucket bucket = cluster.bucket(bucketName);
|
||||
bucket.waitUntilReady(Duration.ofSeconds(10));
|
||||
|
||||
Scope inventoryScope = bucket.scope("inventory");
|
||||
QueryResult result = inventoryScope.query(String.format("SELECT * FROM airline WHERE name = '%s';", airlineName));
|
||||
Scope inventoryScope = bucket.scope("inventory");
|
||||
QueryResult result = inventoryScope.query(String.format("SELECT * FROM airline WHERE name = '%s';", airlineName));
|
||||
|
||||
JsonObject row = (JsonObject) result.rowsAsObject().get(0).get("airline");
|
||||
return new AirlineDetail(row.getString("callsign"), row.getString("name"), row.getString("country"));
|
||||
}
|
||||
JsonObject row = (JsonObject) result.rowsAsObject().get(0).get("airline");
|
||||
return new AirlineDetail(row.getString("callsign"), row.getString("name"), row.getString("country"));
|
||||
}
|
||||
}
|
||||
|
||||
class AirlineCallsignUpdateToolFunction implements ToolFunction {
|
||||
private final String bucketName;
|
||||
private final Cluster cluster;
|
||||
private final String bucketName;
|
||||
private final Cluster cluster;
|
||||
|
||||
public AirlineCallsignUpdateToolFunction(String bucketName, Cluster cluster) {
|
||||
this.bucketName = bucketName;
|
||||
this.cluster = cluster;
|
||||
public AirlineCallsignUpdateToolFunction(String bucketName, Cluster cluster) {
|
||||
this.bucketName = bucketName;
|
||||
this.cluster = cluster;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Boolean apply(Map<String, Object> arguments) {
|
||||
String airlineName = arguments.get("airlineName").toString();
|
||||
String airlineNewCallsign = arguments.get("airlineCallsign").toString();
|
||||
|
||||
Bucket bucket = cluster.bucket(bucketName);
|
||||
bucket.waitUntilReady(Duration.ofSeconds(10));
|
||||
|
||||
Scope inventoryScope = bucket.scope("inventory");
|
||||
String query = String.format("SELECT * FROM airline WHERE name = '%s';", airlineName);
|
||||
|
||||
QueryResult result;
|
||||
try {
|
||||
result = inventoryScope.query(query);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("Error executing query", e);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Boolean apply(Map<String, Object> arguments) {
|
||||
String airlineName = arguments.get("airlineName").toString();
|
||||
String airlineNewCallsign = arguments.get("airlineCallsign").toString();
|
||||
|
||||
Bucket bucket = cluster.bucket(bucketName);
|
||||
bucket.waitUntilReady(Duration.ofSeconds(10));
|
||||
|
||||
Scope inventoryScope = bucket.scope("inventory");
|
||||
String query = String.format("SELECT * FROM airline WHERE name = '%s';", airlineName);
|
||||
|
||||
QueryResult result;
|
||||
try {
|
||||
result = inventoryScope.query(query);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("Error executing query", e);
|
||||
}
|
||||
|
||||
if (result.rowsAsObject().isEmpty()) {
|
||||
throw new RuntimeException("Airline not found with name: " + airlineName);
|
||||
}
|
||||
|
||||
JsonObject row = (JsonObject) result.rowsAsObject().get(0).get("airline");
|
||||
|
||||
if (row == null) {
|
||||
throw new RuntimeException("Airline data is missing or corrupted.");
|
||||
}
|
||||
|
||||
String currentCallsign = row.getString("callsign");
|
||||
|
||||
if (!airlineNewCallsign.equals(currentCallsign)) {
|
||||
JsonObject updateQuery = JsonObject.create()
|
||||
.put("callsign", airlineNewCallsign);
|
||||
|
||||
inventoryScope.query(String.format(
|
||||
"UPDATE airline SET callsign = '%s' WHERE name = '%s';",
|
||||
airlineNewCallsign, airlineName
|
||||
));
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
if (result.rowsAsObject().isEmpty()) {
|
||||
throw new RuntimeException("Airline not found with name: " + airlineName);
|
||||
}
|
||||
|
||||
JsonObject row = (JsonObject) result.rowsAsObject().get(0).get("airline");
|
||||
|
||||
if (row == null) {
|
||||
throw new RuntimeException("Airline data is missing or corrupted.");
|
||||
}
|
||||
|
||||
String currentCallsign = row.getString("callsign");
|
||||
|
||||
if (!airlineNewCallsign.equals(currentCallsign)) {
|
||||
JsonObject updateQuery = JsonObject.create()
|
||||
.put("callsign", airlineNewCallsign);
|
||||
|
||||
inventoryScope.query(String.format(
|
||||
"UPDATE airline SET callsign = '%s' WHERE name = '%s';",
|
||||
airlineNewCallsign, airlineName
|
||||
));
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("ALL")
|
||||
@@ -567,9 +567,9 @@ class AirlineCallsignUpdateToolFunction implements ToolFunction {
 @AllArgsConstructor
 @NoArgsConstructor
 class AirlineDetail {
     private String callsign;
     private String name;
     private String country;
 }
 ```
@@ -578,9 +578,9 @@ class AirlineDetail {
 #### 1. Ollama API Client Setup

 ```javascript
-OllamaAPI ollamaAPI = new OllamaAPI(host);
+OllamaAPI ollama = new OllamaAPI(host);

-ollamaAPI.setRequestTimeoutSeconds(60);
+ollama.setRequestTimeoutSeconds(60);
 ```

 Here, we initialize the Ollama API client and configure it with the host of the Ollama server, where the model is hosted
@@ -595,7 +595,7 @@ queries the database for airline details based on the airline name.
 ```javascript
 Tools.ToolSpecification callSignFinderToolSpec = getCallSignFinderToolSpec(cluster, bucketName);

-ollamaAPI.registerTool(callSignFinderToolSpec);
+ollama.registerTool(callSignFinderToolSpec);
 ```

 This step registers custom tools with Ollama that allows the tool-calling model to invoke database queries.
@@ -619,7 +619,7 @@ String prompt = "What is the call-sign of Astraeus?";
 #### 5. Generating Results with Tools

 ```javascript
-for (OllamaToolsResult.ToolResult r : ollamaAPI.generateWithTools(modelName, new Tools.PromptBuilder()
+for (OllamaToolsResult.ToolResult r : ollama.generateWithTools(modelName, new Tools.PromptBuilder()
         .withToolSpecification(callSignFinderToolSpec)
         .withPrompt(prompt)
         .build(), new OptionsBuilder().build()).getToolResults()) {
@@ -649,7 +649,7 @@ then update the airline’s callsign.
 ```javascript
 Tools.ToolSpecification callSignUpdaterToolSpec = getCallSignUpdaterToolSpec(cluster, bucketName);

-ollamaAPI.registerTool(callSignUpdaterToolSpec);
+ollama.registerTool(callSignUpdaterToolSpec);
 ```

 The tool will execute a Couchbase N1QL query to update the airline’s callsign.
@@ -671,7 +671,7 @@ And then we invoke the model with the new prompt.

 ```javascript
 String prompt = "I want to code name Astraeus as STARBOUND";
-for (OllamaToolsResult.ToolResult r : ollamaAPI.generateWithTools(modelName, new Tools.PromptBuilder()
+for (OllamaToolsResult.ToolResult r : ollama.generateWithTools(modelName, new Tools.PromptBuilder()
         .withToolSpecification(callSignUpdaterToolSpec)
         .withPrompt(prompt)
         .build(), new OptionsBuilder().build()).getToolResults()) {
@@ -10,7 +10,7 @@ Ollama server would be setup behind a gateway/reverse proxy with basic auth.
 After configuring basic authentication, all subsequent requests will include the Basic Auth header.

 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;

 public class Main {

@@ -18,9 +18,9 @@ public class Main {

         String host = "http://localhost:11434/";

-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);

-        ollamaAPI.setBasicAuth("username", "password");
+        ollama.setBasicAuth("username", "password");
     }
 }
 ```
@@ -10,7 +10,7 @@ Ollama server would be setup behind a gateway/reverse proxy with bearer auth.
 After configuring bearer authentication, all subsequent requests will include the Bearer Auth header.

 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;

 public class Main {

@@ -18,9 +18,9 @@ public class Main {

         String host = "http://localhost:11434/";

-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);

-        ollamaAPI.setBearerAuth("YOUR-TOKEN");
+        ollama.setBearerAuth("YOUR-TOKEN");
     }
 }
 ```
@@ -36,7 +36,7 @@ from [javadoc](https://ollama4j.github.io/ollama4j/apidocs/io/github/ollama4j/ol
 ## Build an empty `Options` object

 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.utils.Options;
 import io.github.ollama4j.utils.OptionsBuilder;

@@ -46,7 +46,7 @@ public class Main {

         String host = "http://localhost:11434/";

-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);

         Options options = new OptionsBuilder().build();
     }
@@ -65,7 +65,7 @@ public class Main {

         String host = "http://localhost:11434/";

-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);

         Options options =
                 new OptionsBuilder()
@@ -74,6 +74,15 @@ public class Main {
                         .setNumGpu(2)
                         .setTemperature(1.5f)
                         .build();
+
+        OllamaResult result =
+                ollama.generate(
+                        OllamaGenerateRequestBuilder.builder()
+                                .withModel(model)
+                                .withPrompt("Who are you?")
+                                .withOptions(options)
+                                .build(),
+                        null);
     }
 }
 ```
@@ -7,16 +7,16 @@ sidebar_position: 5
 This API lets you check the reachability of Ollama server.

 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;

 public class Main {

     public static void main(String[] args) {
         String host = "http://localhost:11434/";

-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);

-        ollamaAPI.ping();
+        ollama.ping();
     }
 }
 ```
@@ -8,7 +8,7 @@ This is designed for prompt engineering. It allows you to easily build the promp
 inferences.

 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.models.response.OllamaResult;
 import io.github.ollama4j.types.OllamaModelType;
 import io.github.ollama4j.utils.OptionsBuilder;
@@ -18,8 +18,8 @@ public class Main {
     public static void main(String[] args) throws Exception {

         String host = "http://localhost:11434/";
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
-        ollamaAPI.setRequestTimeoutSeconds(10);
+        Ollama ollama = new Ollama(host);
+        ollama.setRequestTimeoutSeconds(10);

         String model = OllamaModelType.PHI;

@@ -43,7 +43,7 @@ public class Main {
                 .add("How do I read a file in Go and print its contents to stdout?");

         boolean raw = false;
-        OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), raw, new OptionsBuilder().build());
+        OllamaResult response = ollama.generate(model, promptBuilder.build(), raw, new OptionsBuilder().build());
         System.out.println(response.getResponse());
     }
 }
@@ -11,15 +11,15 @@ This API corresponds to the [PS](https://github.com/ollama/ollama/blob/main/docs
 ```java
 package io.github.ollama4j.localtests;

-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.models.ps.ModelProcessesResult;

 public class Main {
     public static void main(String[] args) {

-        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
+        Ollama ollama = new Ollama("http://localhost:11434");

-        ModelProcessesResult response = ollamaAPI.ps();
+        ModelProcessesResult response = ollama.ps();

         System.out.println(response);
     }
@@ -9,17 +9,17 @@ sidebar_position: 2
 This API lets you set the request timeout for the Ollama client.

 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;

 public class Main {

     public static void main(String[] args) {

         String host = "http://localhost:11434/";

-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);

-        ollamaAPI.setRequestTimeoutSeconds(10);
+        ollama.setRequestTimeoutSeconds(10);
     }
 }
 ```
@@ -16,7 +16,7 @@ experience.
 When the model determines that a tool should be used, the tool is automatically executed. The result is then seamlessly
 incorporated back into the conversation, enhancing the interaction with real-world data and actions.

-The following example demonstrates usage of a simple tool, registered with the `OllamaAPI`, and then used within a chat
+The following example demonstrates usage of a simple tool, registered with the `Ollama`, and then used within a chat
 session. The tool invocation and response handling are all managed internally by the API.

 <CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/ChatWithTools.java"/>
@@ -33,7 +33,7 @@ This tool calling can also be done using the streaming API.

 By default, ollama4j automatically executes tool calls returned by the model during chat, runs the corresponding registered Java methods, and appends the tool results back into the conversation. For some applications, you may want to intercept tool calls and decide yourself when and how to execute them (for example, to queue them, to show a confirmation UI to the user, to run them in a sandbox, or to perform multi‑step orchestration).

-To enable this behavior, set the useTools flag to true on your OllamaAPI instance. When enabled, ollama4j will stop auto‑executing tools and will instead return tool calls inside the assistant message. You can then inspect the tool calls and execute them manually.
+To enable this behavior, set the useTools flag to true on your Ollama instance. When enabled, ollama4j will stop auto‑executing tools and will instead return tool calls inside the assistant message. You can then inspect the tool calls and execute them manually.


 Notes:
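A minimal sketch of opting into manual tool handling with the renamed class; the setter name `setUseTools` is an assumption based on the `useTools` flag described above:

```java
import io.github.ollama4j.Ollama;

public class ManualToolCalls {
    public static void main(String[] args) throws Exception {
        Ollama ollama = new Ollama("http://localhost:11434/");

        // Assumed setter for the useTools flag described in the text above.
        ollama.setUseTools(true);

        // With the flag enabled, ollama4j no longer auto-executes registered tools;
        // the chat response carries the model's tool calls so the caller can decide
        // when and how to run them (queue, confirm with a user, sandbox, etc.).
    }
}
```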
@@ -57,10 +57,10 @@ To use a method as a tool within a chat call, follow these steps:
     * `java.lang.Boolean`
     * `java.math.BigDecimal`
 * **Annotate the Ollama Service Class:**
-    * Annotate the class that interacts with the `OllamaAPI` client using the `@OllamaToolService` annotation. Reference
+    * Annotate the class that interacts with the `Ollama` client using the `@OllamaToolService` annotation. Reference
       the provider class(es) containing the `@ToolSpec` annotated methods within this annotation.
 * **Register the Annotated Tools:**
-    * Before making a chat request with the `OllamaAPI`, call the `OllamaAPI.registerAnnotatedTools()` method. This
+    * Before making a chat request with the `Ollama`, call the `Ollama.registerAnnotatedTools()` method. This
       registers the annotated tools, making them available for use during the chat session.

 Let's try an example. Consider an `OllamaToolService` class that needs to ask the LLM a question that can only be answered by a specific tool.
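A short sketch of the annotated-tool wiring described in the steps above; the `@ToolSpec` attribute names and the provider method signature are assumptions for illustration:

```java
import io.github.ollama4j.Ollama;
import io.github.ollama4j.tools.annotations.OllamaToolService;
import io.github.ollama4j.tools.annotations.ToolSpec;

// Provider class holding the @ToolSpec-annotated methods (attribute name assumed).
class FuelPriceToolProvider {
    @ToolSpec(desc = "Returns the current fuel price for a city and fuel type")
    public String fuelPrice(String city, String fuelType) {
        return "The price of " + fuelType + " in " + city + " is 100 units"; // stand-in result
    }
}

// The class that talks to the Ollama client lists its tool providers in the annotation.
@OllamaToolService(providers = {FuelPriceToolProvider.class})
public class AnnotatedToolChat {
    public static void main(String[] args) throws Exception {
        Ollama ollama = new Ollama("http://localhost:11434/");
        // Registers every @ToolSpec method from the providers declared above.
        ollama.registerAnnotatedTools();
        // ...then build a chat request and call ollama.chat(...) as in the embedded example.
    }
}
```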
@@ -16,16 +16,16 @@ _Base roles are `SYSTEM`, `USER`, `ASSISTANT`, `TOOL`._
 #### Add new role

 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.models.chat.OllamaChatMessageRole;

 public class Main {

     public static void main(String[] args) {
         String host = "http://localhost:11434/";
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);

-        OllamaChatMessageRole customRole = ollamaAPI.addCustomRole("custom-role");
+        OllamaChatMessageRole customRole = ollama.addCustomRole("custom-role");
     }
 }
 ```
@@ -33,16 +33,16 @@ public class Main {
 #### List roles

 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.models.chat.OllamaChatMessageRole;

 public class Main {

     public static void main(String[] args) {
         String host = "http://localhost:11434/";
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);

-        List<OllamaChatMessageRole> roles = ollamaAPI.listRoles();
+        List<OllamaChatMessageRole> roles = ollama.listRoles();
     }
 }
 ```
@@ -50,16 +50,16 @@ public class Main {
 #### Get role

 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;
 import io.github.ollama4j.models.chat.OllamaChatMessageRole;

 public class Main {

     public static void main(String[] args) {
         String host = "http://localhost:11434/";
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);

-        List<OllamaChatMessageRole> roles = ollamaAPI.getRole("custom-role");
+        List<OllamaChatMessageRole> roles = ollama.getRole("custom-role");
     }
 }
 ```
@@ -112,14 +112,14 @@ or use other suitable implementations.
 Create a new Java class in your project and add this code.

 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;

-public class OllamaAPITest {
+public class OllamaTest {

     public static void main(String[] args) {
-        OllamaAPI ollamaAPI = new OllamaAPI();
+        Ollama ollama = new Ollama();

-        boolean isOllamaServerReachable = ollamaAPI.ping();
+        boolean isOllamaServerReachable = ollama.ping();

         System.out.println("Is Ollama server running: " + isOllamaServerReachable);
     }
@@ -130,16 +130,16 @@ This uses the default Ollama host as `http://localhost:11434`.
 Specify a different Ollama host that you want to connect to.

 ```java
-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;

-public class OllamaAPITest {
+public class OllamaTest {

     public static void main(String[] args) {
         String host = "http://localhost:11434/";

-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        Ollama ollama = new Ollama(host);

-        boolean isOllamaServerReachable = ollamaAPI.ping();
+        boolean isOllamaServerReachable = ollama.ping();

         System.out.println("Is Ollama server running: " + isOllamaServerReachable);
     }
@@ -24,7 +24,6 @@ const config = {
   projectName: 'ollama4j', // Usually your repo name.

   onBrokenLinks: 'throw',
-  onBrokenMarkdownLinks: 'warn',

   // Even if you don't use internationalization, you can use this field to set
   // useful metadata like html lang. For example, if your site is Chinese, you
@@ -175,6 +174,9 @@ const config = {
     }),
   markdown: {
     mermaid: true,
+    hooks: {
+      onBrokenMarkdownLinks: 'warn'
+    }
   },
   themes: ['@docusaurus/theme-mermaid']
 };
@@ -53,9 +53,9 @@ import org.slf4j.LoggerFactory;
  * <p>This class provides methods for model management, chat, embeddings, tool registration, and more.
  */
 @SuppressWarnings({"DuplicatedCode", "resource", "SpellCheckingInspection"})
-public class OllamaAPI {
+public class Ollama {

-    private static final Logger LOG = LoggerFactory.getLogger(OllamaAPI.class);
+    private static final Logger LOG = LoggerFactory.getLogger(Ollama.class);

     private final String host;
     private Auth auth;
@@ -107,7 +107,7 @@ public class OllamaAPI {
     /**
      * Instantiates the Ollama API with the default Ollama host: {@code http://localhost:11434}
      */
-    public OllamaAPI() {
+    public Ollama() {
         this.host = "http://localhost:11434";
     }

@@ -116,7 +116,7 @@ public class OllamaAPI {
      *
      * @param host the host address of the Ollama server
      */
-    public OllamaAPI(String host) {
+    public Ollama(String host) {
         if (host.endsWith("/")) {
             this.host = host.substring(0, host.length() - 1);
         } else {
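The two constructors shown in this hunk can be exercised as follows; a brief sketch grounded in the Javadoc and the trailing-slash handling above:

```java
import io.github.ollama4j.Ollama;

public class OllamaConstructors {
    public static void main(String[] args) {
        // Default constructor targets http://localhost:11434 (per the Javadoc above).
        Ollama local = new Ollama();

        // The host constructor strips a trailing slash, so these two point at the same server.
        Ollama withSlash = new Ollama("http://192.168.29.229:11434/");
        Ollama withoutSlash = new Ollama("http://192.168.29.229:11434");
    }
}
```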
@@ -8,7 +8,7 @@
  */
 package io.github.ollama4j.tools.annotations;

-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
@@ -18,7 +18,7 @@ import java.lang.annotation.Target;
  * Annotation to mark a class as an Ollama tool service.
  * <p>
  * When a class is annotated with {@code @OllamaToolService}, the method
- * {@link OllamaAPI#registerAnnotatedTools()} can be used to automatically register all tool provider
+ * {@link Ollama#registerAnnotatedTools()} can be used to automatically register all tool provider
 * classes specified in the {@link #providers()} array. All methods in those provider classes that are
 * annotated with {@link ToolSpec} will be registered as tools.
 * </p>
@@ -8,7 +8,7 @@
  */
 package io.github.ollama4j.tools.annotations;

-import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.Ollama;
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
@@ -16,7 +16,7 @@ import java.lang.annotation.Target;

 /**
  * Annotation to mark a method as a tool that can be registered automatically by
- * {@link OllamaAPI#registerAnnotatedTools()}.
+ * {@link Ollama#registerAnnotatedTools()}.
  * <p>
  * Methods annotated with {@code @ToolSpec} will be discovered and registered as tools
  * when the containing class is specified as a provider in {@link OllamaToolService}.
@ -10,7 +10,7 @@ package io.github.ollama4j.integrationtests;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
import io.github.ollama4j.OllamaAPI;
|
||||
import io.github.ollama4j.Ollama;
|
||||
import io.github.ollama4j.exceptions.OllamaException;
|
||||
import io.github.ollama4j.impl.ConsoleOutputChatTokenHandler;
|
||||
import io.github.ollama4j.impl.ConsoleOutputGenerateTokenHandler;
|
||||
@ -44,11 +44,11 @@ import org.testcontainers.ollama.OllamaContainer;
|
||||
@OllamaToolService(providers = {AnnotatedTool.class})
|
||||
@TestMethodOrder(OrderAnnotation.class)
|
||||
@SuppressWarnings({"HttpUrlsUsage", "SpellCheckingInspection", "FieldCanBeLocal", "ConstantValue"})
|
||||
class OllamaAPIIntegrationTest {
|
||||
private static final Logger LOG = LoggerFactory.getLogger(OllamaAPIIntegrationTest.class);
|
||||
class OllamaIntegrationTest {
|
||||
private static final Logger LOG = LoggerFactory.getLogger(OllamaIntegrationTest.class);
|
||||
|
||||
private static OllamaContainer ollama;
|
||||
private static OllamaAPI api;
|
||||
private static Ollama api;
|
||||
|
||||
private static final String EMBEDDING_MODEL = "all-minilm";
|
||||
private static final String VISION_MODEL = "moondream:1.8b";
|
||||
@ -58,9 +58,9 @@ class OllamaAPIIntegrationTest {
|
||||
private static final String TOOLS_MODEL = "mistral:7b";
|
||||
|
||||
/**
|
||||
* Initializes the OllamaAPI instance for integration tests.
|
||||
* Initializes the Ollama instance for integration tests.
|
||||
*
|
||||
* <p>This method sets up the OllamaAPI client, either using an external Ollama host (if
|
||||
* <p>This method sets up the Ollama client, either using an external Ollama host (if
|
||||
* environment variables are set) or by starting a Testcontainers-based Ollama instance. It also
|
||||
* configures request timeout and model pull retry settings.
|
||||
*/
|
||||
@ -81,7 +81,7 @@ class OllamaAPIIntegrationTest {
|
||||
Properties props = new Properties();
|
||||
try {
|
||||
props.load(
|
||||
OllamaAPIIntegrationTest.class
|
||||
OllamaIntegrationTest.class
|
||||
.getClassLoader()
|
||||
.getResourceAsStream("test-config.properties"));
|
||||
} catch (Exception e) {
|
||||
@ -103,7 +103,7 @@ class OllamaAPIIntegrationTest {
|
||||
|
||||
if (useExternalOllamaHost) {
|
||||
LOG.info("Using external Ollama host: {}", ollamaHost);
|
||||
api = new OllamaAPI(ollamaHost);
|
||||
api = new Ollama(ollamaHost);
|
||||
} else {
|
||||
throw new RuntimeException(
|
||||
"USE_EXTERNAL_OLLAMA_HOST is not set so, we will be using Testcontainers"
|
||||
@ -124,7 +124,7 @@ class OllamaAPIIntegrationTest {
|
||||
ollama.start();
|
||||
LOG.info("Using Testcontainer Ollama host...");
|
||||
api =
|
||||
new OllamaAPI(
|
||||
new Ollama(
|
||||
"http://"
|
||||
+ ollama.getHost()
|
||||
+ ":"
|
||||
@ -143,8 +143,8 @@ class OllamaAPIIntegrationTest {
|
||||
@Test
|
||||
@Order(1)
|
||||
void shouldThrowConnectExceptionForWrongEndpoint() {
|
||||
OllamaAPI ollamaAPI = new OllamaAPI("http://wrong-host:11434");
|
||||
assertThrows(OllamaException.class, ollamaAPI::listModels);
|
||||
Ollama ollama = new Ollama("http://wrong-host:11434");
|
||||
assertThrows(OllamaException.class, ollama::listModels);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -778,7 +778,7 @@ class OllamaAPIIntegrationTest {
|
||||
Collections.emptyList(),
|
||||
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")
|
||||
.build();
|
||||
api.registerAnnotatedTools(new OllamaAPIIntegrationTest());
|
||||
api.registerAnnotatedTools(new OllamaIntegrationTest());
|
||||
|
||||
OllamaChatResult chatResult = api.chat(requestModel, null);
|
||||
assertNotNull(chatResult);
|
@ -10,7 +10,7 @@ package io.github.ollama4j.integrationtests;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
import io.github.ollama4j.OllamaAPI;
|
||||
import io.github.ollama4j.Ollama;
|
||||
import io.github.ollama4j.exceptions.OllamaException;
|
||||
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
|
||||
import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
|
||||
@ -22,7 +22,6 @@ import io.github.ollama4j.utils.OptionsBuilder;
|
||||
import java.io.File;
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.net.URISyntaxException;
|
||||
import java.time.Duration;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
@ -62,7 +61,7 @@ public class WithAuth {
|
||||
|
||||
private static OllamaContainer ollama;
|
||||
private static GenericContainer<?> nginx;
|
||||
private static OllamaAPI api;
|
||||
private static Ollama api;
|
||||
|
||||
@BeforeAll
|
||||
static void setUp() {
|
||||
@ -74,7 +73,7 @@ public class WithAuth {
|
||||
|
||||
LOG.info("Using Testcontainer Ollama host...");
|
||||
|
||||
api = new OllamaAPI("http://" + nginx.getHost() + ":" + nginx.getMappedPort(NGINX_PORT));
|
||||
api = new Ollama("http://" + nginx.getHost() + ":" + nginx.getMappedPort(NGINX_PORT));
|
||||
api.setRequestTimeoutSeconds(120);
|
||||
api.setNumberOfRetriesForModelPull(3);
|
||||
|
||||
@ -88,7 +87,7 @@ public class WithAuth {
|
||||
+ "→ Proxy URL: {}",
|
||||
ollamaUrl,
|
||||
nginxUrl);
|
||||
LOG.info("OllamaAPI initialized with bearer auth token: {}", BEARER_AUTH_TOKEN);
|
||||
LOG.info("Ollama initialized with bearer auth token: {}", BEARER_AUTH_TOKEN);
|
||||
}
|
||||
|
||||
private static OllamaContainer createOllamaContainer() {
|
||||
@ -155,9 +154,9 @@ public class WithAuth {
|
||||
try {
|
||||
assertTrue(
|
||||
api.ping(),
|
||||
"Expected OllamaAPI to successfully ping through NGINX with valid auth token.");
|
||||
"Expected Ollama to successfully ping through NGINX with valid auth token.");
|
||||
} catch (Exception e) {
|
||||
fail("Exception occurred while pinging OllamaAPI through NGINX: " + e.getMessage(), e);
|
||||
fail("Exception occurred while pinging Ollama through NGINX: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
@ -168,20 +167,18 @@ public class WithAuth {
|
||||
try {
|
||||
assertFalse(
|
||||
api.ping(),
|
||||
"Expected OllamaAPI ping to fail through NGINX with an invalid auth token.");
|
||||
"Expected Ollama ping to fail through NGINX with an invalid auth token.");
|
||||
} catch (Exception e) {
|
||||
// If an exception is thrown, that's also an expected failure for a wrong token
|
||||
// (e.g., OllamaBaseException or IOException)
|
||||
// Optionally, you can assert the type/message of the exception if needed
|
||||
// For now, we treat any exception as a pass for this negative test
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
@Order(2)
|
||||
void testAskModelWithStructuredOutput()
|
||||
throws OllamaException, IOException, InterruptedException, URISyntaxException {
|
||||
void testAskModelWithStructuredOutput() throws OllamaException, IOException {
|
||||
api.setBearerAuth(BEARER_AUTH_TOKEN);
|
||||
String model = GENERAL_PURPOSE_MODEL;
|
||||
api.pullModel(model);
|
||||
|
@ -12,7 +12,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
import io.github.ollama4j.OllamaAPI;
|
||||
import io.github.ollama4j.Ollama;
|
||||
import io.github.ollama4j.exceptions.OllamaException;
|
||||
import io.github.ollama4j.exceptions.RoleNotFoundException;
|
||||
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
|
||||
@ -36,12 +36,12 @@ import org.mockito.Mockito;
|
||||
class TestMockedAPIs {
|
||||
@Test
|
||||
void testPullModel() {
|
||||
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
|
||||
Ollama ollama = Mockito.mock(Ollama.class);
|
||||
String model = "llama2";
|
||||
try {
|
||||
doNothing().when(ollamaAPI).pullModel(model);
|
||||
ollamaAPI.pullModel(model);
|
||||
verify(ollamaAPI, times(1)).pullModel(model);
|
||||
doNothing().when(ollama).pullModel(model);
|
||||
ollama.pullModel(model);
|
||||
verify(ollama, times(1)).pullModel(model);
|
||||
} catch (OllamaException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
@ -49,11 +49,11 @@ class TestMockedAPIs {
|
||||
|
||||
@Test
|
||||
void testListModels() {
|
||||
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
|
||||
Ollama ollama = Mockito.mock(Ollama.class);
|
||||
try {
|
||||
when(ollamaAPI.listModels()).thenReturn(new ArrayList<>());
|
||||
ollamaAPI.listModels();
|
||||
verify(ollamaAPI, times(1)).listModels();
|
||||
when(ollama.listModels()).thenReturn(new ArrayList<>());
|
||||
ollama.listModels();
|
||||
verify(ollama, times(1)).listModels();
|
||||
} catch (OllamaException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
@ -61,7 +61,7 @@ class TestMockedAPIs {
|
||||
|
||||
@Test
|
||||
void testCreateModel() {
|
||||
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
|
||||
Ollama ollama = Mockito.mock(Ollama.class);
|
||||
CustomModelRequest customModelRequest =
|
||||
CustomModelRequest.builder()
|
||||
.model("mario")
|
||||
@ -69,9 +69,9 @@ class TestMockedAPIs {
|
||||
.system("You are Mario from Super Mario Bros.")
|
||||
.build();
|
||||
try {
|
||||
doNothing().when(ollamaAPI).createModel(customModelRequest);
|
||||
ollamaAPI.createModel(customModelRequest);
|
||||
verify(ollamaAPI, times(1)).createModel(customModelRequest);
|
||||
doNothing().when(ollama).createModel(customModelRequest);
|
||||
ollama.createModel(customModelRequest);
|
||||
verify(ollama, times(1)).createModel(customModelRequest);
|
||||
} catch (OllamaException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
@ -79,12 +79,12 @@ class TestMockedAPIs {
|
||||
|
||||
@Test
|
||||
void testDeleteModel() {
|
||||
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
|
||||
Ollama ollama = Mockito.mock(Ollama.class);
|
||||
String model = "llama2";
|
||||
try {
|
||||
doNothing().when(ollamaAPI).deleteModel(model, true);
|
||||
ollamaAPI.deleteModel(model, true);
|
||||
verify(ollamaAPI, times(1)).deleteModel(model, true);
|
||||
doNothing().when(ollama).deleteModel(model, true);
|
||||
ollama.deleteModel(model, true);
|
||||
verify(ollama, times(1)).deleteModel(model, true);
|
||||
} catch (OllamaException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
@ -92,12 +92,12 @@ class TestMockedAPIs {
|
||||
|
||||
@Test
|
||||
void testGetModelDetails() {
|
||||
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
|
||||
Ollama ollama = Mockito.mock(Ollama.class);
|
||||
String model = "llama2";
|
||||
try {
|
||||
when(ollamaAPI.getModelDetails(model)).thenReturn(new ModelDetail());
|
||||
ollamaAPI.getModelDetails(model);
|
||||
verify(ollamaAPI, times(1)).getModelDetails(model);
|
||||
when(ollama.getModelDetails(model)).thenReturn(new ModelDetail());
|
||||
ollama.getModelDetails(model);
|
||||
verify(ollama, times(1)).getModelDetails(model);
|
||||
} catch (OllamaException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
@ -105,16 +105,16 @@ class TestMockedAPIs {
|
||||
|
||||
@Test
|
||||
void testGenerateEmbeddings() {
|
||||
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
|
||||
Ollama ollama = Mockito.mock(Ollama.class);
|
||||
String model = "llama2";
|
||||
String prompt = "some prompt text";
|
||||
try {
|
||||
OllamaEmbedRequest m = new OllamaEmbedRequest();
|
||||
m.setModel(model);
|
||||
m.setInput(List.of(prompt));
|
||||
when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResult());
|
||||
ollamaAPI.embed(m);
|
||||
verify(ollamaAPI, times(1)).embed(m);
|
||||
when(ollama.embed(m)).thenReturn(new OllamaEmbedResult());
|
||||
ollama.embed(m);
|
||||
verify(ollama, times(1)).embed(m);
|
||||
} catch (OllamaException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
@ -122,14 +122,14 @@ class TestMockedAPIs {
|
||||
|
||||
@Test
|
||||
void testEmbed() {
|
||||
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
|
||||
Ollama ollama = Mockito.mock(Ollama.class);
|
||||
String model = "llama2";
|
||||
List<String> inputs = List.of("some prompt text");
|
||||
try {
|
||||
OllamaEmbedRequest m = new OllamaEmbedRequest(model, inputs);
|
||||
when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResult());
|
||||
ollamaAPI.embed(m);
|
||||
verify(ollamaAPI, times(1)).embed(m);
|
||||
when(ollama.embed(m)).thenReturn(new OllamaEmbedResult());
|
||||
ollama.embed(m);
|
||||
verify(ollama, times(1)).embed(m);
|
||||
} catch (OllamaException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
@ -137,14 +137,14 @@ class TestMockedAPIs {
|
||||
|
||||
@Test
|
||||
void testEmbedWithEmbedRequestModel() {
|
||||
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
|
||||
Ollama ollama = Mockito.mock(Ollama.class);
|
||||
String model = "llama2";
|
||||
List<String> inputs = List.of("some prompt text");
|
||||
try {
|
||||
when(ollamaAPI.embed(new OllamaEmbedRequest(model, inputs)))
|
||||
when(ollama.embed(new OllamaEmbedRequest(model, inputs)))
|
||||
.thenReturn(new OllamaEmbedResult());
|
||||
ollamaAPI.embed(new OllamaEmbedRequest(model, inputs));
|
||||
verify(ollamaAPI, times(1)).embed(new OllamaEmbedRequest(model, inputs));
|
||||
ollama.embed(new OllamaEmbedRequest(model, inputs));
|
||||
verify(ollama, times(1)).embed(new OllamaEmbedRequest(model, inputs));
|
||||
} catch (OllamaException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
@ -152,7 +152,7 @@ class TestMockedAPIs {
|
||||
|
||||
@Test
|
||||
void testAsk() {
|
||||
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
|
||||
Ollama ollama = Mockito.mock(Ollama.class);
|
||||
String model = "llama2";
|
||||
String prompt = "some prompt text";
|
||||
OllamaGenerateStreamObserver observer = new OllamaGenerateStreamObserver(null, null);
|
||||
@ -165,10 +165,9 @@ class TestMockedAPIs {
|
||||
.withThink(false)
|
||||
.withStreaming(false)
|
||||
.build();
|
||||
when(ollamaAPI.generate(request, observer))
|
||||
.thenReturn(new OllamaResult("", "", 0, 200));
|
||||
ollamaAPI.generate(request, observer);
|
||||
verify(ollamaAPI, times(1)).generate(request, observer);
|
||||
when(ollama.generate(request, observer)).thenReturn(new OllamaResult("", "", 0, 200));
|
||||
ollama.generate(request, observer);
|
||||
verify(ollama, times(1)).generate(request, observer);
|
||||
} catch (OllamaException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
@ -176,7 +175,7 @@ class TestMockedAPIs {
|
||||
|
||||
@Test
|
||||
void testAskWithImageFiles() {
|
||||
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
|
||||
Ollama ollama = Mockito.mock(Ollama.class);
|
||||
String model = "llama2";
|
||||
String prompt = "some prompt text";
|
||||
try {
|
||||
@ -192,9 +191,9 @@ class TestMockedAPIs {
|
||||
.withFormat(null)
|
||||
.build();
|
||||
OllamaGenerateStreamObserver handler = null;
|
||||
when(ollamaAPI.generate(request, handler)).thenReturn(new OllamaResult("", "", 0, 200));
|
||||
ollamaAPI.generate(request, handler);
|
||||
verify(ollamaAPI, times(1)).generate(request, handler);
|
||||
when(ollama.generate(request, handler)).thenReturn(new OllamaResult("", "", 0, 200));
|
||||
ollama.generate(request, handler);
|
||||
verify(ollama, times(1)).generate(request, handler);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
@ -202,7 +201,7 @@ class TestMockedAPIs {
|
||||
|
||||
@Test
|
||||
void testAskWithImageURLs() {
|
||||
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
|
||||
Ollama ollama = Mockito.mock(Ollama.class);
|
||||
String model = "llama2";
|
||||
String prompt = "some prompt text";
|
||||
try {
|
||||
@ -218,9 +217,9 @@ class TestMockedAPIs {
|
||||
.withFormat(null)
|
||||
.build();
|
||||
OllamaGenerateStreamObserver handler = null;
|
||||
when(ollamaAPI.generate(request, handler)).thenReturn(new OllamaResult("", "", 0, 200));
|
||||
ollamaAPI.generate(request, handler);
|
||||
verify(ollamaAPI, times(1)).generate(request, handler);
|
||||
when(ollama.generate(request, handler)).thenReturn(new OllamaResult("", "", 0, 200));
|
||||
ollama.generate(request, handler);
|
||||
verify(ollama, times(1)).generate(request, handler);
|
||||
} catch (OllamaException e) {
|
||||
throw new RuntimeException(e);
|
||||
} catch (IOException e) {
|
||||
@ -230,56 +229,55 @@ class TestMockedAPIs {
|
||||
|
||||
@Test
|
||||
void testAskAsync() throws OllamaException {
|
||||
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
|
||||
Ollama ollama = Mockito.mock(Ollama.class);
|
||||
String model = "llama2";
|
||||
String prompt = "some prompt text";
|
||||
when(ollamaAPI.generateAsync(model, prompt, false, false))
|
||||
when(ollama.generateAsync(model, prompt, false, false))
|
||||
.thenReturn(new OllamaAsyncResultStreamer(null, null, 3));
|
||||
ollamaAPI.generateAsync(model, prompt, false, false);
|
||||
verify(ollamaAPI, times(1)).generateAsync(model, prompt, false, false);
|
||||
ollama.generateAsync(model, prompt, false, false);
|
||||
verify(ollama, times(1)).generateAsync(model, prompt, false, false);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAddCustomRole() {
|
||||
OllamaAPI ollamaAPI = mock(OllamaAPI.class);
|
||||
Ollama ollama = mock(Ollama.class);
|
||||
String roleName = "custom-role";
|
||||
OllamaChatMessageRole expectedRole = OllamaChatMessageRole.newCustomRole(roleName);
|
||||
when(ollamaAPI.addCustomRole(roleName)).thenReturn(expectedRole);
|
||||
OllamaChatMessageRole customRole = ollamaAPI.addCustomRole(roleName);
|
||||
when(ollama.addCustomRole(roleName)).thenReturn(expectedRole);
|
||||
OllamaChatMessageRole customRole = ollama.addCustomRole(roleName);
|
||||
assertEquals(expectedRole, customRole);
|
||||
verify(ollamaAPI, times(1)).addCustomRole(roleName);
|
||||
verify(ollama, times(1)).addCustomRole(roleName);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testListRoles() {
|
||||
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
|
||||
Ollama ollama = Mockito.mock(Ollama.class);
|
||||
OllamaChatMessageRole role1 = OllamaChatMessageRole.newCustomRole("role1");
|
||||
OllamaChatMessageRole role2 = OllamaChatMessageRole.newCustomRole("role2");
|
||||
List<OllamaChatMessageRole> expectedRoles = List.of(role1, role2);
|
||||
when(ollamaAPI.listRoles()).thenReturn(expectedRoles);
|
||||
List<OllamaChatMessageRole> actualRoles = ollamaAPI.listRoles();
|
||||
when(ollama.listRoles()).thenReturn(expectedRoles);
|
||||
List<OllamaChatMessageRole> actualRoles = ollama.listRoles();
|
||||
assertEquals(expectedRoles, actualRoles);
|
||||
verify(ollamaAPI, times(1)).listRoles();
|
||||
verify(ollama, times(1)).listRoles();
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetRoleNotFound() {
|
||||
OllamaAPI ollamaAPI = mock(OllamaAPI.class);
|
||||
Ollama ollama = mock(Ollama.class);
|
||||
String roleName = "non-existing-role";
|
||||
try {
|
||||
when(ollamaAPI.getRole(roleName))
|
||||
.thenThrow(new RoleNotFoundException("Role not found"));
|
||||
when(ollama.getRole(roleName)).thenThrow(new RoleNotFoundException("Role not found"));
|
||||
} catch (RoleNotFoundException exception) {
|
||||
throw new RuntimeException("Failed to run test: testGetRoleNotFound");
|
||||
}
|
||||
try {
|
||||
ollamaAPI.getRole(roleName);
|
||||
ollama.getRole(roleName);
|
||||
fail("Expected RoleNotFoundException not thrown");
|
||||
} catch (RoleNotFoundException exception) {
|
||||
assertEquals("Role not found", exception.getMessage());
|
||||
}
|
||||
try {
|
||||
verify(ollamaAPI, times(1)).getRole(roleName);
|
||||
verify(ollama, times(1)).getRole(roleName);
|
||||
} catch (RoleNotFoundException exception) {
|
||||
throw new RuntimeException("Failed to run test: testGetRoleNotFound");
|
||||
}
|
||||
@ -287,18 +285,18 @@ class TestMockedAPIs {
|
||||
|
||||
@Test
|
||||
void testGetRoleFound() {
|
||||
OllamaAPI ollamaAPI = mock(OllamaAPI.class);
|
||||
Ollama ollama = mock(Ollama.class);
|
||||
String roleName = "existing-role";
|
||||
OllamaChatMessageRole expectedRole = OllamaChatMessageRole.newCustomRole(roleName);
|
||||
try {
|
||||
when(ollamaAPI.getRole(roleName)).thenReturn(expectedRole);
|
||||
when(ollama.getRole(roleName)).thenReturn(expectedRole);
|
||||
} catch (RoleNotFoundException exception) {
|
||||
throw new RuntimeException("Failed to run test: testGetRoleFound");
|
||||
}
|
||||
try {
|
||||
OllamaChatMessageRole actualRole = ollamaAPI.getRole(roleName);
|
||||
OllamaChatMessageRole actualRole = ollama.getRole(roleName);
|
||||
assertEquals(expectedRole, actualRole);
|
||||
verify(ollamaAPI, times(1)).getRole(roleName);
|
||||
verify(ollama, times(1)).getRole(roleName);
|
||||
} catch (RoleNotFoundException exception) {
|
||||
throw new RuntimeException("Failed to run test: testGetRoleFound");
|
||||
}
|
||||
|