forked from Mirror/ollama4j

Update APIs from ask to generate

This commit is contained in:
parent 327ae7437f
commit 60fe5d6ffb
@@ -67,7 +67,7 @@ In your Maven project, add this dependency:
 <dependency>
     <groupId>io.github.amithkoujalgi</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.29</version>
+    <version>1.0.47</version>
 </dependency>
 ```
@@ -2,7 +2,7 @@
 sidebar_position: 2
 ---

-# Ask - Async
+# Generate - Async

 This API lets you ask questions to the LLMs in a asynchronous way.
 These APIs correlate to
@@ -19,7 +19,7 @@ public class Main {

         String prompt = "Who are you?";

-        OllamaAsyncResultCallback callback = ollamaAPI.askAsync(OllamaModelType.LLAMA2, prompt);
+        OllamaAsyncResultCallback callback = ollamaAPI.generateAsync(OllamaModelType.LLAMA2, prompt);

         while (!callback.isComplete() || !callback.getStream().isEmpty()) {
             // poll for data from the response stream
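Read together, the changed lines on this docs page correspond to a complete async example along these lines. This is only a sketch of the renamed API: the host URL, the import paths, the element type returned by the stream queue, and the polling interval are assumptions not shown in the diff.

```java
// NOTE: import paths are assumed from the library's groupId and may differ slightly.
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.OllamaAsyncResultCallback;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;

public class Main {
    public static void main(String[] args) throws Exception {
        // Assumed host; point this at your own Ollama server.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        String prompt = "Who are you?";

        // Renamed from askAsync(); returns a callback handle immediately.
        OllamaAsyncResultCallback callback =
                ollamaAPI.generateAsync(OllamaModelType.LLAMA2, prompt);

        // Poll the response stream until generation completes and the queue drains.
        while (!callback.isComplete() || !callback.getStream().isEmpty()) {
            String chunk = callback.getStream().poll();
            if (chunk != null) {
                System.out.print(chunk);
            }
            Thread.sleep(100);
        }
    }
}
```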
@@ -2,7 +2,7 @@
 sidebar_position: 3
 ---

-# Ask - With Image Files
+# Generate - With Image Files

 This API lets you ask questions along with the image files to the LLMs.
 These APIs correlate to
@@ -15,7 +15,7 @@ recommended.

 :::

-## Ask (Sync)
+## Synchronous mode

 If you have this image downloaded and you pass the path to the downloaded image to the following code:

@@ -29,7 +29,7 @@ public class Main {
         OllamaAPI ollamaAPI = new OllamaAPI(host);
         ollamaAPI.setRequestTimeoutSeconds(10);

-        OllamaResult result = ollamaAPI.askWithImageFiles(OllamaModelType.LLAVA,
+        OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA,
                 "What's in this image?",
                 List.of(
                         new File("/path/to/image")));
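Completed into a standalone sketch, the renamed image-file call might be used as below. The host URL and import paths are assumptions, and the Options argument follows the generateWithImageFiles signature declared in OllamaAPI.java further down; the image path is a placeholder.

```java
// NOTE: import paths are assumed from the library's groupId and may differ slightly.
import java.io.File;
import java.util.List;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class Main {
    public static void main(String[] args) throws Exception {
        // Assumed host; replace with your Ollama server URL.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        ollamaAPI.setRequestTimeoutSeconds(10);

        // Renamed from askWithImageFiles(); the file path is a placeholder.
        OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA,
                "What's in this image?",
                List.of(new File("/path/to/image")),
                new OptionsBuilder().build());

        System.out.println(result.getResponse());
    }
}
```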
@@ -2,7 +2,7 @@
 sidebar_position: 4
 ---

-# Ask - With Image URLs
+# Generate - With Image URLs

 This API lets you ask questions along with the image files to the LLMs.
 These APIs correlate to
@@ -29,7 +29,7 @@ public class Main {
         OllamaAPI ollamaAPI = new OllamaAPI(host);
         ollamaAPI.setRequestTimeoutSeconds(10);

-        OllamaResult result = ollamaAPI.askWithImageURLs(OllamaModelType.LLAVA,
+        OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA,
                 "What's in this image?",
                 List.of(
                         "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"));
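The URL-based variant follows the same pattern; a minimal sketch under the same assumptions as the file-based example above (assumed host and import paths, Options argument taken from the generateWithImageURLs signature in OllamaAPI.java):

```java
// NOTE: import paths are assumed from the library's groupId and may differ slightly.
import java.util.List;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class Main {
    public static void main(String[] args) throws Exception {
        // Assumed host; replace with your Ollama server URL.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        ollamaAPI.setRequestTimeoutSeconds(10);

        // Renamed from askWithImageURLs().
        OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA,
                "What's in this image?",
                List.of("https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
                new OptionsBuilder().build());

        System.out.println(result.getResponse());
    }
}
```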
@@ -2,7 +2,7 @@
 sidebar_position: 1
 ---

-# Ask - Sync
+# Generate - Sync

 This API lets you ask questions to the LLMs in a synchronous way.
 These APIs correlate to
@@ -25,7 +25,7 @@ public class Main {
         OllamaAPI ollamaAPI = new OllamaAPI(host);

         OllamaResult result =
-                ollamaAPI.ask(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());
+                ollamaAPI.generate(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());

         System.out.println(result.getResponse());
     }
@@ -55,7 +55,7 @@ public class Main {
         String prompt = "List all cricket world cup teams of 2019.";

         OllamaResult result =
-                ollamaAPI.ask(OllamaModelType.LLAMA2, prompt, new OptionsBuilder().build());
+                ollamaAPI.generate(OllamaModelType.LLAMA2, prompt, new OptionsBuilder().build());

         System.out.println(result.getResponse());
     }
@@ -97,7 +97,7 @@ public class Main {
                 SamplePrompts.getSampleDatabasePromptWithQuestion(
                         "List all customer names who have bought one or more products");
         OllamaResult result =
-                ollamaAPI.ask(OllamaModelType.SQLCODER, prompt, new OptionsBuilder().build());
+                ollamaAPI.generate(OllamaModelType.SQLCODER, prompt, new OptionsBuilder().build());
         System.out.println(result.getResponse());
     }
 }
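For the synchronous page, the changed call sites amount to an example like the following sketch; the host URL and import paths are assumptions not shown in the diff, while the generate(model, prompt, options) signature comes from the OllamaAPI.java hunks below.

```java
// NOTE: import paths are assumed from the library's groupId and may differ slightly.
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class Main {
    public static void main(String[] args) throws Exception {
        // Assumed host; replace with your Ollama server URL.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Renamed from ask(); blocks until the model finishes responding.
        OllamaResult result =
                ollamaAPI.generate(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());

        System.out.println(result.getResponse());
    }
}
```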
@@ -42,7 +42,7 @@ public class AskPhi {
                 .addSeparator()
                 .add("How do I read a file in Go and print its contents to stdout?");

-        OllamaResult response = ollamaAPI.ask(model, promptBuilder.build());
+        OllamaResult response = ollamaAPI.generate(model, promptBuilder.build());
         System.out.println(response.getResponse());
     }
 }
@@ -329,7 +329,8 @@ public class OllamaAPI {
   }

   /**
-   * Ask a question to a model running on Ollama server. This is a sync/blocking call.
+   * Generate response for a question to a model running on Ollama server. This is a sync/blocking
+   * call.
    *
    * @param model the ollama model to ask the question to
    * @param prompt the prompt/question text
@@ -338,23 +339,23 @@ public class OllamaAPI {
    *     details on the options</a>
    * @return OllamaResult that includes response text and time taken for response
    */
-  public OllamaResult ask(String model, String prompt, Options options)
+  public OllamaResult generate(String model, String prompt, Options options)
       throws OllamaBaseException, IOException, InterruptedException {
     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
     ollamaRequestModel.setOptions(options.getOptionsMap());
-    return askSync(ollamaRequestModel);
+    return generateSync(ollamaRequestModel);
   }

   /**
-   * Ask a question to a model running on Ollama server and get a callback handle that can be used
-   * to check for status and get the response from the model later. This would be an
-   * async/non-blocking call.
+   * Generate response for a question to a model running on Ollama server and get a callback handle
+   * that can be used to check for status and get the response from the model later. This would be
+   * an async/non-blocking call.
    *
    * @param model the ollama model to ask the question to
    * @param prompt the prompt/question text
    * @return the ollama async result callback handle
    */
-  public OllamaAsyncResultCallback askAsync(String model, String prompt) {
+  public OllamaAsyncResultCallback generateAsync(String model, String prompt) {
     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);

     URI uri = URI.create(this.host + "/api/generate");
@@ -377,7 +378,7 @@ public class OllamaAPI {
    *     details on the options</a>
    * @return OllamaResult that includes response text and time taken for response
    */
-  public OllamaResult askWithImageFiles(
+  public OllamaResult generateWithImageFiles(
       String model, String prompt, List<File> imageFiles, Options options)
       throws OllamaBaseException, IOException, InterruptedException {
     List<String> images = new ArrayList<>();
@@ -386,7 +387,7 @@ public class OllamaAPI {
     }
     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
     ollamaRequestModel.setOptions(options.getOptionsMap());
-    return askSync(ollamaRequestModel);
+    return generateSync(ollamaRequestModel);
   }

   /**
@@ -401,7 +402,7 @@ public class OllamaAPI {
    *     details on the options</a>
    * @return OllamaResult that includes response text and time taken for response
    */
-  public OllamaResult askWithImageURLs(
+  public OllamaResult generateWithImageURLs(
       String model, String prompt, List<String> imageURLs, Options options)
       throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
     List<String> images = new ArrayList<>();
@@ -410,7 +411,7 @@ public class OllamaAPI {
     }
     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
     ollamaRequestModel.setOptions(options.getOptionsMap());
-    return askSync(ollamaRequestModel);
+    return generateSync(ollamaRequestModel);
   }

   private static String encodeFileToBase64(File file) throws IOException {
@@ -435,7 +436,7 @@ public class OllamaAPI {
     }
   }

-  private OllamaResult askSync(OllamaRequestModel ollamaRequestModel)
+  private OllamaResult generateSync(OllamaRequestModel ollamaRequestModel)
       throws OllamaBaseException, IOException, InterruptedException {
     long startTime = System.currentTimeMillis();
     HttpClient httpClient = HttpClient.newHttpClient();
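For callers upgrading across this commit, the public renames in OllamaAPI.java map one-to-one; a call-site migration sketch, using only names taken from the hunks above (the receiver and argument variables are placeholders):

```java
// ollamaAPI.ask(model, prompt, options)                        -> ollamaAPI.generate(model, prompt, options)
// ollamaAPI.askAsync(model, prompt)                            -> ollamaAPI.generateAsync(model, prompt)
// ollamaAPI.askWithImageFiles(model, prompt, files, options)   -> ollamaAPI.generateWithImageFiles(model, prompt, files, options)
// ollamaAPI.askWithImageURLs(model, prompt, urls, options)     -> ollamaAPI.generateWithImageURLs(model, prompt, urls, options)
// The private helper askSync(...) becomes generateSync(...) but is internal to the class.
```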
@@ -101,7 +101,7 @@ class TestRealAPIs {
     testEndpointReachability();
     try {
       OllamaResult result =
-          ollamaAPI.ask(
+          ollamaAPI.generate(
               OllamaModelType.LLAMA2,
               "What is the capital of France? And what's France's connection with Mona Lisa?",
               new OptionsBuilder().build());
@@ -119,7 +119,7 @@ class TestRealAPIs {
     testEndpointReachability();
     try {
       OllamaResult result =
-          ollamaAPI.ask(
+          ollamaAPI.generate(
               OllamaModelType.LLAMA2,
               "What is the capital of France? And what's France's connection with Mona Lisa?",
               new OptionsBuilder().setTemperature(0.9f).build());
@@ -138,7 +138,7 @@ class TestRealAPIs {
     File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
     try {
       OllamaResult result =
-          ollamaAPI.askWithImageFiles(
+          ollamaAPI.generateWithImageFiles(
               OllamaModelType.LLAVA,
               "What is in this image?",
               List.of(imageFile),
@@ -157,7 +157,7 @@ class TestRealAPIs {
     testEndpointReachability();
     try {
       OllamaResult result =
-          ollamaAPI.askWithImageURLs(
+          ollamaAPI.generateWithImageURLs(
               OllamaModelType.LLAVA,
               "What is in this image?",
               List.of(
@@ -103,10 +103,10 @@ class TestMockedAPIs {
     String prompt = "some prompt text";
     OptionsBuilder optionsBuilder = new OptionsBuilder();
     try {
-      when(ollamaAPI.ask(model, prompt, optionsBuilder.build()))
+      when(ollamaAPI.generate(model, prompt, optionsBuilder.build()))
           .thenReturn(new OllamaResult("", 0, 200));
-      ollamaAPI.ask(model, prompt, optionsBuilder.build());
-      verify(ollamaAPI, times(1)).ask(model, prompt, optionsBuilder.build());
+      ollamaAPI.generate(model, prompt, optionsBuilder.build());
+      verify(ollamaAPI, times(1)).generate(model, prompt, optionsBuilder.build());
     } catch (IOException | OllamaBaseException | InterruptedException e) {
       throw new RuntimeException(e);
     }
@@ -118,13 +118,14 @@ class TestMockedAPIs {
     String model = OllamaModelType.LLAMA2;
     String prompt = "some prompt text";
     try {
-      when(ollamaAPI.askWithImageFiles(
+      when(ollamaAPI.generateWithImageFiles(
               model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
           .thenReturn(new OllamaResult("", 0, 200));
-      ollamaAPI.askWithImageFiles(
+      ollamaAPI.generateWithImageFiles(
           model, prompt, Collections.emptyList(), new OptionsBuilder().build());
       verify(ollamaAPI, times(1))
-          .askWithImageFiles(model, prompt, Collections.emptyList(), new OptionsBuilder().build());
+          .generateWithImageFiles(
+              model, prompt, Collections.emptyList(), new OptionsBuilder().build());
     } catch (IOException | OllamaBaseException | InterruptedException e) {
       throw new RuntimeException(e);
     }
@@ -136,13 +137,14 @@ class TestMockedAPIs {
     String model = OllamaModelType.LLAMA2;
     String prompt = "some prompt text";
     try {
-      when(ollamaAPI.askWithImageURLs(
+      when(ollamaAPI.generateWithImageURLs(
              model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
           .thenReturn(new OllamaResult("", 0, 200));
-      ollamaAPI.askWithImageURLs(
+      ollamaAPI.generateWithImageURLs(
          model, prompt, Collections.emptyList(), new OptionsBuilder().build());
       verify(ollamaAPI, times(1))
-          .askWithImageURLs(model, prompt, Collections.emptyList(), new OptionsBuilder().build());
+          .generateWithImageURLs(
+              model, prompt, Collections.emptyList(), new OptionsBuilder().build());
     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
       throw new RuntimeException(e);
     }
@@ -153,9 +155,9 @@ class TestMockedAPIs {
     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
     String model = OllamaModelType.LLAMA2;
     String prompt = "some prompt text";
-    when(ollamaAPI.askAsync(model, prompt))
+    when(ollamaAPI.generateAsync(model, prompt))
         .thenReturn(new OllamaAsyncResultCallback(null, null, 3));
-    ollamaAPI.askAsync(model, prompt);
-    verify(ollamaAPI, times(1)).askAsync(model, prompt);
+    ollamaAPI.generateAsync(model, prompt);
+    verify(ollamaAPI, times(1)).generateAsync(model, prompt);
   }
 }