commit 1fdd89f50f
parent 798f881b6c

    clean up
README.md
@@ -44,13 +44,13 @@ for [Ollama](https://github.com/jmorganca/ollama/blob/main/docs/api.md) APIs.

 [![][ollama-shield]][ollama] Or [![][ollama-docker-shield]][ollama-docker]

 [ollama]: https://ollama.ai/

 [ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=for-the-badge&labelColor=gray

 [ollama-docker]: https://hub.docker.com/r/ollama/ollama

 [ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=for-the-badge&labelColor=gray


 #### Installation

 In your Maven project, add this dependency available in
@@ -452,7 +452,7 @@ make it

 - [x] Fix deprecated HTTP client code
 - [x] Setup logging
 - [x] Use lombok
-- [ ] Update request body creation with Java objects
+- [x] Update request body creation with Java objects
 - [ ] Async APIs for images
 - [ ] Add additional params for `ask` APIs such as:
   - `options`: additional model parameters for the Modelfile such as `temperature`
OllamaAPI.java
@@ -2,6 +2,10 @@ package io.github.amithkoujalgi.ollama4j.core;

 import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
 import io.github.amithkoujalgi.ollama4j.core.models.*;
+import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest;
+import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest;
+import io.github.amithkoujalgi.ollama4j.core.models.request.ModelEmbeddingsRequest;
+import io.github.amithkoujalgi.ollama4j.core.models.request.ModelRequest;
 import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 import java.io.BufferedReader;
 import java.io.ByteArrayOutputStream;
@@ -17,7 +21,6 @@ import java.net.http.HttpRequest;
 import java.net.http.HttpResponse;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
-import java.nio.file.Path;
 import java.time.Duration;
 import java.util.ArrayList;
 import java.util.Base64;
@@ -25,9 +28,7 @@ import java.util.List;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-/**
- * The base Ollama API class.
- */
+/** The base Ollama API class. */
 @SuppressWarnings("DuplicatedCode")
 public class OllamaAPI {

@@ -71,15 +72,21 @@ public class OllamaAPI {
       throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
     String url = this.host + "/api/tags";
     HttpClient httpClient = HttpClient.newHttpClient();
-    HttpRequest httpRequest = HttpRequest.newBuilder().uri(new URI(url))
-        .header("Accept", "application/json").header("Content-type", "application/json")
-        .timeout(Duration.ofSeconds(requestTimeoutSeconds)).GET().build();
-    HttpResponse<String> response = httpClient.send(httpRequest,
-        HttpResponse.BodyHandlers.ofString());
+    HttpRequest httpRequest =
+        HttpRequest.newBuilder()
+            .uri(new URI(url))
+            .header("Accept", "application/json")
+            .header("Content-type", "application/json")
+            .timeout(Duration.ofSeconds(requestTimeoutSeconds))
+            .GET()
+            .build();
+    HttpResponse<String> response =
+        httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
     int statusCode = response.statusCode();
     String responseString = response.body();
     if (statusCode == 200) {
-      return Utils.getObjectMapper().readValue(responseString, ListModelsResponse.class)
+      return Utils.getObjectMapper()
+          .readValue(responseString, ListModelsResponse.class)
           .getModels();
     } else {
       throw new OllamaBaseException(statusCode + " - " + responseString);
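The hunk above only restyles the model-listing request into one builder call per line; behaviour is unchanged. A minimal caller sketch, assuming this is the public listModels() method, that OllamaAPI takes the Ollama host URL in its constructor (not shown in this diff), and that the surrounding method declares the checked exceptions these calls throw:

  // Hypothetical caller; host value is illustrative, not part of this commit.
  OllamaAPI api = new OllamaAPI("http://localhost:11434");
  List<Model> models = api.listModels(); // GET {host}/api/tags, parsed via ListModelsResponse
  models.forEach(System.out::println);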
@@ -90,28 +97,32 @@ public class OllamaAPI {
    * Pull a model on the Ollama server from the list of <a
    * href="https://ollama.ai/library">available models</a>.
    *
-   * @param model the name of the model
+   * @param modelName the name of the model
    */
-  public void pullModel(String model)
+  public void pullModel(String modelName)
       throws OllamaBaseException, IOException, URISyntaxException, InterruptedException {
     String url = this.host + "/api/pull";
-    String jsonData = String.format("{\"name\": \"%s\"}", model);
-    HttpRequest request = HttpRequest.newBuilder().uri(new URI(url))
-        .POST(HttpRequest.BodyPublishers.ofString(jsonData)).header("Accept", "application/json")
-        .header("Content-type", "application/json")
-        .timeout(Duration.ofSeconds(requestTimeoutSeconds)).build();
+    String jsonData = new ModelRequest(modelName).toString();
+    HttpRequest request =
+        HttpRequest.newBuilder()
+            .uri(new URI(url))
+            .POST(HttpRequest.BodyPublishers.ofString(jsonData))
+            .header("Accept", "application/json")
+            .header("Content-type", "application/json")
+            .timeout(Duration.ofSeconds(requestTimeoutSeconds))
+            .build();
     HttpClient client = HttpClient.newHttpClient();
-    HttpResponse<InputStream> response = client.send(request,
-        HttpResponse.BodyHandlers.ofInputStream());
+    HttpResponse<InputStream> response =
+        client.send(request, HttpResponse.BodyHandlers.ofInputStream());
     int statusCode = response.statusCode();
     InputStream responseBodyStream = response.body();
     String responseString = "";
-    try (BufferedReader reader = new BufferedReader(
-        new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
+    try (BufferedReader reader =
+        new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
       String line;
       while ((line = reader.readLine()) != null) {
-        ModelPullResponse modelPullResponse = Utils.getObjectMapper()
-            .readValue(line, ModelPullResponse.class);
+        ModelPullResponse modelPullResponse =
+            Utils.getObjectMapper().readValue(line, ModelPullResponse.class);
         if (verbose) {
           logger.info(modelPullResponse.getStatus());
         }
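The substantive change in pullModel is that the request body is now produced by serialising a ModelRequest object instead of hand-formatting JSON, so the model name is escaped correctly. A minimal caller sketch (host value is an illustrative assumption; progress lines are logged only when the verbose flag used above is enabled):

  OllamaAPI api = new OllamaAPI("http://localhost:11434");
  api.pullModel("llama2"); // POSTs {"name": "llama2"} to {host}/api/pull and streams status lines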
@@ -131,11 +142,15 @@ public class OllamaAPI {
   public ModelDetail getModelDetails(String modelName)
       throws IOException, OllamaBaseException, InterruptedException {
     String url = this.host + "/api/show";
-    String jsonData = String.format("{\"name\": \"%s\"}", modelName);
-    HttpRequest request = HttpRequest.newBuilder().uri(URI.create(url))
-        .header("Accept", "application/json").header("Content-type", "application/json")
-        .timeout(Duration.ofSeconds(requestTimeoutSeconds))
-        .POST(HttpRequest.BodyPublishers.ofString(jsonData)).build();
+    String jsonData = new ModelRequest(modelName).toString();
+    HttpRequest request =
+        HttpRequest.newBuilder()
+            .uri(URI.create(url))
+            .header("Accept", "application/json")
+            .header("Content-type", "application/json")
+            .timeout(Duration.ofSeconds(requestTimeoutSeconds))
+            .POST(HttpRequest.BodyPublishers.ofString(jsonData))
+            .build();
     HttpClient client = HttpClient.newHttpClient();
     HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
     int statusCode = response.statusCode();
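getModelDetails now builds its body the same way. A minimal caller sketch (host is an assumption, not part of this diff):

  OllamaAPI api = new OllamaAPI("http://localhost:11434");
  ModelDetail detail = api.getModelDetails("llama2"); // POSTs {"name": "llama2"} to {host}/api/show
  System.out.println(detail.getModelFile()); // getter generated by Lombok from the modelFile field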
@@ -154,15 +169,18 @@ public class OllamaAPI {
    * @param modelName the name of the custom model to be created.
    * @param modelFilePath the path to model file that exists on the Ollama server.
    */
-  public void createModel(String modelName, String modelFilePath)
+  public void createModelWithFilePath(String modelName, String modelFilePath)
       throws IOException, InterruptedException, OllamaBaseException {
     String url = this.host + "/api/create";
-    String jsonData = String.format("{\"name\": \"%s\", \"path\": \"%s\"}", modelName,
-        modelFilePath);
-    HttpRequest request = HttpRequest.newBuilder().uri(URI.create(url))
-        .header("Accept", "application/json").header("Content-Type", "application/json")
-        .timeout(Duration.ofSeconds(requestTimeoutSeconds))
-        .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build();
+    String jsonData = new CustomModelFilePathRequest(modelName, modelFilePath).toString();
+    HttpRequest request =
+        HttpRequest.newBuilder()
+            .uri(URI.create(url))
+            .header("Accept", "application/json")
+            .header("Content-Type", "application/json")
+            .timeout(Duration.ofSeconds(requestTimeoutSeconds))
+            .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8))
+            .build();
     HttpClient client = HttpClient.newHttpClient();
     HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
     int statusCode = response.statusCode();
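The rename from createModel to createModelWithFilePath makes room for the contents-based variant added in the next hunk. A minimal caller sketch (host and path are illustrative assumptions; the Modelfile must already exist on the machine running Ollama):

  OllamaAPI api = new OllamaAPI("http://localhost:11434");
  api.createModelWithFilePath("mario", "/tmp/mario.modelfile");
  // body sent to {host}/api/create: {"name": "mario", "path": "/tmp/mario.modelfile"}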
@@ -180,21 +198,59 @@ public class OllamaAPI {
     }
   }

+  /**
+   * Create a custom model from a model file. Read more about custom model file creation <a
+   * href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md">here</a>.
+   *
+   * @param modelName the name of the custom model to be created.
+   * @param modelFileContents the contents of the model file (Modelfile) for the custom model.
+   */
+  public void createModelWithModelFileContents(String modelName, String modelFileContents)
+      throws IOException, InterruptedException, OllamaBaseException {
+    String url = this.host + "/api/create";
+    String jsonData = new CustomModelFileContentsRequest(modelName, modelFileContents).toString();
+    HttpRequest request =
+        HttpRequest.newBuilder()
+            .uri(URI.create(url))
+            .header("Accept", "application/json")
+            .header("Content-Type", "application/json")
+            .timeout(Duration.ofSeconds(requestTimeoutSeconds))
+            .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8))
+            .build();
+    HttpClient client = HttpClient.newHttpClient();
+    HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
+    int statusCode = response.statusCode();
+    String responseString = response.body();
+    if (statusCode != 200) {
+      throw new OllamaBaseException(statusCode + " - " + responseString);
+    }
+    if (responseString.contains("error")) {
+      throw new OllamaBaseException(responseString);
+    }
+    if (verbose) {
+      logger.info(responseString);
+    }
+  }
+
   /**
    * Delete a model from Ollama server.
    *
-   * @param name the name of the model to be deleted.
+   * @param modelName the name of the model to be deleted.
    * @param ignoreIfNotPresent - ignore errors if the specified model is not present on Ollama
    *     server.
    */
-  public void deleteModel(String name, boolean ignoreIfNotPresent)
+  public void deleteModel(String modelName, boolean ignoreIfNotPresent)
       throws IOException, InterruptedException, OllamaBaseException {
     String url = this.host + "/api/delete";
-    String jsonData = String.format("{\"name\": \"%s\"}", name);
-    HttpRequest request = HttpRequest.newBuilder().uri(URI.create(url))
-        .method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8))
-        .header("Accept", "application/json").header("Content-type", "application/json")
-        .timeout(Duration.ofSeconds(requestTimeoutSeconds)).build();
+    String jsonData = new ModelRequest(modelName).toString();
+    HttpRequest request =
+        HttpRequest.newBuilder()
+            .uri(URI.create(url))
+            .method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8))
+            .header("Accept", "application/json")
+            .header("Content-type", "application/json")
+            .timeout(Duration.ofSeconds(requestTimeoutSeconds))
+            .build();
     HttpClient client = HttpClient.newHttpClient();
     HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
     int statusCode = response.statusCode();
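A minimal caller sketch for the new contents-based create method and for deleteModel after the parameter rename (host and Modelfile text are illustrative assumptions):

  OllamaAPI api = new OllamaAPI("http://localhost:11434");
  String modelfile = "FROM llama2\nSYSTEM You are mario from Super Mario Bros.";
  api.createModelWithModelFileContents("mario", modelfile); // {"name": "mario", "modelfile": "FROM llama2..."}
  api.deleteModel("mario", true); // true: ignore the error if the model is not present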
@@ -217,18 +273,22 @@ public class OllamaAPI {
   public List<Double> generateEmbeddings(String model, String prompt)
       throws IOException, InterruptedException, OllamaBaseException {
     String url = this.host + "/api/embeddings";
-    String jsonData = String.format("{\"model\": \"%s\", \"prompt\": \"%s\"}", model, prompt);
+    String jsonData = new ModelEmbeddingsRequest(model, prompt).toString();
     HttpClient httpClient = HttpClient.newHttpClient();
-    HttpRequest request = HttpRequest.newBuilder().uri(URI.create(url))
-        .header("Accept", "application/json").header("Content-type", "application/json")
-        .timeout(Duration.ofSeconds(requestTimeoutSeconds))
-        .POST(HttpRequest.BodyPublishers.ofString(jsonData)).build();
+    HttpRequest request =
+        HttpRequest.newBuilder()
+            .uri(URI.create(url))
+            .header("Accept", "application/json")
+            .header("Content-type", "application/json")
+            .timeout(Duration.ofSeconds(requestTimeoutSeconds))
+            .POST(HttpRequest.BodyPublishers.ofString(jsonData))
+            .build();
     HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
     int statusCode = response.statusCode();
     String responseBody = response.body();
     if (statusCode == 200) {
-      EmbeddingResponse embeddingResponse = Utils.getObjectMapper()
-          .readValue(responseBody, EmbeddingResponse.class);
+      EmbeddingResponse embeddingResponse =
+          Utils.getObjectMapper().readValue(responseBody, EmbeddingResponse.class);
       return embeddingResponse.getEmbedding();
     } else {
       throw new OllamaBaseException(statusCode + " - " + responseBody);
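Switching the embeddings body to ModelEmbeddingsRequest matters more than the other swaps: with String.format, a prompt containing quotes or newlines produced invalid JSON, whereas Jackson escapes it. A minimal caller sketch (host is an assumption):

  OllamaAPI api = new OllamaAPI("http://localhost:11434");
  List<Double> embedding = api.generateEmbeddings("llama2", "He said \"hello\"\nand left.");
  System.out.println(embedding.size()); // dimensionality depends on the model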
@@ -248,6 +308,25 @@ public class OllamaAPI {
     return askSync(ollamaRequestModel);
   }

+  /**
+   * Ask a question to a model running on Ollama server and get a callback handle that can be used
+   * to check for status and get the response from the model later. This would be an
+   * async/non-blocking call.
+   *
+   * @param model the ollama model to ask the question to
+   * @param promptText the prompt/question text
+   * @return the ollama async result callback handle
+   */
+  public OllamaAsyncResultCallback askAsync(String model, String promptText) {
+    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, promptText);
+    HttpClient httpClient = HttpClient.newHttpClient();
+    URI uri = URI.create(this.host + "/api/generate");
+    OllamaAsyncResultCallback ollamaAsyncResultCallback =
+        new OllamaAsyncResultCallback(httpClient, uri, ollamaRequestModel, requestTimeoutSeconds);
+    ollamaAsyncResultCallback.start();
+    return ollamaAsyncResultCallback;
+  }
+
   /**
    * With one or more image files, ask a question to a model running on Ollama server. This is a
    * sync/blocking call.
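This hunk moves askAsync up next to the other ask methods; a later hunk removes the old copy. A minimal caller sketch; the polling methods on OllamaAsyncResultCallback are not shown in this diff, so the names used in the loop are assumptions:

  OllamaAPI api = new OllamaAPI("http://localhost:11434");
  OllamaAsyncResultCallback handle = api.askAsync("llama2", "Why is the sky blue?");
  while (!handle.isComplete()) { // assumed completion check exposed by the callback
    Thread.sleep(100);
  }
  System.out.println(handle.getResponse()); // assumed accessor for the buffered response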
@@ -286,18 +365,19 @@ public class OllamaAPI {
     return askSync(ollamaRequestModel);
   }

-  public static String encodeFileToBase64(File file) throws IOException {
+  private static String encodeFileToBase64(File file) throws IOException {
     return Base64.getEncoder().encodeToString(Files.readAllBytes(file.toPath()));
   }

-  public static String encodeByteArrayToBase64(byte[] bytes) {
+  private static String encodeByteArrayToBase64(byte[] bytes) {
     return Base64.getEncoder().encodeToString(bytes);
   }

-  public static byte[] loadImageBytesFromUrl(String imageUrl)
+  private static byte[] loadImageBytesFromUrl(String imageUrl)
       throws IOException, URISyntaxException {
     URL url = new URI(imageUrl).toURL();
-    try (InputStream in = url.openStream(); ByteArrayOutputStream out = new ByteArrayOutputStream()) {
+    try (InputStream in = url.openStream();
+        ByteArrayOutputStream out = new ByteArrayOutputStream()) {
       byte[] buffer = new byte[1024];
       int bytesRead;
       while ((bytesRead = in.read(buffer)) != -1) {
@@ -307,50 +387,35 @@ public class OllamaAPI {
     }
   }

-  /**
-   * Ask a question to a model running on Ollama server and get a callback handle that can be used
-   * to check for status and get the response from the model later. This would be an
-   * async/non-blocking call.
-   *
-   * @param model the ollama model to ask the question to
-   * @param promptText the prompt/question text
-   * @return the ollama async result callback handle
-   */
-  public OllamaAsyncResultCallback askAsync(String model, String promptText) {
-    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, promptText);
-    HttpClient httpClient = HttpClient.newHttpClient();
-    URI uri = URI.create(this.host + "/api/generate");
-    OllamaAsyncResultCallback ollamaAsyncResultCallback = new OllamaAsyncResultCallback(httpClient,
-        uri, ollamaRequestModel, requestTimeoutSeconds);
-    ollamaAsyncResultCallback.start();
-    return ollamaAsyncResultCallback;
-  }
-
   private OllamaResult askSync(OllamaRequestModel ollamaRequestModel)
       throws OllamaBaseException, IOException, InterruptedException {
     long startTime = System.currentTimeMillis();
     HttpClient httpClient = HttpClient.newHttpClient();
     URI uri = URI.create(this.host + "/api/generate");
-    HttpRequest request = HttpRequest.newBuilder(uri).POST(HttpRequest.BodyPublishers.ofString(
-        Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)))
-        .header("Content-Type", "application/json")
-        .timeout(Duration.ofSeconds(requestTimeoutSeconds)).build();
-    HttpResponse<InputStream> response = httpClient.send(request,
-        HttpResponse.BodyHandlers.ofInputStream());
+    HttpRequest request =
+        HttpRequest.newBuilder(uri)
+            .POST(
+                HttpRequest.BodyPublishers.ofString(
+                    Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)))
+            .header("Content-Type", "application/json")
+            .timeout(Duration.ofSeconds(requestTimeoutSeconds))
+            .build();
+    HttpResponse<InputStream> response =
+        httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
     int statusCode = response.statusCode();
     InputStream responseBodyStream = response.body();
     StringBuilder responseBuffer = new StringBuilder();
-    try (BufferedReader reader = new BufferedReader(
-        new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
+    try (BufferedReader reader =
+        new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
       String line;
       while ((line = reader.readLine()) != null) {
         if (statusCode == 404) {
-          OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper()
-              .readValue(line, OllamaErrorResponseModel.class);
+          OllamaErrorResponseModel ollamaResponseModel =
+              Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
           responseBuffer.append(ollamaResponseModel.getError());
         } else {
-          OllamaResponseModel ollamaResponseModel = Utils.getObjectMapper()
-              .readValue(line, OllamaResponseModel.class);
+          OllamaResponseModel ollamaResponseModel =
+              Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
           if (!ollamaResponseModel.isDone()) {
             responseBuffer.append(ollamaResponseModel.getResponse());
           }
ModelDetail.java
@@ -1,14 +1,20 @@
 package io.github.amithkoujalgi.ollama4j.core.models;

+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Map;
 import lombok.Data;

 @Data
+@JsonIgnoreProperties(ignoreUnknown = true)
 public class ModelDetail {
   private String license;

   @JsonProperty("modelfile")
   private String modelFile;

   private String parameters;
   private String template;
   private String system;
+  private Map<String, String> details;
 }
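A quick deserialisation sketch showing the effect of the two additions; the JSON values are illustrative, not taken from this commit:

  // Unknown fields such as "extra" no longer break parsing, and the nested "details"
  // object from /api/show lands in the new Map field.
  String json = "{\"license\":\"example\",\"details\":{\"family\":\"llama\"},\"extra\":\"ignored\"}";
  ModelDetail detail =
      new com.fasterxml.jackson.databind.ObjectMapper().readValue(json, ModelDetail.class);
  System.out.println(detail.getDetails().get("family")); // prints: llama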
CustomModelFileContentsRequest.java (new file)
@@ -0,0 +1,23 @@
+package io.github.amithkoujalgi.ollama4j.core.models.request;
+
+import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import lombok.AllArgsConstructor;
+import lombok.Data;
+
+@Data
+@AllArgsConstructor
+public class CustomModelFileContentsRequest {
+  private String name;
+  private String modelfile;
+
+  @Override
+  public String toString() {
+    try {
+      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
+    } catch (JsonProcessingException e) {
+      throw new RuntimeException(e);
+    }
+  }
+}
CustomModelFilePathRequest.java (new file)
@@ -0,0 +1,23 @@
+package io.github.amithkoujalgi.ollama4j.core.models.request;
+
+import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import lombok.AllArgsConstructor;
+import lombok.Data;
+
+@Data
+@AllArgsConstructor
+public class CustomModelFilePathRequest {
+  private String name;
+  private String path;
+
+  @Override
+  public String toString() {
+    try {
+      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
+    } catch (JsonProcessingException e) {
+      throw new RuntimeException(e);
+    }
+  }
+}
ModelEmbeddingsRequest.java (new file)
@@ -0,0 +1,23 @@
+package io.github.amithkoujalgi.ollama4j.core.models.request;
+
+import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import lombok.AllArgsConstructor;
+import lombok.Data;
+
+@Data
+@AllArgsConstructor
+public class ModelEmbeddingsRequest {
+  private String model;
+  private String prompt;
+
+  @Override
+  public String toString() {
+    try {
+      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
+    } catch (JsonProcessingException e) {
+      throw new RuntimeException(e);
+    }
+  }
+}
ModelRequest.java (new file)
@@ -0,0 +1,22 @@
+package io.github.amithkoujalgi.ollama4j.core.models.request;
+
+import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import lombok.AllArgsConstructor;
+import lombok.Data;
+
+@Data
+@AllArgsConstructor
+public class ModelRequest {
+  private String name;
+
+  @Override
+  public String toString() {
+    try {
+      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
+    } catch (JsonProcessingException e) {
+      throw new RuntimeException(e);
+    }
+  }
+}
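All four request classes follow the same pattern: Lombok generates the constructor and accessors, and toString() serialises the object with the shared ObjectMapper so the string can be used directly as an HTTP body. A sketch of the resulting JSON (exact whitespace depends on the default pretty printer):

  String body = new ModelRequest("llama2").toString();
  // body is roughly:
  // {
  //   "name" : "llama2"
  // }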
TestMockedAPIs.java
@@ -44,11 +44,11 @@ class TestMockedAPIs {
   void testCreateModel() {
     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
     String model = OllamaModelType.LLAMA2;
-    String modelFilePath = "/somemodel";
+    String modelFilePath = "FROM llama2\nSYSTEM You are mario from Super Mario Bros.";
     try {
-      doNothing().when(ollamaAPI).createModel(model, modelFilePath);
-      ollamaAPI.createModel(model, modelFilePath);
-      verify(ollamaAPI, times(1)).createModel(model, modelFilePath);
+      doNothing().when(ollamaAPI).createModelWithModelFileContents(model, modelFilePath);
+      ollamaAPI.createModelWithModelFileContents(model, modelFilePath);
+      verify(ollamaAPI, times(1)).createModelWithModelFileContents(model, modelFilePath);
     } catch (IOException | OllamaBaseException | InterruptedException e) {
       throw new RuntimeException(e);
     }
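A companion test for the path-based variant would follow the same mocked pattern; this sketch is not part of the commit and the path is illustrative:

  String path = "/tmp/mario.modelfile";
  doNothing().when(ollamaAPI).createModelWithFilePath(model, path);
  ollamaAPI.createModelWithFilePath(model, path);
  verify(ollamaAPI, times(1)).createModelWithFilePath(model, path);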