forked from Mirror/ollama4j
Compare commits
18 Commits
| Author | SHA1 | Date |
|---|---|---|
| | e1b9d42771 | |
| | 1a086c37c0 | |
| | 3ed3187ba9 | |
| | b7cd81a7f5 | |
| | e750c2d7f9 | |
| | 62f16131f3 | |
| | 2cbaf12d7c | |
| | e2d555d404 | |
| | c296b34174 | |
| | e8f99f28ec | |
| | 250b1abc79 | |
| | 42b15ad93f | |
| | 6f7a714bae | |
| | 92618e5084 | |
| | 391a9242c3 | |
| | e1b6dc3b54 | |
| | 04124cf978 | |
| | e4e717b747 | |
README.md (10 changes)
@@ -110,6 +110,16 @@ make it
 Releases (newer artifact versions) are done automatically on pushing the code to the `main` branch through GitHub
 Actions CI workflow.
 
+#### Who's using Ollama4j?
+
+- `Datafaker`: a library to generate fake data
+    - https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api
+- `Vaadin Web UI`: UI-Tester for Interactions with Ollama via ollama4j
+    - https://github.com/TEAMPB/ollama4j-vaadin-ui
+- `ollama-translator`: Minecraft 1.20.6 spigot plugin allows to easily break language barriers by using ollama on the
+  server to translate all messages into a specfic target language.
+    - https://github.com/liebki/ollama-translator
+
 #### Traction
 
 [](https://star-history.com/#amithkoujalgi/ollama4j&Date)
@@ -112,7 +112,7 @@ You will get a response similar to:
 
 ## Use a simple Console Output Stream Handler
 
-```
+```java
 import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler;
 
 public class Main {
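
The snippet in this hunk is truncated by the diff view. For orientation, here is a minimal sketch of how `ConsoleOutputStreamHandler` is typically wired into a generate call; the host URL, model name, prompt, and `OptionsBuilder` usage are illustrative assumptions, not part of this change:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class Main {
    public static void main(String[] args) throws Exception {
        // Assumed local Ollama endpoint.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // ConsoleOutputStreamHandler prints the streamed response to the console as it arrives.
        OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();

        // Streaming overload referenced elsewhere in this change set:
        // generate(String model, String prompt, Options options, OllamaStreamHandler streamHandler).
        ollamaAPI.generate("llama2", "List the cricket world cup winning teams.",
                new OptionsBuilder().build(), streamHandler);
    }
}
```

Any other `OllamaStreamHandler` implementation can be plugged in the same way for custom handling of the streamed chunks.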
11
pom.xml
11
pom.xml
@@ -4,7 +4,7 @@
 
     <groupId>io.github.amithkoujalgi</groupId>
    <artifactId>ollama4j</artifactId>
-    <version>1.0.65</version>
+    <version>1.0.70</version>
 
     <name>Ollama4j</name>
     <description>Java library for interacting with Ollama API.</description>
@@ -39,7 +39,7 @@
         <connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
         <developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
         <url>https://github.com/amithkoujalgi/ollama4j</url>
-        <tag>v1.0.65</tag>
+        <tag>v1.0.70</tag>
     </scm>
 
     <build>
@@ -149,7 +149,12 @@
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.15.3</version>
+            <version>2.17.1</version>
         </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.datatype</groupId>
+            <artifactId>jackson-datatype-jsr310</artifactId>
+            <version>2.17.1</version>
+        </dependency>
         <dependency>
             <groupId>ch.qos.logback</groupId>

OllamaAPI.java

@@ -9,18 +9,13 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
 import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel;
 import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
 import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
-import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest;
-import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest;
-import io.github.amithkoujalgi.ollama4j.core.models.request.ModelRequest;
-import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaChatEndpointCaller;
-import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaGenerateEndpointCaller;
+import io.github.amithkoujalgi.ollama4j.core.models.request.*;
 import io.github.amithkoujalgi.ollama4j.core.utils.Options;
 import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.*;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.http.HttpClient;
@@ -33,16 +28,16 @@ import java.time.Duration;
 import java.util.ArrayList;
 import java.util.Base64;
 import java.util.List;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-/** The base Ollama API class. */
+/**
+ * The base Ollama API class.
+ */
 @SuppressWarnings("DuplicatedCode")
 public class OllamaAPI {
 
     private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class);
     private final String host;
-    private long requestTimeoutSeconds = 3;
+    private long requestTimeoutSeconds = 10;
     private boolean verbose = true;
     private BasicAuth basicAuth;
 
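The default `requestTimeoutSeconds` moves from 3 to 10 in the hunk above; callers that need more headroom can still raise it explicitly. A minimal sketch, assuming the library's documented `setRequestTimeoutSeconds`/`setVerbose` setters and a local host URL:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;

public class ConfigureTimeout {
    public static void main(String[] args) {
        // Placeholder host; point this at your Ollama server.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Default is now 10 seconds; raise it when large models take longer to respond.
        ollamaAPI.setRequestTimeoutSeconds(60);

        // verbose defaults to true; turn request logging off if it is too chatty.
        ollamaAPI.setVerbose(false);
    }
}
```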
@@ -323,7 +318,7 @@ public class OllamaAPI {
      * @param modelRequest request for '/api/embeddings' endpoint
      * @return embeddings
      */
-    public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException{
+    public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException {
         URI uri = URI.create(this.host + "/api/embeddings");
         String jsonData = modelRequest.toString();
         HttpClient httpClient = HttpClient.newHttpClient();
@@ -360,17 +355,17 @@ public class OllamaAPI {
             throws OllamaBaseException, IOException, InterruptedException {
         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
         ollamaRequestModel.setOptions(options.getOptionsMap());
-        return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler);
+        return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
     }
 
     /**
      * Convenience method to call Ollama API without streaming responses.
      *
      * <p>
      * Uses {@link #generate(String, String, Options, OllamaStreamHandler)}
      */
     public OllamaResult generate(String model, String prompt, Options options)
             throws OllamaBaseException, IOException, InterruptedException {
-        return generate(model, prompt, options,null);
+        return generate(model, prompt, options, null);
     }
 
     /**
@@ -415,19 +410,19 @@ public class OllamaAPI {
         }
         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
         ollamaRequestModel.setOptions(options.getOptionsMap());
-        return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler);
+        return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
     }
 
     /**
      * Convenience method to call Ollama API without streaming responses.
      *
      * <p>
      * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)}
      */
     public OllamaResult generateWithImageFiles(
             String model, String prompt, List<File> imageFiles, Options options)
-            throws OllamaBaseException, IOException, InterruptedException{
+            throws OllamaBaseException, IOException, InterruptedException {
         return generateWithImageFiles(model, prompt, imageFiles, options, null);
     }
 
     /**
      * With one or more image URLs, ask a question to a model running on Ollama server. This is a
@@ -451,12 +446,12 @@ public class OllamaAPI {
         }
         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
         ollamaRequestModel.setOptions(options.getOptionsMap());
-        return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler);
+        return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
     }
 
     /**
      * Convenience method to call Ollama API without streaming responses.
      *
      * <p>
      * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)}
      */
     public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs,
@@ -466,7 +461,6 @@ public class OllamaAPI {
     }
 
-
 
     /**
      * Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the api
      * 'api/chat'.
@@ -478,14 +472,14 @@ public class OllamaAPI {
      * @throws IOException in case the responseStream can not be read
      * @throws InterruptedException in case the server is not reachable or network issues happen
      */
-    public OllamaChatResult chat(String model, List<OllamaChatMessage> messages) throws OllamaBaseException, IOException, InterruptedException{
+    public OllamaChatResult chat(String model, List<OllamaChatMessage> messages) throws OllamaBaseException, IOException, InterruptedException {
         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model);
         return chat(builder.withMessages(messages).build());
     }
 
     /**
      * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
      *
      * <p>
      * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
      *
      * @param request request object to be sent to the server
@@ -494,13 +488,13 @@ public class OllamaAPI {
      * @throws IOException in case the responseStream can not be read
      * @throws InterruptedException in case the server is not reachable or network issues happen
      */
-    public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException{
-        return chat(request,null);
+    public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException {
+        return chat(request, null);
     }
 
     /**
      * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
      *
      * <p>
      * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
      *
      * @param request request object to be sent to the server
@@ -510,14 +504,13 @@ public class OllamaAPI {
      * @throws IOException in case the responseStream can not be read
      * @throws InterruptedException in case the server is not reachable or network issues happen
      */
-    public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException{
+    public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
         OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
         OllamaResult result;
-        if(streamHandler != null){
+        if (streamHandler != null) {
             request.setStream(true);
             result = requestCaller.call(request, streamHandler);
-        }
-        else {
+        } else {
             result = requestCaller.callSync(request);
         }
         return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages());
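
The `chat` overloads touched above are the public entry points for the `/api/chat` endpoint. A minimal sketch of the streaming variant; the host, model name, prompt, the `withMessage(role, content)` builder call, and `getResponse()` on the result are assumptions based on the library's documented usage rather than part of this diff:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;

public class ChatStreamingExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/"); // assumed host

        // Build a single-message chat request; "llama2" is a placeholder model name.
        OllamaChatRequestModel request = OllamaChatRequestBuilder.getInstance("llama2")
                .withMessage(OllamaChatMessageRole.USER, "Why is the sky blue?")
                .build();

        // Passing a stream handler makes chat() enable request.setStream(true) internally
        // and invoke the handler as response chunks arrive; the full result is still returned.
        OllamaStreamHandler streamHandler = (s) -> System.out.println(s);
        OllamaChatResult chatResult = ollamaAPI.chat(request, streamHandler);

        System.out.println(chatResult.getResponse());
    }
}
```

The non-streaming overload `chat(request)` simply delegates to this method with a null handler, as the hunk above shows.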

Model.java

@@ -1,5 +1,7 @@
 package io.github.amithkoujalgi.ollama4j.core.models;
 
+import java.time.LocalDateTime;
+
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
@@ -11,7 +13,9 @@ public class Model {
     private String name;
     private String model;
     @JsonProperty("modified_at")
-    private String modifiedAt;
+    private LocalDateTime modifiedAt;
+    @JsonProperty("expires_at")
+    private LocalDateTime expiresAt;
     private String digest;
     private long size;
     @JsonProperty("details")

OllamaChatResponseModel.java

@@ -1,14 +1,15 @@
 package io.github.amithkoujalgi.ollama4j.core.models.chat;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
-import lombok.Data;
 
 import java.util.List;
+import lombok.Data;
 
 @Data
 public class OllamaChatResponseModel {
     private String model;
     private @JsonProperty("created_at") String createdAt;
+    private @JsonProperty("done_reason") String doneReason;
     private OllamaChatMessage message;
     private boolean done;
     private String error;

OllamaChatEndpointCaller.java

@@ -1,12 +1,6 @@
 package io.github.amithkoujalgi.ollama4j.core.models.request;
 
-import java.io.IOException;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import com.fasterxml.jackson.core.JsonProcessingException;
-
 import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
 import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
 import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
@@ -15,11 +9,15 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver;
 import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
 import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
 
 /**
  * Specialization class for requests
  */
-public class OllamaChatEndpointCaller extends OllamaEndpointCaller{
+public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
 
     private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class);
 
@@ -39,12 +37,12 @@ public class OllamaChatEndpointCaller{
         try {
             OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
             responseBuffer.append(ollamaResponseModel.getMessage().getContent());
-            if(streamObserver != null) {
+            if (streamObserver != null) {
                 streamObserver.notify(ollamaResponseModel);
             }
             return ollamaResponseModel.isDone();
         } catch (JsonProcessingException e) {
-            LOG.error("Error parsing the Ollama chat response!",e);
+            LOG.error("Error parsing the Ollama chat response!", e);
             return true;
         }
     }
@@ -54,7 +52,4 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{
         streamObserver = new OllamaChatStreamObserver(streamHandler);
         return super.callSync(body);
     }
-
-
-
 }

OllamaEndpointCaller.java

@@ -1,5 +1,15 @@
 package io.github.amithkoujalgi.ollama4j.core.models.request;
 
+import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
+import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
+import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
+import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel;
+import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
+import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
+import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
@@ -12,17 +22,6 @@ import java.nio.charset.StandardCharsets;
 import java.time.Duration;
 import java.util.Base64;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
-import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
-import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
-import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
-import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
-
 /**
  * Abstract helperclass to call the ollama api server.
  */
@@ -56,8 +55,7 @@ public abstract class OllamaEndpointCaller {
      * @throws IOException in case the responseStream can not be read
      * @throws InterruptedException in case the server is not reachable or network issues happen
      */
-    public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException{
-
+    public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException {
         // Create Request
         long startTime = System.currentTimeMillis();
         HttpClient httpClient = HttpClient.newHttpClient();
@@ -71,7 +69,6 @@ public abstract class OllamaEndpointCaller {
             HttpResponse<InputStream> response =
                     httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
-
 
             int statusCode = response.statusCode();
             InputStream responseBodyStream = response.body();
             StringBuilder responseBuffer = new StringBuilder();
@@ -96,7 +93,7 @@ public abstract class OllamaEndpointCaller {
                                 OllamaErrorResponseModel.class);
                 responseBuffer.append(ollamaResponseModel.getError());
             } else {
-                boolean finished = parseResponseAndAddToBuffer(line,responseBuffer);
+                boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);
                 if (finished) {
                     break;
                 }

Utils.java

@@ -8,10 +8,18 @@ import java.net.URISyntaxException;
 import java.net.URL;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
 
 public class Utils {
+
+    private static ObjectMapper objectMapper;
+
     public static ObjectMapper getObjectMapper() {
-        return new ObjectMapper();
+        if(objectMapper == null) {
+            objectMapper = new ObjectMapper();
+            objectMapper.registerModule(new JavaTimeModule());
+        }
+        return objectMapper;
     }
 
     public static byte[] loadImageBytesFromUrl(String imageUrl)
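
Caching a single `ObjectMapper` with `JavaTimeModule` registered is what allows `java.time` fields, such as the new `LocalDateTime` members on `Model`, to deserialize at all. A minimal, self-contained sketch of the difference; the `Stamp` class and JSON string are made up for illustration:

```java
import java.time.LocalDateTime;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;

public class JavaTimeModuleDemo {
    // Hypothetical payload type, standing in for fields like Model's modified_at/expires_at.
    public static class Stamp {
        public LocalDateTime timestamp;
    }

    public static void main(String[] args) throws Exception {
        String json = "{\"timestamp\":\"2024-05-20T10:15:30\"}";

        // Without JavaTimeModule, Jackson rejects java.time types with an InvalidDefinitionException.
        ObjectMapper plain = new ObjectMapper();
        try {
            plain.readValue(json, Stamp.class);
        } catch (Exception e) {
            System.out.println("Plain mapper fails: " + e.getClass().getSimpleName());
        }

        // With JavaTimeModule registered (as Utils.getObjectMapper() now does), it parses cleanly.
        ObjectMapper withJsr310 = new ObjectMapper().registerModule(new JavaTimeModule());
        Stamp stamp = withJsr310.readValue(json, Stamp.class);
        System.out.println(stamp.timestamp); // 2024-05-20T10:15:30
    }
}
```

Because the mapper is now held in a static field, the module registration happens once instead of on every request.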