forked from Mirror/ollama4j
Compare commits
54 Commits
| Author | SHA1 | Date |
|---|---|---|
| | e33ad1a1e3 | |
| | cd60c506cb | |
| | b55925df28 | |
| | 3a9b8c309d | |
| | bf07159522 | |
| | f8ca4d041d | |
| | 9c6a55f7b0 | |
| | 2866d83a2f | |
| | 45e5d07581 | |
| | 3a264cb6bb | |
| | e1b9d42771 | |
| | 1a086c37c0 | |
| | 54edba144c | |
| | 3ed3187ba9 | |
| | b7cd81a7f5 | |
| | e750c2d7f9 | |
| | 62f16131f3 | |
| | 2cbaf12d7c | |
| | e2d555d404 | |
| | c296b34174 | |
| | e8f99f28ec | |
| | 250b1abc79 | |
| | 42b15ad93f | |
| | 6f7a714bae | |
| | 92618e5084 | |
| | 391a9242c3 | |
| | e1b6dc3b54 | |
| | 04124cf978 | |
| | e4e717b747 | |
| | 10d2a8f5ff | |
| | 899fa38805 | |
| | 2df878c953 | |
| | 78a5eedc8f | |
| | 364f961ee2 | |
| | b21aa6add2 | |
| | ec4abd1c2d | |
| | 9900ae92fb | |
| | fa20daf6e5 | |
| | 44949c0559 | |
| | e88711a017 | |
| | 32169ded18 | |
| | 4b2d566fd9 | |
| | fb4b7a7ce5 | |
| | 18f27775b0 | |
| | cb462ad05a | |
| | 1eec22ca1a | |
| | c1f3c51f88 | |
| | 7dd556293f | |
| | ee50131ce4 | |
| | 2cd47dbfaa | |
| | e5296c1067 | |
| | 0f00f05e3d | |
| | 976a3b82e5 | |
| | ba26d620c4 | |
.github/workflows/maven-publish.yml (vendored, 18 changed lines)
@@ -12,23 +12,12 @@ on:
    branches: [ "main" ]
  workflow_dispatch:

permissions:
  contents: read
  id-token: write
  packages: write

jobs:
  build:

    runs-on: ubuntu-latest

    permissions:
      contents: write
      packages: write
      pull-requests: write
      repository-projects: write

    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 11
@@ -77,10 +66,3 @@ jobs:
          MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
          MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
          MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}

      - name: Release Assets
        uses: softprops/action-gh-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          files: target/*.jar
README.md (31 changed lines)
@@ -67,10 +67,29 @@ In your Maven project, add this dependency:
<dependency>
    <groupId>io.github.amithkoujalgi</groupId>
    <artifactId>ollama4j</artifactId>
    <version>1.0.57</version>
    <version>1.0.70</version>
</dependency>
```

or

In your Gradle project, add the dependency using the Kotlin DSL or the Groovy DSL:

```kotlin
dependencies {

    val ollama4jVersion = "1.0.70"

    implementation("io.github.amithkoujalgi:ollama4j:$ollama4jVersion")
}
```

```groovy
dependencies {
    implementation("io.github.amithkoujalgi:ollama4j:1.0.70")
}
```

Latest release:



@@ -110,6 +129,16 @@ make it
Releases (newer artifact versions) are done automatically on pushing the code to the `main` branch through GitHub
Actions CI workflow.

#### Who's using Ollama4j?

- `Datafaker`: a library to generate fake data
  - https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api
- `Vaadin Web UI`: UI-Tester for Interactions with Ollama via ollama4j
  - https://github.com/TEAMPB/ollama4j-vaadin-ui
- `ollama-translator`: Minecraft 1.20.6 spigot plugin allows to easily break language barriers by using ollama on the
  server to translate all messages into a specfic target language.
  - https://github.com/liebki/ollama-translator

#### Traction

[](https://star-history.com/#amithkoujalgi/ollama4j&Date)
@@ -41,6 +41,7 @@ public class Main {
}

```

You will get a response similar to:

> First answer: Should be Paris!
@@ -50,23 +51,28 @@ You will get a response similar to:
> Chat History:

```json
[ {
[
  {
    "role": "user",
    "content": "What is the capital of France?",
    "images": []
}, {
  },
  {
    "role": "assistant",
    "content": "Should be Paris!",
    "images": []
}, {
  },
  {
    "role": "user",
    "content": "And what is the second largest city?",
    "images": []
}, {
  },
  {
    "role": "assistant",
    "content": "Marseille.",
    "images": []
} ]
  }
]
```

## Create a conversation where the answer is streamed
@@ -93,6 +99,7 @@ public class Main {
    }
}
```

You will get a response similar to:

> The
@@ -103,8 +110,27 @@ You will get a response similar to:
> The capital of France is Paris
> The capital of France is Paris.

## Use a simple Console Output Stream Handler

```java
import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler;

public class Main {
    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
                .build();
        OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
        ollamaAPI.chat(requestModel, streamHandler);
    }
}
```

## Create a new conversation with individual system prompt

```java
public class Main {

@@ -128,6 +154,7 @@ public class Main {
    }

```

You will get a response similar to:

> NI.
@@ -167,6 +194,12 @@ public class Main {

You will get a response similar to:

> First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early evening, given the warm lighting and the low position of the sun in the sky.
> First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two
> levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and
> comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early
> evening, given the warm lighting and the low position of the sun in the sky.
>
> Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. However, the dog appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed confidently.
> Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. However, the dog
> appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever
> or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed
> confidently.
@@ -42,7 +42,7 @@ public class AskPhi {
                .addSeparator()
                .add("How do I read a file in Go and print its contents to stdout?");

        OllamaResult response = ollamaAPI.generate(model, promptBuilder.build());
        OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build());
        System.out.println(response.getResponse());
    }
}
pom.xml (11 changed lines)
@@ -4,7 +4,7 @@

    <groupId>io.github.amithkoujalgi</groupId>
    <artifactId>ollama4j</artifactId>
    <version>1.0.59</version>
    <version>1.0.72</version>

    <name>Ollama4j</name>
    <description>Java library for interacting with Ollama API.</description>
@@ -39,7 +39,7 @@
        <connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
        <developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
        <url>https://github.com/amithkoujalgi/ollama4j</url>
        <tag>v1.0.59</tag>
        <tag>v1.0.72</tag>
    </scm>

    <build>
@@ -149,7 +149,12 @@
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
            <version>2.15.3</version>
            <version>2.17.1</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.datatype</groupId>
            <artifactId>jackson-datatype-jsr310</artifactId>
            <version>2.17.1</version>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
@@ -9,18 +9,13 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest;
import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest;
import io.github.amithkoujalgi.ollama4j.core.models.request.ModelRequest;
import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaChatEndpointCaller;
import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaGenerateEndpointCaller;
import io.github.amithkoujalgi.ollama4j.core.models.request.*;
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.http.HttpClient;
@@ -33,16 +28,16 @@ import java.time.Duration;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** The base Ollama API class. */
/**
 * The base Ollama API class.
 */
@SuppressWarnings("DuplicatedCode")
public class OllamaAPI {

    private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class);
    private final String host;
    private long requestTimeoutSeconds = 3;
    private long requestTimeoutSeconds = 10;
    private boolean verbose = true;
    private BasicAuth basicAuth;

@@ -365,7 +360,7 @@ public class OllamaAPI {

    /**
     * Convenience method to call Ollama API without streaming responses.
     *
     * <p>
     * Uses {@link #generate(String, String, Options, OllamaStreamHandler)}
     */
    public OllamaResult generate(String model, String prompt, Options options)
@@ -420,7 +415,7 @@ public class OllamaAPI {

    /**
     * Convenience method to call Ollama API without streaming responses.
     *
     * <p>
     * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)}
     */
    public OllamaResult generateWithImageFiles(
@@ -456,7 +451,7 @@ public class OllamaAPI {

    /**
     * Convenience method to call Ollama API without streaming responses.
     *
     * <p>
     * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)}
     */
    public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs,
@@ -466,7 +461,6 @@ public class OllamaAPI {
    }

    /**
     * Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the api
     * 'api/chat'.
@@ -485,7 +479,7 @@ public class OllamaAPI {

    /**
     * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
     *
     * <p>
     * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
     *
     * @param request request object to be sent to the server
@@ -500,7 +494,7 @@ public class OllamaAPI {

    /**
     * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
     *
     * <p>
     * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
     *
     * @param request request object to be sent to the server
@@ -516,8 +510,7 @@ public class OllamaAPI {
        if (streamHandler != null) {
            request.setStream(true);
            result = requestCaller.call(request, streamHandler);
        }
        else {
        } else {
            result = requestCaller.callSync(request);
        }
        return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages());
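The non-streaming generate overloads in this hunk still take an Options argument, and the default request timeout moves from 3 to 10 seconds. A minimal usage sketch, not part of the diff, assuming a setter for the requestTimeoutSeconds field is exposed and otherwise using only signatures visible in this compare view:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class GenerateExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        // The default requestTimeoutSeconds is now 10; raise it for slower models.
        // (Assumes the corresponding setter is exposed on OllamaAPI.)
        ollamaAPI.setRequestTimeoutSeconds(60);

        // Non-streaming convenience overload: generate(model, prompt, options).
        OllamaResult result = ollamaAPI.generate(
                OllamaModelType.PHI,
                "Why is the sky blue?",
                new OptionsBuilder().build());
        System.out.println(result.getResponse());
    }
}
```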
@@ -0,0 +1,14 @@
package io.github.amithkoujalgi.ollama4j.core.impl;

import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;

public class ConsoleOutputStreamHandler implements OllamaStreamHandler {
    private final StringBuffer response = new StringBuffer();

    @Override
    public void accept(String message) {
        String substr = message.substring(response.length());
        response.append(substr);
        System.out.print(substr);
    }
}
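The new ConsoleOutputStreamHandler receives the cumulative response text on every accept call and prints only the freshly appended suffix. A custom handler can follow the same pattern; the sketch below is illustrative (the class name is hypothetical) and assumes accept(String) is the only method OllamaStreamHandler requires:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;

// Hypothetical handler: collects the streamed text instead of printing it.
public class CollectingStreamHandler implements OllamaStreamHandler {
    private final StringBuilder response = new StringBuilder();

    @Override
    public void accept(String message) {
        // 'message' carries the whole response generated so far, so only the
        // part beyond what has already been stored is new.
        String delta = message.substring(response.length());
        response.append(delta);
    }

    public String getResponse() {
        return response.toString();
    }
}
```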
@@ -1,5 +1,8 @@
package io.github.amithkoujalgi.ollama4j.core.models;

import java.time.LocalDateTime;
import java.time.OffsetDateTime;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
@@ -11,7 +14,9 @@ public class Model {
    private String name;
    private String model;
    @JsonProperty("modified_at")
    private String modifiedAt;
    private OffsetDateTime modifiedAt;
    @JsonProperty("expires_at")
    private OffsetDateTime expiresAt;
    private String digest;
    private long size;
    @JsonProperty("details")
@@ -1,14 +1,15 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;

import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;

import java.util.List;
import lombok.Data;

@Data
public class OllamaChatResponseModel {
    private String model;
    private @JsonProperty("created_at") String createdAt;
    private @JsonProperty("done_reason") String doneReason;
    private OllamaChatMessage message;
    private boolean done;
    private String error;
@@ -1,12 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;

import java.io.IOException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.core.JsonProcessingException;

import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
@@ -15,6 +9,10 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;

/**
 * Specialization class for requests
@@ -54,7 +52,4 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{
        streamObserver = new OllamaChatStreamObserver(streamHandler);
        return super.callSync(body);
    }

}
@@ -1,5 +1,15 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
@@ -12,17 +22,6 @@ import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Base64;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

/**
 * Abstract helperclass to call the ollama api server.
 */
@@ -57,7 +56,6 @@ public abstract class OllamaEndpointCaller {
     * @throws InterruptedException in case the server is not reachable or network issues happen
     */
    public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException {

        // Create Request
        long startTime = System.currentTimeMillis();
        HttpClient httpClient = HttpClient.newHttpClient();
@@ -71,7 +69,6 @@ public abstract class OllamaEndpointCaller {
        HttpResponse<InputStream> response =
                httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());

        int statusCode = response.statusCode();
        InputStream responseBodyStream = response.body();
        StringBuilder responseBuffer = new StringBuilder();
@@ -10,15 +10,18 @@ package io.github.amithkoujalgi.ollama4j.core.types;
public class OllamaModelType {
    public static final String GEMMA = "gemma";
    public static final String LLAMA2 = "llama2";
    public static final String LLAMA3 = "llama3";
    public static final String MISTRAL = "mistral";
    public static final String MIXTRAL = "mixtral";
    public static final String LLAVA = "llava";
    public static final String LLAVA_PHI3 = "llava-phi3";
    public static final String NEURAL_CHAT = "neural-chat";
    public static final String CODELLAMA = "codellama";
    public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral";
    public static final String MISTRAL_OPENORCA = "mistral-openorca";
    public static final String LLAMA2_UNCENSORED = "llama2-uncensored";
    public static final String PHI = "phi";
    public static final String PHI3 = "phi3";
    public static final String ORCA_MINI = "orca-mini";
    public static final String DEEPSEEK_CODER = "deepseek-coder";
    public static final String DOLPHIN_MISTRAL = "dolphin-mistral";
@@ -8,10 +8,18 @@ import java.net.URISyntaxException;
import java.net.URL;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;

public class Utils {

    private static ObjectMapper objectMapper;

    public static ObjectMapper getObjectMapper() {
        return new ObjectMapper();
        if(objectMapper == null) {
            objectMapper = new ObjectMapper();
            objectMapper.registerModule(new JavaTimeModule());
        }
        return objectMapper;
    }

    public static byte[] loadImageBytesFromUrl(String imageUrl)
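Caching a single ObjectMapper and registering JavaTimeModule on it is what lets the new OffsetDateTime fields on Model (modified_at, expires_at) deserialize from Ollama's ISO-8601 timestamps; the jackson-datatype-jsr310 dependency added in pom.xml above provides that module. A small standalone sketch of the behaviour, using plain Jackson APIs rather than code from this diff:

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;

import java.time.OffsetDateTime;

public class JavaTimeModuleDemo {
    public static void main(String[] args) throws Exception {
        // Without JavaTimeModule, Jackson rejects java.time types such as
        // OffsetDateTime with an InvalidDefinitionException.
        ObjectMapper mapper = new ObjectMapper();
        mapper.registerModule(new JavaTimeModule());

        // The same offset and Zulu timestamps exercised by TestModelRequestSerialization below.
        OffsetDateTime withOffset =
                mapper.readValue("\"2023-11-04T14:56:49.277302595-07:00\"", OffsetDateTime.class);
        OffsetDateTime zulu =
                mapper.readValue("\"2023-11-04T14:56:49.277302595Z\"", OffsetDateTime.class);
        System.out.println(withOffset + " / " + zulu);
    }
}
```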
@@ -6,30 +6,30 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

public abstract class AbstractRequestSerializationTest<T> {
public abstract class AbstractSerializationTest<T> {

    protected ObjectMapper mapper = Utils.getObjectMapper();

    protected String serializeRequest(T req) {
    protected String serialize(T obj) {
        try {
            return mapper.writeValueAsString(req);
            return mapper.writeValueAsString(obj);
        } catch (JsonProcessingException e) {
            fail("Could not serialize request!", e);
            return null;
        }
    }

    protected T deserializeRequest(String jsonRequest, Class<T> requestClass) {
    protected T deserialize(String jsonObject, Class<T> deserializationClass) {
        try {
            return mapper.readValue(jsonRequest, requestClass);
            return mapper.readValue(jsonObject, deserializationClass);
        } catch (JsonProcessingException e) {
            fail("Could not deserialize jsonRequest!", e);
            fail("Could not deserialize jsonObject!", e);
            return null;
        }
    }

    protected void assertEqualsAfterUnmarshalling(T unmarshalledRequest,
    protected void assertEqualsAfterUnmarshalling(T unmarshalledObject,
            T req) {
        assertEquals(req, unmarshalledRequest);
        assertEquals(req, unmarshalledObject);
    }
}
@@ -14,7 +14,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilde
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class TestChatRequestSerialization extends AbstractRequestSerializationTest<OllamaChatRequestModel>{
public class TestChatRequestSerialization extends AbstractSerializationTest<OllamaChatRequestModel> {

    private OllamaChatRequestBuilder builder;

@@ -26,8 +26,8 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
    @Test
    public void testRequestOnlyMandatoryFields() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build();
        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req);
    }

    @Test
@@ -35,28 +35,43 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt")
            .withMessage(OllamaChatMessageRole.USER, "Some prompt")
            .build();
        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req);
    }

    @Test
    public void testRequestWithMessageAndImage() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
            List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req);
    }

    @Test
    public void testRequestWithOptions() {
        OptionsBuilder b = new OptionsBuilder();
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
            .withOptions(b.setMirostat(1).build()).build();
            .withOptions(b.setMirostat(1).build())
            .withOptions(b.setTemperature(1L).build())
            .withOptions(b.setMirostatEta(1L).build())
            .withOptions(b.setMirostatTau(1L).build())
            .withOptions(b.setNumGpu(1).build())
            .withOptions(b.setSeed(1).build())
            .withOptions(b.setTopK(1).build())
            .withOptions(b.setTopP(1).build())
            .build();

        String jsonRequest = serializeRequest(req);
        OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaChatRequestModel.class);
        String jsonRequest = serialize(req);
        OllamaChatRequestModel deserializeRequest = deserialize(jsonRequest, OllamaChatRequestModel.class);
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
        assertEquals(1.0, deserializeRequest.getOptions().get("temperature"));
        assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_eta"));
        assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_tau"));
        assertEquals(1, deserializeRequest.getOptions().get("num_gpu"));
        assertEquals(1, deserializeRequest.getOptions().get("seed"));
        assertEquals(1, deserializeRequest.getOptions().get("top_k"));
        assertEquals(1.0, deserializeRequest.getOptions().get("top_p"));
    }

    @Test
@@ -64,11 +79,35 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
            .withGetJsonResponse().build();

        String jsonRequest = serializeRequest(req);
        String jsonRequest = serialize(req);
        // no jackson deserialization as format property is not boolean ==> omit as deserialization
        // of request is never used in real code anyways
        JSONObject jsonObject = new JSONObject(jsonRequest);
        String requestFormatProperty = jsonObject.getString("format");
        assertEquals("json", requestFormatProperty);
    }

    @Test
    public void testWithTemplate() {
        OllamaChatRequestModel req = builder.withTemplate("System Template")
            .build();
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req);
    }

    @Test
    public void testWithStreaming() {
        OllamaChatRequestModel req = builder.withStreaming().build();
        String jsonRequest = serialize(req);
        assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).isStream(), true);
    }

    @Test
    public void testWithKeepAlive() {
        String expectedKeepAlive = "5m";
        OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive)
            .build();
        String jsonRequest = serialize(req);
        assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive);
    }
}
@@ -7,7 +7,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsR
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializationTest<OllamaEmbeddingsRequestModel>{
public class TestEmbeddingsRequestSerialization extends AbstractSerializationTest<OllamaEmbeddingsRequestModel> {

    private OllamaEmbeddingsRequestBuilder builder;

@@ -19,8 +19,8 @@ public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializa
    @Test
    public void testRequestOnlyMandatoryFields() {
        OllamaEmbeddingsRequestModel req = builder.build();
        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class), req);
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class), req);
    }

    @Test
@@ -29,8 +29,8 @@ public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializa
        OllamaEmbeddingsRequestModel req = builder
            .withOptions(b.setMirostat(1).build()).build();

        String jsonRequest = serializeRequest(req);
        OllamaEmbeddingsRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class);
        String jsonRequest = serialize(req);
        OllamaEmbeddingsRequestModel deserializeRequest = deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class);
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
    }
@@ -11,7 +11,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateReque
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class TestGenerateRequestSerialization extends AbstractRequestSerializationTest<OllamaGenerateRequestModel>{
public class TestGenerateRequestSerialization extends AbstractSerializationTest<OllamaGenerateRequestModel> {

    private OllamaGenerateRequestBuilder builder;

@@ -24,8 +24,8 @@ public class TestGenerateRequestSerialization extends AbstractRequestSerializati
    public void testRequestOnlyMandatoryFields() {
        OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build();

        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class), req);
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequestModel.class), req);
    }

    @Test
@@ -34,8 +34,8 @@ public class TestGenerateRequestSerialization extends AbstractRequestSerializati
        OllamaGenerateRequestModel req =
            builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build();

        String jsonRequest = serializeRequest(req);
        OllamaGenerateRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class);
        String jsonRequest = serialize(req);
        OllamaGenerateRequestModel deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequestModel.class);
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
    }
@@ -45,7 +45,7 @@ public class TestGenerateRequestSerializati
        OllamaGenerateRequestModel req =
            builder.withPrompt("Some prompt").withGetJsonResponse().build();

        String jsonRequest = serializeRequest(req);
        String jsonRequest = serialize(req);
        // no jackson deserialization as format property is not boolean ==> omit as deserialization
        // of request is never used in real code anyways
        JSONObject jsonObject = new JSONObject(jsonRequest);
@@ -0,0 +1,42 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;

import io.github.amithkoujalgi.ollama4j.core.models.Model;
import org.junit.jupiter.api.Test;

public class TestModelRequestSerialization extends AbstractSerializationTest<Model> {

    @Test
    public void testDeserializationOfModelResponseWithOffsetTime(){
        String serializedTestStringWithOffsetTime = "{\n"
            + "\"name\": \"codellama:13b\",\n"
            + "\"modified_at\": \"2023-11-04T14:56:49.277302595-07:00\",\n"
            + "\"size\": 7365960935,\n"
            + "\"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n"
            + "\"details\": {\n"
            + "\"format\": \"gguf\",\n"
            + "\"family\": \"llama\",\n"
            + "\"families\": null,\n"
            + "\"parameter_size\": \"13B\",\n"
            + "\"quantization_level\": \"Q4_0\"\n"
            + "}}";
        deserialize(serializedTestStringWithOffsetTime,Model.class);
    }

    @Test
    public void testDeserializationOfModelResponseWithZuluTime(){
        String serializedTestStringWithZuluTimezone = "{\n"
            + "\"name\": \"codellama:13b\",\n"
            + "\"modified_at\": \"2023-11-04T14:56:49.277302595Z\",\n"
            + "\"size\": 7365960935,\n"
            + "\"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n"
            + "\"details\": {\n"
            + "\"format\": \"gguf\",\n"
            + "\"family\": \"llama\",\n"
            + "\"families\": null,\n"
            + "\"parameter_size\": \"13B\",\n"
            + "\"quantization_level\": \"Q4_0\"\n"
            + "}}";
        deserialize(serializedTestStringWithZuluTimezone,Model.class);
    }

}