Compare commits

..

24 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| amithkoujalgi | c296b34174 | [maven-release-plugin] prepare release v1.0.68 | 2024-05-14 05:29:35 +00:00 |
| Amith Koujalgi | e8f99f28ec | Updated library usages in README.md | 2024-05-14 10:58:29 +05:30 |
| amithkoujalgi | 250b1abc79 | [maven-release-plugin] prepare for next development iteration | 2024-05-14 05:07:20 +00:00 |
| amithkoujalgi | 42b15ad93f | [maven-release-plugin] prepare release v1.0.67 | 2024-05-14 05:07:18 +00:00 |
| Amith Koujalgi | 6f7a714bae | Merge remote-tracking branch 'origin/main' | 2024-05-14 10:36:04 +05:30 |
| Amith Koujalgi | 92618e5084 | Updated OllamaChatResponseModel to include done_reason field. Refer to the Ollama version: https://github.com/ollama/ollama/releases/tag/v0.1.37 | 2024-05-14 10:35:55 +05:30 |
| amithkoujalgi | 391a9242c3 | [maven-release-plugin] prepare for next development iteration | 2024-05-14 04:59:08 +00:00 |
| amithkoujalgi | e1b6dc3b54 | [maven-release-plugin] prepare release v1.0.66 | 2024-05-14 04:59:07 +00:00 |
| Amith Koujalgi | 04124cf978 | Updated default request timeout to 10 seconds | 2024-05-14 10:27:56 +05:30 |
| amithkoujalgi | e4e717b747 | [maven-release-plugin] prepare for next development iteration | 2024-05-13 15:36:38 +00:00 |
| amithkoujalgi | 10d2a8f5ff | [maven-release-plugin] prepare release v1.0.65 | 2024-05-13 15:36:37 +00:00 |
| Amith Koujalgi | 899fa38805 | Updated newly supported Ollama models; added `ConsoleOutputStreamHandler` | 2024-05-13 21:05:20 +05:30 |
| amithkoujalgi | 2df878c953 | [maven-release-plugin] prepare for next development iteration | 2024-04-22 04:39:41 +00:00 |
| amithkoujalgi | 78a5eedc8f | [maven-release-plugin] prepare release v1.0.64 | 2024-04-22 04:39:40 +00:00 |
| Amith Koujalgi | 364f961ee2 | Merge pull request #37 from anjeongkyun/eddy/add-test1: Adds test case of OllamaChatRequestBuilder | 2024-04-22 10:08:32 +05:30 |
| anjeongkyun | b21aa6add2 | Adds test of testWithKeepAlive | 2024-04-21 23:52:42 +09:00 |
| anjeongkyun | ec4abd1c2d | Adds test of testWithStreaming | 2024-04-21 23:49:42 +09:00 |
| anjeongkyun | 9900ae92fb | Adds test of testWithTemplate | 2024-04-21 23:43:49 +09:00 |
| anjeongkyun | fa20daf6e5 | Adds test case of testRequestWithOptions | 2024-04-21 23:37:18 +09:00 |
| amithkoujalgi | 44949c0559 | [maven-release-plugin] prepare for next development iteration | 2024-03-20 10:44:54 +00:00 |
| amithkoujalgi | e88711a017 | [maven-release-plugin] prepare release v1.0.63 | 2024-03-20 10:44:52 +00:00 |
| Amith Koujalgi | 32169ded18 | Merge pull request #33 from anjeongkyun/fix/prompt-builder-docs | 2024-03-20 16:13:42 +05:30 |
| anjeongkyun | 4b2d566fd9 | Fixes generate method of prompt builder | 2024-03-19 21:04:26 +09:00 |
| amithkoujalgi | fb4b7a7ce5 | [maven-release-plugin] prepare for next development iteration | 2024-03-02 17:49:32 +00:00 |
11 changed files with 854 additions and 772 deletions

View File: README.md

@@ -110,6 +110,13 @@ make it
Releases (newer artifact versions) are made automatically whenever code is pushed to the `main` branch, via the GitHub
Actions CI workflow.
#### Who's using Ollama4j?
- `Datafaker`: a library to generate fake data
- https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api
- `Vaadin Web UI`: UI tester for interactions with Ollama via ollama4j
- https://github.com/TEAMPB/ollama4j-vaadin-ui
#### Traction
[![Star History Chart](https://api.star-history.com/svg?repos=amithkoujalgi/ollama4j&type=Date)](https://star-history.com/#amithkoujalgi/ollama4j&Date)

View File: docs (Chat page)

@@ -4,7 +4,7 @@ sidebar_position: 7
# Chat
This API lets you create a conversation with LLMs. Using this API, you can ask questions of the model and include
information from the history of previously asked questions and their respective answers.
## Create a new conversation and use chat history to augment follow up questions
@@ -20,8 +20,8 @@ public class Main {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
// create first user question
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
.build();
// start conversation with model
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
@@ -29,7 +29,7 @@ public class Main {
System.out.println("First answer: " + chatResult.getResponse());
// create next userQuestion
requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?").build();
// "continue" conversation with model
chatResult = ollamaAPI.chat(requestModel);
@@ -41,32 +41,38 @@ public class Main {
}
```
You will get a response similar to:
> First answer: Should be Paris!
>
> Second answer: Marseille.
>
> Chat History:
```json
[
{
"role": "user",
"content": "What is the capital of France?",
"images": []
},
{
"role": "assistant",
"content": "Should be Paris!",
"images": []
},
{
"role": "user",
"content": "And what is the second largest city?",
"images": []
},
{
"role": "assistant",
"content": "Marseille.",
"images": []
}
]
```
## Create a conversation where the answer is streamed
@@ -81,30 +87,50 @@ public class Main {
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?")
.build();
"What is the capital of France? And what's France's connection with Mona Lisa?")
.build();
// define a handler (Consumer<String>)
OllamaStreamHandler streamHandler = (s) -> {
System.out.println(s);
};
OllamaChatResult chatResult = ollamaAPI.chat(requestModel, streamHandler);
}
}
```
You will get a response similar to:
> The
> The capital
> The capital of
> The capital of France
> The capital of France is
> The capital of France is
> The capital of France is Paris
> The capital of France is Paris.
## Use a simple Console Output Stream Handler
```java
import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler;
public class Main {
public static void main(String[] args) throws Exception {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
.build();
OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
ollamaAPI.chat(requestModel, streamHandler);
}
}
```
## Create a new conversation with individual system prompt
```java
public class Main {
@@ -117,8 +143,8 @@ public class Main {
// create request with system-prompt (overriding the model defaults) and user question
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
.withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
.build();
// start conversation with model
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
@@ -128,6 +154,7 @@ public class Main {
}
```
You will get a response similar to:
> NI.
@@ -139,34 +166,40 @@ public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA);
// Load Image from File and attach to user message (alternatively images could also be added via URL)
OllamaChatRequestModel requestModel =
builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
System.out.println("First answer: " + chatResult.getResponse());
builder.reset();
// Use history to ask further questions about the image or assistant answer
requestModel =
builder.withMessages(chatResult.getChatHistory())
.withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();
chatResult = ollamaAPI.chat(requestModel);
System.out.println("Second answer: " + chatResult.getResponse());
}
}
```
You will get a response similar to:
> First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two
> levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and
> comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early
> evening, given the warm lighting and the low position of the sun in the sky.
>
> Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. However, the dog
> appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever
> or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed
> confidently.

View File: docs (prompt builder example)

@@ -42,7 +42,7 @@ public class AskPhi {
.addSeparator()
.add("How do I read a file in Go and print its contents to stdout?");
OllamaResult response = ollamaAPI.generate(model, promptBuilder.build());
OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build());
System.out.println(response.getResponse());
}
}
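
For context, a hedged sketch of the corrected call shape in a complete method (imports omitted, as in the surrounding docs snippets; the model constant and temperature value are illustrative, not taken from this changeset):

```java
public class AskPhi {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        // generate(...) now takes an Options argument; OptionsBuilder supplies model options such as temperature
        OllamaResult response = ollamaAPI.generate(OllamaModelType.PHI,
                "How do I read a file in Go and print its contents to stdout?",
                new OptionsBuilder().setTemperature(0.8f).build());
        System.out.println(response.getResponse());
    }
}
```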

View File: pom.xml

@@ -4,7 +4,7 @@
<groupId>io.github.amithkoujalgi</groupId>
<artifactId>ollama4j</artifactId>
<version>1.0.62</version>
<version>1.0.68</version>
<name>Ollama4j</name>
<description>Java library for interacting with Ollama API.</description>
@@ -39,7 +39,7 @@
<connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
<developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
<url>https://github.com/amithkoujalgi/ollama4j</url>
<tag>v1.0.62</tag>
<tag>v1.0.68</tag>
</scm>
<build>

View File: ConsoleOutputStreamHandler.java (new file)

@@ -0,0 +1,14 @@
package io.github.amithkoujalgi.ollama4j.core.impl;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
public class ConsoleOutputStreamHandler implements OllamaStreamHandler {
private final StringBuffer response = new StringBuffer();
@Override
public void accept(String message) {
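// the handler receives the cumulative message streamed so far; print only the newly added suffix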
String substr = message.substring(response.length());
response.append(substr);
System.out.print(substr);
}
}

View File: OllamaChatResponseModel.java

@@ -1,14 +1,15 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.util.List;
import lombok.Data;
@Data
public class OllamaChatResponseModel {
private String model;
private @JsonProperty("created_at") String createdAt;
private @JsonProperty("done_reason") String doneReason;
private OllamaChatMessage message;
private boolean done;
private String error;
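
For illustration only (not part of this changeset): since the model is a Lombok `@Data` class and the chat endpoint caller already deserializes each response line with Jackson, the new `done_reason` field can be read back as in the sketch below. The class name and JSON values are made up for the example.

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel;

public class DoneReasonSketch {
    public static void main(String[] args) throws Exception {
        // illustrative final line of a streamed /api/chat response
        String line = "{\"model\":\"llama2\",\"created_at\":\"2024-05-14T05:00:00Z\",\"done\":true,\"done_reason\":\"stop\"}";
        OllamaChatResponseModel response = new ObjectMapper().readValue(line, OllamaChatResponseModel.class);
        // getDoneReason() is generated by Lombok's @Data; "stop" marks a normal completion
        System.out.println(response.isDone() + " / " + response.getDoneReason());
    }
}
```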

View File: OllamaChatEndpointCaller.java

@@ -1,12 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
@@ -15,11 +9,15 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
/**
* Specialization class for requests
*/
public class OllamaChatEndpointCaller extends OllamaEndpointCaller{
public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class);
@@ -39,14 +37,14 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{
try {
OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
responseBuffer.append(ollamaResponseModel.getMessage().getContent());
if(streamObserver != null) {
if (streamObserver != null) {
streamObserver.notify(ollamaResponseModel);
}
return ollamaResponseModel.isDone();
} catch (JsonProcessingException e) {
LOG.error("Error parsing the Ollama chat response!",e);
LOG.error("Error parsing the Ollama chat response!", e);
return true;
}
}
}
public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
@@ -54,7 +52,4 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{
streamObserver = new OllamaChatStreamObserver(streamHandler);
return super.callSync(body);
}
}

View File: OllamaEndpointCaller.java

@@ -1,5 +1,15 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
@@ -12,22 +22,11 @@ import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Base64;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
/**
* Abstract helper class to call the Ollama API server.
*/
public abstract class OllamaEndpointCaller {
private static final Logger LOG = LoggerFactory.getLogger(OllamaAPI.class);
private String host;
@@ -49,107 +48,105 @@ public abstract class OllamaEndpointCaller {
/**
* Calls the api server on the given host and endpoint suffix synchronously, i.e. waiting for the response.
*
* @param body POST body payload
* @return result answer given by the assistant
* @throws OllamaBaseException any response code other than 200 has been returned
* @throws IOException in case the responseStream cannot be read
* @throws InterruptedException in case the server is not reachable or network issues happen
*/
public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException{
public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException {
// Create Request
long startTime = System.currentTimeMillis();
HttpClient httpClient = HttpClient.newHttpClient();
URI uri = URI.create(this.host + getEndpointSuffix());
HttpRequest.Builder requestBuilder =
getRequestBuilderDefault(uri)
.POST(
body.getBodyPublisher());
HttpRequest request = requestBuilder.build();
if (this.verbose) LOG.info("Asking model: " + body.toString());
HttpResponse<InputStream> response =
httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
long startTime = System.currentTimeMillis();
HttpClient httpClient = HttpClient.newHttpClient();
URI uri = URI.create(this.host + getEndpointSuffix());
HttpRequest.Builder requestBuilder =
getRequestBuilderDefault(uri)
.POST(
body.getBodyPublisher());
HttpRequest request = requestBuilder.build();
if (this.verbose) LOG.info("Asking model: " + body.toString());
HttpResponse<InputStream> response =
httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
int statusCode = response.statusCode();
InputStream responseBodyStream = response.body();
StringBuilder responseBuffer = new StringBuilder();
try (BufferedReader reader =
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
String line;
while ((line = reader.readLine()) != null) {
if (statusCode == 404) {
LOG.warn("Status code: 404 (Not Found)");
OllamaErrorResponseModel ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
responseBuffer.append(ollamaResponseModel.getError());
} else if (statusCode == 401) {
LOG.warn("Status code: 401 (Unauthorized)");
OllamaErrorResponseModel ollamaResponseModel =
Utils.getObjectMapper()
.readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class);
responseBuffer.append(ollamaResponseModel.getError());
} else if (statusCode == 400) {
LOG.warn("Status code: 400 (Bad Request)");
OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line,
OllamaErrorResponseModel.class);
responseBuffer.append(ollamaResponseModel.getError());
} else {
boolean finished = parseResponseAndAddToBuffer(line,responseBuffer);
if (finished) {
break;
InputStream responseBodyStream = response.body();
StringBuilder responseBuffer = new StringBuilder();
try (BufferedReader reader =
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
String line;
while ((line = reader.readLine()) != null) {
if (statusCode == 404) {
LOG.warn("Status code: 404 (Not Found)");
OllamaErrorResponseModel ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
responseBuffer.append(ollamaResponseModel.getError());
} else if (statusCode == 401) {
LOG.warn("Status code: 401 (Unauthorized)");
OllamaErrorResponseModel ollamaResponseModel =
Utils.getObjectMapper()
.readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class);
responseBuffer.append(ollamaResponseModel.getError());
} else if (statusCode == 400) {
LOG.warn("Status code: 400 (Bad Request)");
OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line,
OllamaErrorResponseModel.class);
responseBuffer.append(ollamaResponseModel.getError());
} else {
boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);
if (finished) {
break;
}
}
}
}
}
}
if (statusCode != 200) {
LOG.error("Status code " + statusCode);
throw new OllamaBaseException(responseBuffer.toString());
} else {
long endTime = System.currentTimeMillis();
OllamaResult ollamaResult =
new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode);
if (verbose) LOG.info("Model response: " + ollamaResult);
return ollamaResult;
if (statusCode != 200) {
LOG.error("Status code " + statusCode);
throw new OllamaBaseException(responseBuffer.toString());
} else {
long endTime = System.currentTimeMillis();
OllamaResult ollamaResult =
new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode);
if (verbose) LOG.info("Model response: " + ollamaResult);
return ollamaResult;
}
}
}
/**
* Get default request builder.
*
* @param uri URI to get a HttpRequest.Builder
* @return HttpRequest.Builder
*/
private HttpRequest.Builder getRequestBuilderDefault(URI uri) {
HttpRequest.Builder requestBuilder =
HttpRequest.newBuilder(uri)
.header("Content-Type", "application/json")
.timeout(Duration.ofSeconds(this.requestTimeoutSeconds));
if (isBasicAuthCredentialsSet()) {
requestBuilder.header("Authorization", getBasicAuthHeaderValue());
* Get default request builder.
*
* @param uri URI to get a HttpRequest.Builder
* @return HttpRequest.Builder
*/
private HttpRequest.Builder getRequestBuilderDefault(URI uri) {
HttpRequest.Builder requestBuilder =
HttpRequest.newBuilder(uri)
.header("Content-Type", "application/json")
.timeout(Duration.ofSeconds(this.requestTimeoutSeconds));
if (isBasicAuthCredentialsSet()) {
requestBuilder.header("Authorization", getBasicAuthHeaderValue());
}
return requestBuilder;
}
return requestBuilder;
}
/**
* Get basic authentication header value.
*
* @return basic authentication header value (encoded credentials)
*/
private String getBasicAuthHeaderValue() {
String credentialsToEncode = this.basicAuth.getUsername() + ":" + this.basicAuth.getPassword();
return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes());
}
/**
* Get basic authentication header value.
*
* @return basic authentication header value (encoded credentials)
*/
private String getBasicAuthHeaderValue() {
String credentialsToEncode = this.basicAuth.getUsername() + ":" + this.basicAuth.getPassword();
return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes());
}
/**
* Check if Basic Auth credentials set.
*
* @return true when Basic Auth credentials set
*/
private boolean isBasicAuthCredentialsSet() {
return this.basicAuth != null;
}
/**
* Check if Basic Auth credentials set.
*
* @return true when Basic Auth credentials set
*/
private boolean isBasicAuthCredentialsSet() {
return this.basicAuth != null;
}
}
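
As a side note on the Basic Auth path above: the `Authorization` header produced by `getBasicAuthHeaderValue()` is simply `"Basic "` plus the Base64-encoded `username:password` pair. A standalone sketch of that same step (the credentials are placeholders):

```java
import java.util.Base64;

public class BasicAuthHeaderSketch {
    public static void main(String[] args) {
        // mirrors OllamaEndpointCaller#getBasicAuthHeaderValue()
        String credentialsToEncode = "ollama-user" + ":" + "s3cr3t";
        String header = "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes());
        System.out.println(header); // prints: Basic b2xsYW1hLXVzZXI6czNjcjN0
    }
}
```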

View File: OllamaModelType.java

@@ -8,72 +8,75 @@ package io.github.amithkoujalgi.ollama4j.core.types;
*/
@SuppressWarnings("ALL")
public class OllamaModelType {
public static final String GEMMA = "gemma";
public static final String LLAMA2 = "llama2";
public static final String MISTRAL = "mistral";
public static final String MIXTRAL = "mixtral";
public static final String LLAVA = "llava";
public static final String NEURAL_CHAT = "neural-chat";
public static final String CODELLAMA = "codellama";
public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral";
public static final String MISTRAL_OPENORCA = "mistral-openorca";
public static final String LLAMA2_UNCENSORED = "llama2-uncensored";
public static final String PHI = "phi";
public static final String ORCA_MINI = "orca-mini";
public static final String DEEPSEEK_CODER = "deepseek-coder";
public static final String DOLPHIN_MISTRAL = "dolphin-mistral";
public static final String VICUNA = "vicuna";
public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored";
public static final String ZEPHYR = "zephyr";
public static final String OPENHERMES = "openhermes";
public static final String QWEN = "qwen";
public static final String WIZARDCODER = "wizardcoder";
public static final String LLAMA2_CHINESE = "llama2-chinese";
public static final String TINYLLAMA = "tinyllama";
public static final String PHIND_CODELLAMA = "phind-codellama";
public static final String OPENCHAT = "openchat";
public static final String ORCA2 = "orca2";
public static final String FALCON = "falcon";
public static final String WIZARD_MATH = "wizard-math";
public static final String TINYDOLPHIN = "tinydolphin";
public static final String NOUS_HERMES = "nous-hermes";
public static final String YI = "yi";
public static final String DOLPHIN_PHI = "dolphin-phi";
public static final String STARLING_LM = "starling-lm";
public static final String STARCODER = "starcoder";
public static final String CODEUP = "codeup";
public static final String MEDLLAMA2 = "medllama2";
public static final String STABLE_CODE = "stable-code";
public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored";
public static final String BAKLLAVA = "bakllava";
public static final String EVERYTHINGLM = "everythinglm";
public static final String SOLAR = "solar";
public static final String STABLE_BELUGA = "stable-beluga";
public static final String SQLCODER = "sqlcoder";
public static final String YARN_MISTRAL = "yarn-mistral";
public static final String NOUS_HERMES2_MIXTRAL = "nous-hermes2-mixtral";
public static final String SAMANTHA_MISTRAL = "samantha-mistral";
public static final String STABLELM_ZEPHYR = "stablelm-zephyr";
public static final String MEDITRON = "meditron";
public static final String WIZARD_VICUNA = "wizard-vicuna";
public static final String STABLELM2 = "stablelm2";
public static final String MAGICODER = "magicoder";
public static final String YARN_LLAMA2 = "yarn-llama2";
public static final String NOUS_HERMES2 = "nous-hermes2";
public static final String DEEPSEEK_LLM = "deepseek-llm";
public static final String LLAMA_PRO = "llama-pro";
public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2";
public static final String CODEBOOGA = "codebooga";
public static final String MISTRALLITE = "mistrallite";
public static final String NEXUSRAVEN = "nexusraven";
public static final String GOLIATH = "goliath";
public static final String NOMIC_EMBED_TEXT = "nomic-embed-text";
public static final String NOTUX = "notux";
public static final String ALFRED = "alfred";
public static final String MEGADOLPHIN = "megadolphin";
public static final String WIZARDLM = "wizardlm";
public static final String XWINLM = "xwinlm";
public static final String NOTUS = "notus";
public static final String DUCKDB_NSQL = "duckdb-nsql";
public static final String ALL_MINILM = "all-minilm";
public static final String GEMMA = "gemma";
public static final String LLAMA2 = "llama2";
public static final String LLAMA3 = "llama3";
public static final String MISTRAL = "mistral";
public static final String MIXTRAL = "mixtral";
public static final String LLAVA = "llava";
public static final String LLAVA_PHI3 = "llava-phi3";
public static final String NEURAL_CHAT = "neural-chat";
public static final String CODELLAMA = "codellama";
public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral";
public static final String MISTRAL_OPENORCA = "mistral-openorca";
public static final String LLAMA2_UNCENSORED = "llama2-uncensored";
public static final String PHI = "phi";
public static final String PHI3 = "phi3";
public static final String ORCA_MINI = "orca-mini";
public static final String DEEPSEEK_CODER = "deepseek-coder";
public static final String DOLPHIN_MISTRAL = "dolphin-mistral";
public static final String VICUNA = "vicuna";
public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored";
public static final String ZEPHYR = "zephyr";
public static final String OPENHERMES = "openhermes";
public static final String QWEN = "qwen";
public static final String WIZARDCODER = "wizardcoder";
public static final String LLAMA2_CHINESE = "llama2-chinese";
public static final String TINYLLAMA = "tinyllama";
public static final String PHIND_CODELLAMA = "phind-codellama";
public static final String OPENCHAT = "openchat";
public static final String ORCA2 = "orca2";
public static final String FALCON = "falcon";
public static final String WIZARD_MATH = "wizard-math";
public static final String TINYDOLPHIN = "tinydolphin";
public static final String NOUS_HERMES = "nous-hermes";
public static final String YI = "yi";
public static final String DOLPHIN_PHI = "dolphin-phi";
public static final String STARLING_LM = "starling-lm";
public static final String STARCODER = "starcoder";
public static final String CODEUP = "codeup";
public static final String MEDLLAMA2 = "medllama2";
public static final String STABLE_CODE = "stable-code";
public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored";
public static final String BAKLLAVA = "bakllava";
public static final String EVERYTHINGLM = "everythinglm";
public static final String SOLAR = "solar";
public static final String STABLE_BELUGA = "stable-beluga";
public static final String SQLCODER = "sqlcoder";
public static final String YARN_MISTRAL = "yarn-mistral";
public static final String NOUS_HERMES2_MIXTRAL = "nous-hermes2-mixtral";
public static final String SAMANTHA_MISTRAL = "samantha-mistral";
public static final String STABLELM_ZEPHYR = "stablelm-zephyr";
public static final String MEDITRON = "meditron";
public static final String WIZARD_VICUNA = "wizard-vicuna";
public static final String STABLELM2 = "stablelm2";
public static final String MAGICODER = "magicoder";
public static final String YARN_LLAMA2 = "yarn-llama2";
public static final String NOUS_HERMES2 = "nous-hermes2";
public static final String DEEPSEEK_LLM = "deepseek-llm";
public static final String LLAMA_PRO = "llama-pro";
public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2";
public static final String CODEBOOGA = "codebooga";
public static final String MISTRALLITE = "mistrallite";
public static final String NEXUSRAVEN = "nexusraven";
public static final String GOLIATH = "goliath";
public static final String NOMIC_EMBED_TEXT = "nomic-embed-text";
public static final String NOTUX = "notux";
public static final String ALFRED = "alfred";
public static final String MEGADOLPHIN = "megadolphin";
public static final String WIZARDLM = "wizardlm";
public static final String XWINLM = "xwinlm";
public static final String NOTUS = "notus";
public static final String DUCKDB_NSQL = "duckdb-nsql";
public static final String ALL_MINILM = "all-minilm";
}
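
The newly added constants (`LLAMA3`, `PHI3`, `LLAVA_PHI3`) drop into the same builder entry points used in the chat docs above; a minimal sketch mirroring those examples (imports omitted, as in the docs snippets):

```java
public class Main {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        // use the new model-name constant instead of a raw "llama3" string
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA3);
        OllamaChatRequestModel requestModel = builder
                .withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
                .build();
        OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
        System.out.println(chatResult.getResponse());
    }
}
```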

View File: TestChatRequestSerialization.java

@@ -51,12 +51,27 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
public void testRequestWithOptions() {
OptionsBuilder b = new OptionsBuilder();
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
.withOptions(b.setMirostat(1).build()).build();
.withOptions(b.setMirostat(1).build())
.withOptions(b.setTemperature(1L).build())
.withOptions(b.setMirostatEta(1L).build())
.withOptions(b.setMirostatTau(1L).build())
.withOptions(b.setNumGpu(1).build())
.withOptions(b.setSeed(1).build())
.withOptions(b.setTopK(1).build())
.withOptions(b.setTopP(1).build())
.build();
String jsonRequest = serializeRequest(req);
OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaChatRequestModel.class);
OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaChatRequestModel.class);
assertEqualsAfterUnmarshalling(deserializeRequest, req);
assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
assertEquals(1.0, deserializeRequest.getOptions().get("temperature"));
assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_eta"));
assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_tau"));
assertEquals(1, deserializeRequest.getOptions().get("num_gpu"));
assertEquals(1, deserializeRequest.getOptions().get("seed"));
assertEquals(1, deserializeRequest.getOptions().get("top_k"));
assertEquals(1.0, deserializeRequest.getOptions().get("top_p"));
}
@Test
@@ -71,4 +86,28 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
String requestFormatProperty = jsonObject.getString("format");
assertEquals("json", requestFormatProperty);
}
@Test
public void testWithTemplate() {
OllamaChatRequestModel req = builder.withTemplate("System Template")
.build();
String jsonRequest = serializeRequest(req);
assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaChatRequestModel.class), req);
}
@Test
public void testWithStreaming() {
OllamaChatRequestModel req = builder.withStreaming().build();
String jsonRequest = serializeRequest(req);
assertEquals(deserializeRequest(jsonRequest, OllamaChatRequestModel.class).isStream(), true);
}
@Test
public void testWithKeepAlive() {
String expectedKeepAlive = "5m";
OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive)
.build();
String jsonRequest = serializeRequest(req);
assertEquals(deserializeRequest(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive);
}
}