Compare commits

..

140 Commits

Author SHA1 Message Date
amithkoujalgi
bb0785140b [maven-release-plugin] prepare for next development iteration 2024-05-20 14:55:02 +00:00
amithkoujalgi
e33ad1a1e3 [maven-release-plugin] prepare release v1.0.72 2024-05-20 14:55:01 +00:00
Amith Koujalgi
cd60c506cb Merge pull request #47 from kelvinwatson/kelvinwatson/gradleDependencyInReadMe
Update README to include gradle project set up options
2024-05-20 20:24:03 +05:30
amithkoujalgi
b55925df28 [maven-release-plugin] prepare for next development iteration 2024-05-20 14:53:55 +00:00
amithkoujalgi
3a9b8c309d [maven-release-plugin] prepare release v1.0.71 2024-05-20 14:53:54 +00:00
Amith Koujalgi
bf07159522 Merge pull request #49 from AgentSchmecker/bugfix/48
Changes Datatype of ModelResponse fields to OffsetTime
2024-05-20 20:22:56 +05:30
AgentSchmecker
f8ca4d041d Changes DateTime types of Model.java to OffsetDatetime
Fixes #48
2024-05-20 11:10:03 +00:00
AgentSchmecker
9c6a55f7b0 Generalizes Abstract Serialization Test Class
Removes the "Request" naming context as this base class technically serves for general serialization purposes.
2024-05-20 11:08:49 +00:00
Kelvin Watson
2866d83a2f Update README.md 2024-05-19 11:58:08 -07:00
Kelvin Watson
45e5d07581 update README to include gradle options 2024-05-19 11:57:09 -07:00
amithkoujalgi
3a264cb6bb [maven-release-plugin] prepare for next development iteration 2024-05-19 13:57:34 +00:00
amithkoujalgi
e1b9d42771 [maven-release-plugin] prepare release v1.0.70 2024-05-19 13:57:32 +00:00
Amith Koujalgi
1a086c37c0 Merge pull request #46 from AgentSchmecker/model_update
Updates Model.java to be up to date with current OllamaAPI
2024-05-19 19:26:28 +05:30
Markus Klenke
54edba144c Merge pull request #2 from AgentSchmecker/model_update
Updates Model.java to be up to date with current OllamaAPI
2024-05-17 00:09:15 +02:00
AgentSchmecker
3ed3187ba9 Updates Model.java to be up to date with current OllamaAPI
Also adds Jackson-JSR310 for java.time JSON Mapping
2024-05-16 22:00:11 +00:00
amithkoujalgi
b7cd81a7f5 [maven-release-plugin] prepare for next development iteration 2024-05-14 05:42:56 +00:00
amithkoujalgi
e750c2d7f9 [maven-release-plugin] prepare release v1.0.69 2024-05-14 05:42:55 +00:00
Amith Koujalgi
62f16131f3 Merge remote-tracking branch 'origin/main' 2024-05-14 11:11:51 +05:30
Amith Koujalgi
2cbaf12d7c Updated library usages in README.md 2024-05-14 11:11:38 +05:30
amithkoujalgi
e2d555d404 [maven-release-plugin] prepare for next development iteration 2024-05-14 05:29:36 +00:00
amithkoujalgi
c296b34174 [maven-release-plugin] prepare release v1.0.68 2024-05-14 05:29:35 +00:00
Amith Koujalgi
e8f99f28ec Updated library usages in README.md 2024-05-14 10:58:29 +05:30
amithkoujalgi
250b1abc79 [maven-release-plugin] prepare for next development iteration 2024-05-14 05:07:20 +00:00
amithkoujalgi
42b15ad93f [maven-release-plugin] prepare release v1.0.67 2024-05-14 05:07:18 +00:00
Amith Koujalgi
6f7a714bae Merge remote-tracking branch 'origin/main' 2024-05-14 10:36:04 +05:30
Amith Koujalgi
92618e5084 Updated OllamaChatResponseModel to include done_reason field. Refer to the Ollama version: https://github.com/ollama/ollama/releases/tag/v0.1.37 2024-05-14 10:35:55 +05:30
amithkoujalgi
391a9242c3 [maven-release-plugin] prepare for next development iteration 2024-05-14 04:59:08 +00:00
amithkoujalgi
e1b6dc3b54 [maven-release-plugin] prepare release v1.0.66 2024-05-14 04:59:07 +00:00
Amith Koujalgi
04124cf978 Updated default request timeout to 10 seconds 2024-05-14 10:27:56 +05:30
amithkoujalgi
e4e717b747 [maven-release-plugin] prepare for next development iteration 2024-05-13 15:36:38 +00:00
amithkoujalgi
10d2a8f5ff [maven-release-plugin] prepare release v1.0.65 2024-05-13 15:36:37 +00:00
Amith Koujalgi
899fa38805 - Updated newly supported Ollama models
- Added `ConsoleOutputStreamHandler`
2024-05-13 21:05:20 +05:30
amithkoujalgi
2df878c953 [maven-release-plugin] prepare for next development iteration 2024-04-22 04:39:41 +00:00
amithkoujalgi
78a5eedc8f [maven-release-plugin] prepare release v1.0.64 2024-04-22 04:39:40 +00:00
Amith Koujalgi
364f961ee2 Merge pull request #37 from anjeongkyun/eddy/add-test1
Adds test case of OllamaChatRequestBuilder
2024-04-22 10:08:32 +05:30
anjeongkyun
b21aa6add2 Adds test of testWithKeepAlive 2024-04-21 23:52:42 +09:00
anjeongkyun
ec4abd1c2d Adds test of testWithStreaming 2024-04-21 23:49:42 +09:00
anjeongkyun
9900ae92fb Adds test of testWithTemplate 2024-04-21 23:43:49 +09:00
anjeongkyun
fa20daf6e5 Adds test case of testRequestWithOptions 2024-04-21 23:37:18 +09:00
amithkoujalgi
44949c0559 [maven-release-plugin] prepare for next development iteration 2024-03-20 10:44:54 +00:00
amithkoujalgi
e88711a017 [maven-release-plugin] prepare release v1.0.63 2024-03-20 10:44:52 +00:00
Amith Koujalgi
32169ded18 Merge pull request #33 from anjeongkyun/fix/prompt-builder-docs 2024-03-20 16:13:42 +05:30
anjeongkyun
4b2d566fd9 Fixes generate method of prompt builder 2024-03-19 21:04:26 +09:00
amithkoujalgi
fb4b7a7ce5 [maven-release-plugin] prepare for next development iteration 2024-03-02 17:49:32 +00:00
amithkoujalgi
18f27775b0 [maven-release-plugin] prepare release v1.0.62 2024-03-02 17:49:31 +00:00
Amith Koujalgi
cb462ad05a Merge remote-tracking branch 'origin/main' 2024-03-02 23:18:23 +05:30
Amith Koujalgi
1eec22ca1a Added release-assets GH action 2024-03-02 23:18:15 +05:30
amithkoujalgi
c1f3c51f88 [maven-release-plugin] prepare for next development iteration 2024-03-02 15:08:58 +00:00
amithkoujalgi
7dd556293f [maven-release-plugin] prepare release v1.0.61 2024-03-02 15:08:57 +00:00
Amith Koujalgi
ee50131ce4 Merge remote-tracking branch 'origin/main' 2024-03-02 20:37:45 +05:30
Amith Koujalgi
2cd47dbfaa [testing] - Added release-assets upload GH action 2024-03-02 20:37:38 +05:30
amithkoujalgi
e5296c1067 [maven-release-plugin] prepare for next development iteration 2024-03-02 14:58:49 +00:00
amithkoujalgi
0f00f05e3d [maven-release-plugin] prepare release v1.0.60 2024-03-02 14:58:48 +00:00
Amith Koujalgi
976a3b82e5 [testing] - Added release-assets upload GH action 2024-03-02 20:27:37 +05:30
amithkoujalgi
ba26d620c4 [maven-release-plugin] prepare for next development iteration 2024-03-02 14:46:51 +00:00
amithkoujalgi
e45246a767 [maven-release-plugin] prepare release v1.0.59 2024-03-02 14:46:50 +00:00
Amith Koujalgi
7336668f0c [testing] - Added release-assets upload GH action 2024-03-02 20:10:04 +05:30
amithkoujalgi
11701fb222 [maven-release-plugin] prepare for next development iteration 2024-02-29 03:25:18 +00:00
amithkoujalgi
b1ec12c4e9 [maven-release-plugin] prepare release v1.0.58 2024-02-29 03:25:17 +00:00
Amith Koujalgi
d0b0a0fc97 Updated model types 2024-02-29 08:54:03 +05:30
amithkoujalgi
20774fca6b [maven-release-plugin] prepare for next development iteration 2024-02-28 12:55:28 +00:00
amithkoujalgi
9c46b510d8 [maven-release-plugin] prepare release v1.0.57 2024-02-28 12:55:26 +00:00
Amith Koujalgi
9d887b60a8 Merge pull request #30 from AgentSchmecker/feature/options_for_embedding_request
Adds options to EmbeddingsRequest
2024-02-28 18:24:10 +05:30
Markus Klenke
63d4de4e24 Adds options to EmbeddingsRequest
Additionally refactors the Embedding Models and Tests
2024-02-25 20:53:45 +00:00
amithkoujalgi
a10692e2f1 [maven-release-plugin] prepare for next development iteration 2024-02-20 03:23:40 +00:00
amithkoujalgi
b0c152a42e [maven-release-plugin] prepare release v1.0.56 2024-02-20 03:23:39 +00:00
Amith Koujalgi
f44767e023 updated docs 2024-02-20 08:52:30 +05:30
amithkoujalgi
aadef0a57c [maven-release-plugin] prepare for next development iteration 2024-02-19 05:38:03 +00:00
amithkoujalgi
777ee7ffe0 [maven-release-plugin] prepare release v1.0.55 2024-02-19 05:38:02 +00:00
Amith Koujalgi
dcf1d0bdbc Merge pull request #29 from AgentSchmecker/feature/add-streaming-to-generate-api
Adds streaming feature to generate API
2024-02-19 11:06:57 +05:30
Markus Klenke
13b7111a42 Adds toString implementation for Model and ModelMeta to be json represented 2024-02-18 22:53:34 +00:00
Markus Klenke
09442d37a3 Fixes unmarshalling exception on ModelDetail 2024-02-18 22:53:01 +00:00
Markus Klenke
1e66bdb07f Adds documentation for streamed generate API call 2024-02-18 21:41:35 +00:00
Markus Klenke
b423090db9 Merge branch 'amithkoujalgi:main' into feature/add-streaming-to-generate-api 2024-02-18 22:30:20 +01:00
amithkoujalgi
a32d94efbf [maven-release-plugin] prepare for next development iteration 2024-02-18 05:22:10 +00:00
amithkoujalgi
31f8302849 [maven-release-plugin] prepare release v1.0.54 2024-02-18 05:22:08 +00:00
Amith Koujalgi
6487756764 Merge pull request #28 from AgentSchmecker/feature/advanced_params_for_generate
Adds advanced request parameters to chat/generate requests
2024-02-18 10:51:04 +05:30
Markus Klenke
abb76ad867 Adds streaming feature to Generate APIs 2024-02-16 17:03:15 +00:00
Markus Klenke
cf4e7a96e8 Optimizes ChatStreamObserver to use only the last message instead of parsing all messages again 2024-02-16 16:31:39 +00:00
Markus Klenke
0f414f71a3 Changes isEmpty method for BooleanToJsonFormatFlagSerializer to override non deprecated supermethod 2024-02-16 16:01:18 +00:00
Markus Klenke
2b700fdad8 Adds missing pom dependency for JSON comparison tests 2024-02-16 15:58:48 +00:00
Markus Klenke
06c5daa253 Adds additional properties to chat and generate requests 2024-02-16 15:57:48 +00:00
Markus Klenke
91aab6cbd1 Fixes recursive call in non streamed chat API 2024-02-16 15:57:14 +00:00
Markus Klenke
f38a00ebdc Fixes BooleanToJsonFormatFlagSerializer 2024-02-16 15:56:32 +00:00
Markus Klenke
0f73ea75ab Removes unnecessary serialize method of Serializer 2024-02-16 15:56:02 +00:00
Markus Klenke
8fe869afdb Adds additional request properties and refactors common request fields to OllamaCommonRequestModel 2024-02-16 13:15:24 +00:00
amithkoujalgi
2d274c4f5b [maven-release-plugin] prepare for next development iteration 2024-02-16 04:42:42 +00:00
amithkoujalgi
713a3239a4 [maven-release-plugin] prepare release v1.0.53 2024-02-16 04:42:40 +00:00
Amith Koujalgi
a9e7958d44 Merge pull request #26 from AgentSchmecker/main
Adds streaming functionality for chat
2024-02-16 10:11:32 +05:30
Markus Klenke
f38e84053f Adds documentation for streamed chat API call 2024-02-14 16:45:46 +00:00
Markus Klenke
7eb16b7ba0 Merge pull request #1 from AgentSchmecker/feature/streaming-chat
Adds streaming functionality for chat
2024-02-14 16:30:51 +01:00
amithkoujalgi
5a3889d8ee [maven-release-plugin] prepare for next development iteration 2024-02-14 13:44:30 +00:00
amithkoujalgi
2c52f4d0bb [maven-release-plugin] prepare release v1.0.52 2024-02-14 13:44:29 +00:00
Amith Koujalgi
32c4231eb5 Merge pull request #25 from AgentSchmecker/feature/chat_messages_with_images
Adds image capability to chat API
2024-02-14 19:13:22 +05:30
Markus Klenke
e9621f054d Adds integration test for chat streaming API 2024-02-13 18:11:59 +00:00
Markus Klenke
b41b62220c Adds chat with stream functionality in OllamaAPI 2024-02-13 17:59:27 +00:00
Markus Klenke
c89440cbca Adds OllamaStream handling 2024-02-13 17:56:07 +00:00
Markus Klenke
1aeb555a53 Adds documentation for chat with images use case 2024-02-13 10:22:13 +00:00
Markus Klenke
9aff3ec5d9 Extends chat with image integration test to use history 2024-02-13 10:20:37 +00:00
Markus Klenke
b4eaf0cfb5 Fixes ChatRequestBuilder 2024-02-13 10:20:06 +00:00
Markus Klenke
199cb6082d Cleans up unused imports 2024-02-12 22:09:55 +00:00
Markus Klenke
37bfe26a6d Adds integrationtest for chatImagesByURL and formats test cases 2024-02-12 22:08:53 +00:00
Markus Klenke
3769386539 Changes images property of ChatMessage to type byte[] 2024-02-12 22:08:10 +00:00
Markus Klenke
84a6e57f42 Refactors loadImageBytesFromUrl to Utils class 2024-02-12 22:07:15 +00:00
Markus Klenke
14d2474ee9 Adds integrationtestcase for chat message with image content 2024-02-12 21:35:43 +00:00
Markus Klenke
ca613ed80a Adds base64 encoding for chat-API request messages 2024-02-12 21:32:07 +00:00
Markus Klenke
bbcd458849 Adds sync-response interpreter for responseCode 400 2024-02-12 21:30:55 +00:00
Markus Klenke
bc885894f8 Extends OllamaChatResponseModel with error property 2024-02-12 21:29:27 +00:00
amithkoujalgi
bc83df6971 [maven-release-plugin] prepare for next development iteration 2024-02-12 16:34:11 +00:00
amithkoujalgi
43f43c9f81 [maven-release-plugin] prepare release v1.0.51 2024-02-12 16:34:10 +00:00
Amith Koujalgi
65f00defcf Updates to javadoc 2024-02-12 22:03:01 +05:30
Amith Koujalgi
d716b81342 Merge pull request #23 from AgentSchmecker/feature/chat-request-model
Adds feature for /api/chat access via OllamaAPI
2024-02-12 21:36:33 +05:30
Markus Klenke
272ba445f6 Moves documentation to end of sidebar group 2024-02-10 23:23:01 +00:00
Markus Klenke
d9816d8869 Removes unused import 2024-02-10 23:13:11 +00:00
Markus Klenke
874736eb16 adds documentation for chat API 2024-02-10 22:58:50 +00:00
Markus Klenke
9c16ccbf81 Changes test texts for chat integration tests 2024-02-10 22:58:29 +00:00
Markus Klenke
40a3aa31dc Adds helper reset method to OllamaChatRequestBuilder 2024-02-10 22:51:47 +00:00
Markus Klenke
90669b611b Changes implementation of withMessages in ChatRequestBuilder 2024-02-10 22:28:50 +00:00
Markus Klenke
f10c7ac725 Merge remote-tracking branch 'upstream/main' into feature/chat-request-model 2024-02-10 22:09:59 +00:00
amithkoujalgi
38dca3cd0d [maven-release-plugin] prepare for next development iteration 2024-02-10 05:13:11 +00:00
amithkoujalgi
44bb35b168 [maven-release-plugin] prepare release v1.0.50 2024-02-10 05:13:09 +00:00
Amith Koujalgi
9832caf503 Update intg test class 2024-02-10 10:41:53 +05:30
Markus Klenke
0c4e8e306e Organizes imports 2024-02-10 00:26:25 +00:00
Markus Klenke
075416eb9c Adds Integration tests for chat API 2024-02-10 00:25:09 +00:00
Markus Klenke
4260fbbc32 Fixes OllamaAPI after Renaming 2024-02-10 00:24:12 +00:00
Markus Klenke
0bec697a86 Specifies OllamaChatResponseModel 2024-02-10 00:23:26 +00:00
Markus Klenke
4ca6eef8fd Fixes JSON Binding for OllamaChatMessageRole 2024-02-10 00:22:55 +00:00
Markus Klenke
a635dd9be2 Renames request caller to endpoint caller 2024-02-10 00:22:21 +00:00
Markus Klenke
14982011d9 Adds chat-API with OllamaChatResult 2024-02-09 22:49:28 +00:00
Markus Klenke
65d852fdc9 Creates OllamaChatResult extension class 2024-02-09 22:48:48 +00:00
Markus Klenke
d483c23c81 Adds documentation to OllamaServerCaller 2024-02-09 22:48:19 +00:00
Markus Klenke
273b1e47ca Removes unneccessary override of OllamaRequestBody method 2024-02-09 21:58:17 +00:00
Markus Klenke
5c5cdba4cd Merge remote-tracking branch 'origin/main' into feature/chat-request-model 2024-02-09 21:56:10 +00:00
amithkoujalgi
24674ea483 [maven-release-plugin] prepare for next development iteration 2024-02-09 18:19:37 +00:00
Markus Klenke
f9063484f3 Fixes recursive JacksonMarshalling on BodyPublisher 2024-02-09 17:07:18 +00:00
Markus Klenke
00a3e51a93 Extends OllamaAPI by Chat methods and refactors synchronous Generate API Methods 2024-02-09 16:32:52 +00:00
Markus Klenke
bc20468f28 Applies OllamaRequestBody to OllamaRequestModel 2024-02-09 16:30:12 +00:00
Markus Klenke
c7ac50a805 Defines technical Request Caller Object 2024-02-09 16:29:35 +00:00
Markus Klenke
f8cd7bc013 Adds model for Chat Requests to Ollama-API 2024-02-09 16:28:27 +00:00
Markus Klenke
3469bf314b Removes unused Imports 2024-02-09 13:36:49 +00:00
51 changed files with 2395 additions and 652 deletions

View File

@@ -67,10 +67,29 @@ In your Maven project, add this dependency:
<dependency>
<groupId>io.github.amithkoujalgi</groupId>
<artifactId>ollama4j</artifactId>
<version>1.0.47</version>
<version>1.0.70</version>
</dependency>
```
or
In your Gradle project, add the dependency using the Kotlin DSL or the Groovy DSL:
```kotlin
dependencies {
val ollama4jVersion = "1.0.70"
implementation("io.github.amithkoujalgi:ollama4j:$ollama4jVersion")
}
```
```groovy
dependencies {
implementation("io.github.amithkoujalgi:ollama4j:1.0.70")
}
```
Latest release:
![Maven Central](https://img.shields.io/maven-central/v/io.github.amithkoujalgi/ollama4j)
@@ -110,6 +129,16 @@ make it
Releases (newer artifact versions) are done automatically on pushing the code to the `main` branch through GitHub
Actions CI workflow.
#### Who's using Ollama4j?
- `Datafaker`: a library to generate fake data
- https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api
- `Vaadin Web UI`: UI-Tester for Interactions with Ollama via ollama4j
- https://github.com/TEAMPB/ollama4j-vaadin-ui
- `ollama-translator`: Minecraft 1.20.6 spigot plugin allows to easily break language barriers by using ollama on the
server to translate all messages into a specific target language.
- https://github.com/liebki/ollama-translator
#### Traction
[![Star History Chart](https://api.star-history.com/svg?repos=amithkoujalgi/ollama4j&type=Date)](https://star-history.com/#amithkoujalgi/ollama4j&Date)
@@ -125,15 +154,15 @@ Actions CI workflow.
- [x] Update request body creation with Java objects
- [ ] Async APIs for images
- [ ] Add custom headers to requests
- [ ] Add additional params for `ask` APIs such as:
- [x] Add additional params for `ask` APIs such as:
- [x] `options`: additional model parameters for the Modelfile such as `temperature` -
Supported [params](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
- [ ] `system`: system prompt to (overrides what is defined in the Modelfile)
- [ ] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile)
- [ ] `context`: the context parameter returned from a previous request, which can be used to keep a
- [x] `system`: system prompt to (overrides what is defined in the Modelfile)
- [x] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile)
- [x] `context`: the context parameter returned from a previous request, which can be used to keep a
short
conversational memory
- [ ] `stream`: Add support for streaming responses from the model
- [x] `stream`: Add support for streaming responses from the model
- [ ] Add test cases
- [ ] Handle exceptions better (maybe throw more appropriate exceptions)
@@ -150,4 +179,4 @@ project.
### References
- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)
- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)

View File

@@ -1,6 +1,6 @@
{
"label": "APIs - Extras",
"position": 10,
"position": 4,
"link": {
"type": "generated-index",
"description": "Details of APIs to handle bunch of extra stuff."

View File

@@ -1,6 +1,6 @@
{
"label": "APIs - Ask",
"position": 10,
"label": "APIs - Generate",
"position": 3,
"link": {
"type": "generated-index",
"description": "Details of APIs to interact with LLMs."

View File

@@ -0,0 +1,205 @@
---
sidebar_position: 7
---
# Chat
This API lets you create a conversation with LLMs. Using this API enables you to ask questions to the model while
incorporating the history of previously asked questions and their respective answers.
## Create a new conversation and use chat history to augment follow up questions
```java
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
// create first user question
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
.build();
// start conversation with model
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
System.out.println("First answer: " + chatResult.getResponse());
// create next userQuestion
requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?").build();
// "continue" conversation with model
chatResult = ollamaAPI.chat(requestModel);
System.out.println("Second answer: " + chatResult.getResponse());
System.out.println("Chat History: " + chatResult.getChatHistory());
}
}
```
You will get a response similar to:
> First answer: Should be Paris!
>
> Second answer: Marseille.
>
> Chat History:
```json
[
{
"role": "user",
"content": "What is the capital of France?",
"images": []
},
{
"role": "assistant",
"content": "Should be Paris!",
"images": []
},
{
"role": "user",
"content": "And what is the second largest city?",
"images": []
},
{
"role": "assistant",
"content": "Marseille.",
"images": []
}
]
```
## Create a conversation where the answer is streamed
```java
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?")
.build();
// define a handler (Consumer<String>)
OllamaStreamHandler streamHandler = (s) -> {
System.out.println(s);
};
OllamaChatResult chatResult = ollamaAPI.chat(requestModel, streamHandler);
}
}
```
You will get a response similar to:
> The
> The capital
> The capital of
> The capital of France
> The capital of France is
> The capital of France is Paris
> The capital of France is Paris.
## Use a simple Console Output Stream Handler
```java
import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler;
public class Main {
public static void main(String[] args) throws Exception {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
.build();
OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
ollamaAPI.chat(requestModel, streamHandler);
}
}
```
## Create a new conversation with individual system prompt
```java
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
// create request with system-prompt (overriding the model defaults) and user question
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
.withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
.build();
// start conversation with model
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
System.out.println(chatResult.getResponse());
}
}
```
You will get a response similar to:
> NI.
## Create a conversation about an image (requires model with image recognition skills)
```java
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA);
// Load Image from File and attach to user message (alternatively images could also be added via URL)
OllamaChatRequestModel requestModel =
builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
System.out.println("First answer: " + chatResult.getResponse());
builder.reset();
// Use history to ask further questions about the image or assistant answer
requestModel =
builder.withMessages(chatResult.getChatHistory())
.withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();
chatResult = ollamaAPI.chat(requestModel);
System.out.println("Second answer: " + chatResult.getResponse());
}
}
```
You will get a response similar to:
> First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two
> levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and
> comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early
> evening, given the warm lighting and the low position of the sun in the sky.
>
> Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. However, the dog
> appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever
> or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed
> confidently.

View File

@@ -41,6 +41,41 @@ You will get a response similar to:
> require
> natural language understanding and generation capabilities.
## Try asking a question, receiving the answer streamed
```java
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
// define a stream handler (Consumer<String>)
OllamaStreamHandler streamHandler = (s) -> {
System.out.println(s);
};
// Should be called using a separate thread to gain a non-blocking streaming effect.
OllamaResult result = ollamaAPI.generate(config.getModel(),
"What is the capital of France? And what's France's connection with Mona Lisa?",
new OptionsBuilder().build(), streamHandler);
System.out.println("Full response: " +result.getResponse());
}
}
```
You will get a response similar to:
> The
> The capital
> The capital of
> The capital of France
> The capital of France is
> The capital of France is Paris
> The capital of France is Paris.
> Full response: The capital of France is Paris.
## Try asking a question from general topics.
```java

View File

@@ -42,7 +42,7 @@ public class AskPhi {
.addSeparator()
.add("How do I read a file in Go and print its contents to stdout?");
OllamaResult response = ollamaAPI.generate(model, promptBuilder.build());
OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build());
System.out.println(response.getResponse());
}
}

View File

@@ -1,6 +1,6 @@
{
"label": "APIs - Model Management",
"position": 4,
"position": 2,
"link": {
"type": "generated-index",
"description": "Details of APIs to manage LLMs."

19
pom.xml
View File

@@ -4,7 +4,7 @@
<groupId>io.github.amithkoujalgi</groupId>
<artifactId>ollama4j</artifactId>
<version>1.0.49</version>
<version>1.0.73-SNAPSHOT</version>
<name>Ollama4j</name>
<description>Java library for interacting with Ollama API.</description>
@@ -39,7 +39,7 @@
<connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
<developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
<url>https://github.com/amithkoujalgi/ollama4j</url>
<tag>v1.0.49</tag>
<tag>v1.0.16</tag>
</scm>
<build>
@@ -99,7 +99,7 @@
<configuration>
<skipTests>${skipUnitTests}</skipTests>
<includes>
<include>**/unittests/*.java</include>
<include>**/unittests/**/*.java</include>
</includes>
</configuration>
</plugin>
@@ -149,7 +149,12 @@
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.15.3</version>
<version>2.17.1</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
<version>2.17.1</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
@@ -174,6 +179,12 @@
<version>4.1.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20240205</version>
<scope>test</scope>
</dependency>
</dependencies>
<distributionManagement>

View File

@@ -0,0 +1,7 @@
package io.github.amithkoujalgi.ollama4j.core;
import java.util.function.Consumer;
public interface OllamaStreamHandler extends Consumer<String>{
void accept(String message);
}

View File

@@ -0,0 +1,14 @@
package io.github.amithkoujalgi.ollama4j.core.impl;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
public class ConsoleOutputStreamHandler implements OllamaStreamHandler {
private final StringBuffer response = new StringBuffer();
@Override
public void accept(String message) {
String substr = message.substring(response.length());
response.append(substr);
System.out.print(substr);
}
}

View File

@@ -1,6 +1,11 @@
package io.github.amithkoujalgi.ollama4j.core.models;
import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;
@Data
@@ -9,7 +14,9 @@ public class Model {
private String name;
private String model;
@JsonProperty("modified_at")
private String modifiedAt;
private OffsetDateTime modifiedAt;
@JsonProperty("expires_at")
private OffsetDateTime expiresAt;
private String digest;
private long size;
@JsonProperty("details")
@@ -34,4 +41,13 @@ public class Model {
return name.split(":")[1];
}
@Override
public String toString() {
try {
return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}

View File

@@ -2,7 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;
@Data
@@ -16,5 +17,14 @@ public class ModelDetail {
private String parameters;
private String template;
private String system;
private Map<String, String> details;
private ModelMeta details;
@Override
public String toString() {
try {
return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}

View File

@@ -2,6 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;
@Data
@@ -21,4 +23,13 @@ public class ModelMeta {
@JsonProperty("quantization_level")
private String quantizationLevel;
@Override
public String toString() {
try {
return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}

View File

@@ -1,6 +1,8 @@
package io.github.amithkoujalgi.ollama4j.core.models;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import java.io.BufferedReader;
import java.io.IOException;
@@ -22,7 +24,7 @@ import lombok.Getter;
@SuppressWarnings("unused")
public class OllamaAsyncResultCallback extends Thread {
private final HttpRequest.Builder requestBuilder;
private final OllamaRequestModel ollamaRequestModel;
private final OllamaGenerateRequestModel ollamaRequestModel;
private final Queue<String> queue = new LinkedList<>();
private String result;
private boolean isDone;
@@ -47,7 +49,7 @@ public class OllamaAsyncResultCallback extends Thread {
public OllamaAsyncResultCallback(
HttpRequest.Builder requestBuilder,
OllamaRequestModel ollamaRequestModel,
OllamaGenerateRequestModel ollamaRequestModel,
long requestTimeoutSeconds) {
this.requestBuilder = requestBuilder;
this.ollamaRequestModel = ollamaRequestModel;
@@ -87,8 +89,8 @@ public class OllamaAsyncResultCallback extends Thread {
queue.add(ollamaResponseModel.getError());
responseBuffer.append(ollamaResponseModel.getError());
} else {
OllamaResponseModel ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
OllamaGenerateResponseModel ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
queue.add(ollamaResponseModel.getResponse());
if (!ollamaResponseModel.isDone()) {
responseBuffer.append(ollamaResponseModel.getResponse());

View File

@@ -0,0 +1,35 @@
package io.github.amithkoujalgi.ollama4j.core.models;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.github.amithkoujalgi.ollama4j.core.utils.BooleanToJsonFormatFlagSerializer;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;
@Data
@JsonInclude(JsonInclude.Include.NON_NULL)
public abstract class OllamaCommonRequestModel {

  /** Name of the model the request is addressed to. */
  protected String model;

  /**
   * When {@code true}, serialized as {@code "format": "json"} (see
   * {@link BooleanToJsonFormatFlagSerializer}) to ask the server for JSON-formatted output.
   */
  @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)
  @JsonProperty(value = "format")
  protected Boolean returnFormatJson;

  /** Additional model options (e.g. temperature); serialized as-is. */
  protected Map<String, Object> options;

  /** Prompt template override, if any. */
  protected String template;

  /** Whether the server should stream partial responses. */
  protected boolean stream;

  /** How long the model stays loaded after the request (e.g. "5m"). */
  @JsonProperty(value = "keep_alive")
  protected String keepAlive;

  /**
   * Serializes this request as pretty-printed JSON.
   *
   * @return JSON representation of the request
   * @throws RuntimeException if Jackson fails to serialize the object
   */
  @Override
  public String toString() {
    try {
      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
    } catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }
}

View File

@@ -1,8 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
import lombok.Data;
@Data

View File

@@ -0,0 +1,45 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.github.amithkoujalgi.ollama4j.core.utils.FileToBase64Serializer;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
/**
 * Defines a single Message to be used inside a chat request against the ollama /api/chat endpoint.
 *
 * @see <a href="https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate chat completion</a>
 */
@Data
@AllArgsConstructor
@RequiredArgsConstructor
@NoArgsConstructor
public class OllamaChatMessage {
// Author of the message (system/user/assistant); required by @RequiredArgsConstructor.
@NonNull
private OllamaChatMessageRole role;
// Text content of the message; required.
@NonNull
private String content;
// Optional image payloads; serialized to base64 strings for the API.
@JsonSerialize(using = FileToBase64Serializer.class)
private List<byte[]> images;
/**
 * Serializes this message as pretty-printed JSON.
 */
@Override
public String toString() {
try {
return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}

View File

@@ -0,0 +1,19 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import com.fasterxml.jackson.annotation.JsonValue;
/**
 * Defines the possible Chat Message roles, serialized via {@link JsonValue}
 * to the lowercase wire names expected by the ollama API.
 */
public enum OllamaChatMessageRole {
    SYSTEM("system"),
    USER("user"),
    ASSISTANT("assistant");

    /** Wire value written into the JSON "role" field; immutable by design. */
    @JsonValue
    private final String roleName;

    private OllamaChatMessageRole(String roleName) {
        this.roleName = roleName;
    }
}

View File

@@ -0,0 +1,110 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
/**
 * Helper class for creating {@link OllamaChatRequestModel} objects using the builder-pattern.
 */
public class OllamaChatRequestBuilder {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class);

    /** Request under construction; replaced on {@link #reset()}. */
    private OllamaChatRequestModel request;

    private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages) {
        request = new OllamaChatRequestModel(model, messages);
    }

    public static OllamaChatRequestBuilder getInstance(String model) {
        return new OllamaChatRequestBuilder(model, new ArrayList<>());
    }

    /** Returns the request built so far. */
    public OllamaChatRequestModel build() {
        return request;
    }

    /** Discards all accumulated messages, keeping the configured model. */
    public void reset() {
        request = new OllamaChatRequestModel(request.getModel(), new ArrayList<>());
    }

    /**
     * Appends a message with images loaded from local files.
     *
     * <p>Files that cannot be read are logged and skipped. (Previously an empty
     * byte array was added for each unreadable file, which is inconsistent with
     * the URL-based overload that skips failed downloads.)
     */
    public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images) {
        List<byte[]> binaryImages = new ArrayList<>();
        for (File file : images) {
            try {
                binaryImages.add(Files.readAllBytes(file.toPath()));
            } catch (IOException e) {
                LOG.warn(String.format("File '%s' could not be accessed, will not add to message!", file.toPath()), e);
            }
        }
        this.request.getMessages().add(new OllamaChatMessage(role, content, binaryImages));
        return this;
    }

    /**
     * Appends a message with images downloaded from URLs.
     *
     * <p>URLs that cannot be resolved or read are logged and skipped. With no
     * URLs given, the images list stays {@code null} (field omitted from JSON).
     */
    public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, String... imageUrls) {
        List<OllamaChatMessage> messages = this.request.getMessages();
        List<byte[]> binaryImages = null;
        if (imageUrls.length > 0) {
            binaryImages = new ArrayList<>();
            for (String imageUrl : imageUrls) {
                try {
                    binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl));
                } catch (URISyntaxException e) {
                    LOG.warn(String.format("URL '%s' could not be accessed, will not add to message!", imageUrl), e);
                } catch (IOException e) {
                    LOG.warn(String.format("Content of URL '%s' could not be read, will not add to message!", imageUrl), e);
                }
            }
        }
        messages.add(new OllamaChatMessage(role, content, binaryImages));
        return this;
    }

    /** Appends pre-built messages (e.g. an existing chat history). */
    public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages) {
        this.request.getMessages().addAll(messages);
        return this;
    }

    /** Sets additional model options (e.g. temperature). */
    public OllamaChatRequestBuilder withOptions(Options options) {
        this.request.setOptions(options.getOptionsMap());
        return this;
    }

    /** Requests JSON-formatted output from the model. */
    public OllamaChatRequestBuilder withGetJsonResponse() {
        this.request.setReturnFormatJson(true);
        return this;
    }

    /** Overrides the model's prompt template. */
    public OllamaChatRequestBuilder withTemplate(String template) {
        this.request.setTemplate(template);
        return this;
    }

    /** Enables streaming of partial responses. */
    public OllamaChatRequestBuilder withStreaming() {
        this.request.setStream(true);
        return this;
    }

    /** Sets how long the model stays loaded after the request (e.g. "5m"). */
    public OllamaChatRequestBuilder withKeepAlive(String keepAlive) {
        this.request.setKeepAlive(keepAlive);
        return this;
    }
}

View File

@@ -0,0 +1,39 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import java.util.List;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import lombok.Getter;
import lombok.Setter;
/**
 * Defines a Request to use against the ollama /api/chat endpoint.
 *
 * @see <a href=
 *      "https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate
 *      Chat Completion</a>
 */
@Getter
@Setter
public class OllamaChatRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody {

    /** Conversation history plus the new prompt message(s). */
    private List<OllamaChatMessage> messages;

    public OllamaChatRequestModel() {}

    public OllamaChatRequestModel(String model, List<OllamaChatMessage> messages) {
        this.model = model;
        this.messages = messages;
    }

    /** Two requests are equal when their JSON serializations are identical. */
    @Override
    public boolean equals(Object o) {
        if (!(o instanceof OllamaChatRequestModel)) {
            return false;
        }
        return this.toString().equals(o.toString());
    }

    /** Consistent with {@link #equals(Object)}: equal JSON implies equal hash. */
    @Override
    public int hashCode() {
        return this.toString().hashCode();
    }
}

View File

@@ -0,0 +1,23 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.util.List;
@Data
public class OllamaChatResponseModel {
// Name of the model that produced this response chunk.
private String model;
// Server-side creation timestamp (kept as the raw string from the API).
private @JsonProperty("created_at") String createdAt;
// Reason the generation stopped; presumably only set on the final chunk — TODO confirm.
private @JsonProperty("done_reason") String doneReason;
// The (partial) assistant message carried by this chunk.
private OllamaChatMessage message;
// True on the final chunk of a streamed response.
private boolean done;
// Error text when the server reports a failure.
private String error;
// Token context returned by the server, if any.
private List<Integer> context;
// Timing/usage metrics; presumably nanoseconds per the Ollama API docs — confirm.
private @JsonProperty("total_duration") Long totalDuration;
private @JsonProperty("load_duration") Long loadDuration;
private @JsonProperty("prompt_eval_duration") Long promptEvalDuration;
private @JsonProperty("eval_duration") Long evalDuration;
// Token counts for prompt evaluation and generation.
private @JsonProperty("prompt_eval_count") Integer promptEvalCount;
private @JsonProperty("eval_count") Integer evalCount;
}

View File

@@ -0,0 +1,32 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import java.util.List;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
/**
 * Specific chat-API result that carries the chat history sent to the model and
 * appends the model's answer to it as an {@link OllamaChatMessageRole#ASSISTANT}
 * message.
 */
public class OllamaChatResult extends OllamaResult {

    private final List<OllamaChatMessage> chatHistory;

    public OllamaChatResult(String response, long responseTime, int httpStatusCode,
            List<OllamaChatMessage> chatHistory) {
        super(response, responseTime, httpStatusCode);
        this.chatHistory = chatHistory;
        // Record the assistant's reply as the newest entry of the conversation.
        chatHistory.add(new OllamaChatMessage(OllamaChatMessageRole.ASSISTANT, response));
    }

    /** Full conversation including the freshly appended assistant answer. */
    public List<OllamaChatMessage> getChatHistory() {
        return chatHistory;
    }
}

View File

@@ -0,0 +1,31 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import java.util.ArrayList;
import java.util.List;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
/**
 * Collects streamed chat response parts and pushes the text accumulated so far
 * to an {@link OllamaStreamHandler}.
 */
public class OllamaChatStreamObserver {

    private final OllamaStreamHandler streamHandler;
    private final List<OllamaChatResponseModel> responseParts = new ArrayList<>();
    private String message = "";

    public OllamaChatStreamObserver(OllamaStreamHandler streamHandler) {
        this.streamHandler = streamHandler;
    }

    /** Records the next streamed part and forwards it to the handler. */
    public void notify(OllamaChatResponseModel currentResponsePart) {
        responseParts.add(currentResponsePart);
        handleCurrentResponsePart(currentResponsePart);
    }

    /** Appends the part's content to the running message and hands it to the handler. */
    protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart) {
        message += currentResponsePart.getMessage().getContent();
        streamHandler.accept(message);
    }
}

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models;
package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -7,7 +7,7 @@ import lombok.Data;
@SuppressWarnings("unused")
@Data
public class EmbeddingResponse {
public class OllamaEmbeddingResponseModel {
@JsonProperty("embedding")
private List<Double> embedding;
}

View File

@@ -0,0 +1,31 @@
package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
/**
 * Helper class for creating {@link OllamaEmbeddingsRequestModel} objects using
 * the builder-pattern.
 */
public class OllamaEmbeddingsRequestBuilder {

    private OllamaEmbeddingsRequestModel request;

    private OllamaEmbeddingsRequestBuilder(String model, String prompt) {
        this.request = new OllamaEmbeddingsRequestModel(model, prompt);
    }

    public static OllamaEmbeddingsRequestBuilder getInstance(String model, String prompt) {
        return new OllamaEmbeddingsRequestBuilder(model, prompt);
    }

    /** Returns the configured request. */
    public OllamaEmbeddingsRequestModel build() {
        return request;
    }

    /** Sets additional model options (e.g. temperature). */
    public OllamaEmbeddingsRequestBuilder withOptions(Options options) {
        request.setOptions(options.getOptionsMap());
        return this;
    }

    /** Sets how long the model stays loaded after the request (e.g. "5m"). */
    public OllamaEmbeddingsRequestBuilder withKeepAlive(String keepAlive) {
        request.setKeepAlive(keepAlive);
        return this;
    }
}

View File

@@ -1,31 +1,28 @@
package io.github.amithkoujalgi.ollama4j.core.models;
package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
@Data
public class OllamaRequestModel {
@RequiredArgsConstructor
@NoArgsConstructor
public class OllamaEmbeddingsRequestModel {
@NonNull
private String model;
@NonNull
private String prompt;
private List<String> images;
private Map<String, Object> options;
public OllamaRequestModel(String model, String prompt) {
this.model = model;
this.prompt = prompt;
}
public OllamaRequestModel(String model, String prompt, List<String> images) {
this.model = model;
this.prompt = prompt;
this.images = images;
}
protected Map<String, Object> options;
@JsonProperty(value = "keep_alive")
private String keepAlive;
@Override
public String toString() {
try {
return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);

View File

@@ -0,0 +1,55 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate;
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
/**
 * Helper class for creating {@link io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel}
 * objects using the builder-pattern.
 */
public class OllamaGenerateRequestBuilder {

    private OllamaGenerateRequestModel request;

    private OllamaGenerateRequestBuilder(String model, String prompt) {
        this.request = new OllamaGenerateRequestModel(model, prompt);
    }

    public static OllamaGenerateRequestBuilder getInstance(String model) {
        return new OllamaGenerateRequestBuilder(model, "");
    }

    /** Returns the configured request. */
    public OllamaGenerateRequestModel build() {
        return request;
    }

    /** Sets the prompt to generate a completion for. */
    public OllamaGenerateRequestBuilder withPrompt(String prompt) {
        request.setPrompt(prompt);
        return this;
    }

    /** Requests JSON-formatted output from the model. */
    public OllamaGenerateRequestBuilder withGetJsonResponse() {
        request.setReturnFormatJson(true);
        return this;
    }

    /** Sets additional model options (e.g. temperature). */
    public OllamaGenerateRequestBuilder withOptions(Options options) {
        request.setOptions(options.getOptionsMap());
        return this;
    }

    /** Overrides the model's prompt template. */
    public OllamaGenerateRequestBuilder withTemplate(String template) {
        request.setTemplate(template);
        return this;
    }

    /** Enables streaming of partial responses. */
    public OllamaGenerateRequestBuilder withStreaming() {
        request.setStream(true);
        return this;
    }

    /** Sets how long the model stays loaded after the request (e.g. "5m"). */
    public OllamaGenerateRequestBuilder withKeepAlive(String keepAlive) {
        request.setKeepAlive(keepAlive);
        return this;
    }
}

View File

@@ -0,0 +1,46 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
public class OllamaGenerateRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody {

    /** Prompt to generate a completion for. */
    private String prompt;
    /** Optional image payloads (base64 strings) for multimodal models. */
    private List<String> images;
    /** System prompt override. */
    private String system;
    /** Context carried over from a previous request, if any. */
    private String context;
    /** When true, the prompt is sent to the model without templating. */
    private boolean raw;

    public OllamaGenerateRequestModel() {
    }

    public OllamaGenerateRequestModel(String model, String prompt) {
        this.model = model;
        this.prompt = prompt;
    }

    public OllamaGenerateRequestModel(String model, String prompt, List<String> images) {
        this.model = model;
        this.prompt = prompt;
        this.images = images;
    }

    /** Two requests are equal when their JSON serializations are identical. */
    @Override
    public boolean equals(Object o) {
        if (!(o instanceof OllamaGenerateRequestModel)) {
            return false;
        }
        return this.toString().equals(o.toString());
    }

    /** Consistent with {@link #equals(Object)}: equal JSON implies equal hash. */
    @Override
    public int hashCode() {
        return this.toString().hashCode();
    }
}

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models;
package io.github.amithkoujalgi.ollama4j.core.models.generate;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -8,7 +8,7 @@ import lombok.Data;
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class OllamaResponseModel {
public class OllamaGenerateResponseModel {
private String model;
private @JsonProperty("created_at") String createdAt;
private String response;

View File

@@ -0,0 +1,31 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate;
import java.util.ArrayList;
import java.util.List;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
/**
 * Collects streamed generate response parts and pushes the text accumulated so
 * far to an {@link OllamaStreamHandler}.
 */
public class OllamaGenerateStreamObserver {

    private final OllamaStreamHandler streamHandler;
    private final List<OllamaGenerateResponseModel> responseParts = new ArrayList<>();
    private String message = "";

    public OllamaGenerateStreamObserver(OllamaStreamHandler streamHandler) {
        this.streamHandler = streamHandler;
    }

    /** Records the next streamed part and forwards it to the handler. */
    public void notify(OllamaGenerateResponseModel currentResponsePart) {
        responseParts.add(currentResponsePart);
        handleCurrentResponsePart(currentResponsePart);
    }

    /** Appends the part's response text to the running message and hands it to the handler. */
    protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart) {
        message += currentResponsePart.getResponse();
        streamHandler.accept(message);
    }
}

View File

@@ -1,23 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor;
import lombok.Data;
/**
 * Request body for the embeddings endpoint.
 *
 * <p>NOTE(review): this file is shown as removed in the diff, superseded by
 * OllamaEmbeddingsRequestModel in the embeddings package.
 */
@Data
@AllArgsConstructor
public class ModelEmbeddingsRequest {
// Name of the model that produces the embeddings.
private String model;
// Text to embed.
private String prompt;
/**
 * Serializes this request as pretty-printed JSON.
 */
@Override
public String toString() {
try {
return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}

View File

@@ -0,0 +1,55 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
/**
 * Specialization class for requests against the ollama /api/chat endpoint.
 */
public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class);
// Set by call(); stays null when callSync() is invoked directly (no streaming).
private OllamaChatStreamObserver streamObserver;
public OllamaChatEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
super(host, basicAuth, requestTimeoutSeconds, verbose);
}
@Override
protected String getEndpointSuffix() {
return "/api/chat";
}
/**
 * Parses one streamed response line, appends the message content to the buffer
 * and notifies the stream observer when one is set.
 *
 * @return true when the model reports completion, or when parsing fails
 *         (returning true aborts the read loop in the base class)
 */
@Override
protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
try {
OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
responseBuffer.append(ollamaResponseModel.getMessage().getContent());
if (streamObserver != null) {
streamObserver.notify(ollamaResponseModel);
}
return ollamaResponseModel.isDone();
} catch (JsonProcessingException e) {
LOG.error("Error parsing the Ollama chat response!", e);
return true;
}
}
/**
 * Calls the chat endpoint, forwarding partial responses to the given handler.
 */
public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
throws OllamaBaseException, IOException, InterruptedException {
streamObserver = new OllamaChatStreamObserver(streamHandler);
return super.callSync(body);
}
}

View File

@@ -0,0 +1,152 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Base64;
/**
 * Abstract helperclass to call the ollama api server.
 *
 * <p>Subclasses supply the endpoint suffix and the line-wise response parsing;
 * this class handles request construction, basic-auth headers, reading the
 * (possibly streamed) response body and HTTP error mapping.
 */
public abstract class OllamaEndpointCaller {

    // Logger is named after this class; previously it was (by apparent
    // copy-paste) named after OllamaAPI, which misattributed log output.
    private static final Logger LOG = LoggerFactory.getLogger(OllamaEndpointCaller.class);

    private final String host;
    private final BasicAuth basicAuth;
    private final long requestTimeoutSeconds;
    private final boolean verbose;

    public OllamaEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        this.host = host;
        this.basicAuth = basicAuth;
        this.requestTimeoutSeconds = requestTimeoutSeconds;
        this.verbose = verbose;
    }

    /** Endpoint path (e.g. "/api/chat") appended to the host. */
    protected abstract String getEndpointSuffix();

    /**
     * Parses one line of the response and appends the payload to the buffer.
     *
     * @return true when the model signals the response is complete
     */
    protected abstract boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer);

    /**
     * Calls the api server on the given host and endpoint suffix asynchronously, aka waiting for the response.
     *
     * @param body POST body payload
     * @return result answer given by the assistant
     * @throws OllamaBaseException any response code than 200 has been returned
     * @throws IOException in case the responseStream can not be read
     * @throws InterruptedException in case the server is not reachable or network issues happen
     */
    public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException {
        // Create Request
        long startTime = System.currentTimeMillis();
        HttpClient httpClient = HttpClient.newHttpClient();
        URI uri = URI.create(this.host + getEndpointSuffix());
        HttpRequest.Builder requestBuilder =
                getRequestBuilderDefault(uri)
                        .POST(
                                body.getBodyPublisher());
        HttpRequest request = requestBuilder.build();
        if (this.verbose) LOG.info("Asking model: {}", body);
        HttpResponse<InputStream> response =
                httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());

        int statusCode = response.statusCode();
        InputStream responseBodyStream = response.body();
        StringBuilder responseBuffer = new StringBuilder();
        try (BufferedReader reader =
                new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                if (statusCode == 404) {
                    LOG.warn("Status code: 404 (Not Found)");
                    OllamaErrorResponseModel ollamaResponseModel =
                            Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else if (statusCode == 401) {
                    LOG.warn("Status code: 401 (Unauthorized)");
                    // 401 bodies carry no parseable payload; synthesize a uniform error JSON.
                    OllamaErrorResponseModel ollamaResponseModel =
                            Utils.getObjectMapper()
                                    .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else if (statusCode == 400) {
                    LOG.warn("Status code: 400 (Bad Request)");
                    OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line,
                            OllamaErrorResponseModel.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else {
                    boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);
                    if (finished) {
                        break;
                    }
                }
            }
        }

        if (statusCode != 200) {
            LOG.error("Status code {}", statusCode);
            throw new OllamaBaseException(responseBuffer.toString());
        } else {
            long endTime = System.currentTimeMillis();
            OllamaResult ollamaResult =
                    new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode);
            if (verbose) LOG.info("Model response: {}", ollamaResult);
            return ollamaResult;
        }
    }

    /**
     * Get default request builder.
     *
     * @param uri URI to get a HttpRequest.Builder
     * @return HttpRequest.Builder
     */
    private HttpRequest.Builder getRequestBuilderDefault(URI uri) {
        HttpRequest.Builder requestBuilder =
                HttpRequest.newBuilder(uri)
                        .header("Content-Type", "application/json")
                        .timeout(Duration.ofSeconds(this.requestTimeoutSeconds));
        if (isBasicAuthCredentialsSet()) {
            requestBuilder.header("Authorization", getBasicAuthHeaderValue());
        }
        return requestBuilder;
    }

    /**
     * Get basic authentication header value.
     *
     * @return basic authentication header value (encoded credentials)
     */
    private String getBasicAuthHeaderValue() {
        String credentialsToEncode = this.basicAuth.getUsername() + ":" + this.basicAuth.getPassword();
        // Encode with an explicit charset; the platform default charset is not portable.
        return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes(StandardCharsets.UTF_8));
    }

    /**
     * Check if Basic Auth credentials set.
     *
     * @return true when Basic Auth credentials set
     */
    private boolean isBasicAuthCredentialsSet() {
        return this.basicAuth != null;
    }
}

View File

@@ -0,0 +1,54 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
/**
 * Specialization class for requests against the ollama /api/generate endpoint.
 */
public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class);

    // Set by call(); stays null when callSync() is invoked directly (no streaming).
    private OllamaGenerateStreamObserver streamObserver;

    public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        super(host, basicAuth, requestTimeoutSeconds, verbose);
    }

    @Override
    protected String getEndpointSuffix() {
        return "/api/generate";
    }

    /**
     * Parses one streamed response line, appends the generated text to the buffer
     * and notifies the stream observer when one is set.
     *
     * @return true when the model reports completion, or when parsing fails
     *         (returning true aborts the read loop in the base class)
     */
    @Override
    protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
        try {
            OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
            responseBuffer.append(ollamaResponseModel.getResponse());
            if (streamObserver != null) {
                streamObserver.notify(ollamaResponseModel);
            }
            return ollamaResponseModel.isDone();
        } catch (JsonProcessingException e) {
            // Fixed copy-paste from the chat caller: this handles /api/generate responses.
            LOG.error("Error parsing the Ollama generate response!", e);
            return true;
        }
    }

    /**
     * Calls the generate endpoint, forwarding partial responses to the given handler.
     */
    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
            throws OllamaBaseException, IOException, InterruptedException {
        streamObserver = new OllamaGenerateStreamObserver(streamHandler);
        return super.callSync(body);
    }
}

View File

@@ -8,57 +8,75 @@ package io.github.amithkoujalgi.ollama4j.core.types;
*/
@SuppressWarnings("ALL")
public class OllamaModelType {
public static final String LLAMA2 = "llama2";
public static final String MISTRAL = "mistral";
public static final String LLAVA = "llava";
public static final String MIXTRAL = "mixtral";
public static final String STARLING_LM = "starling-lm";
public static final String NEURAL_CHAT = "neural-chat";
public static final String CODELLAMA = "codellama";
public static final String LLAMA2_UNCENSORED = "llama2-uncensored";
public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral";
public static final String ORCA_MINI = "orca-mini";
public static final String VICUNA = "vicuna";
public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored";
public static final String PHIND_CODELLAMA = "phind-codellama";
public static final String PHI = "phi";
public static final String ZEPHYR = "zephyr";
public static final String WIZARDCODER = "wizardcoder";
public static final String MISTRAL_OPENORCA = "mistral-openorca";
public static final String NOUS_HERMES = "nous-hermes";
public static final String DEEPSEEK_CODER = "deepseek-coder";
public static final String WIZARD_MATH = "wizard-math";
public static final String LLAMA2_CHINESE = "llama2-chinese";
public static final String FALCON = "falcon";
public static final String ORCA2 = "orca2";
public static final String STABLE_BELUGA = "stable-beluga";
public static final String CODEUP = "codeup";
public static final String EVERYTHINGLM = "everythinglm";
public static final String MEDLLAMA2 = "medllama2";
public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored";
public static final String STARCODER = "starcoder";
public static final String DOLPHIN22_MISTRAL = "dolphin2.2-mistral";
public static final String OPENCHAT = "openchat";
public static final String WIZARD_VICUNA = "wizard-vicuna";
public static final String OPENHERMES25_MISTRAL = "openhermes2.5-mistral";
public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2";
public static final String YI = "yi";
public static final String YARN_MISTRAL = "yarn-mistral";
public static final String SAMANTHA_MISTRAL = "samantha-mistral";
public static final String SQLCODER = "sqlcoder";
public static final String YARN_LLAMA2 = "yarn-llama2";
public static final String MEDITRON = "meditron";
public static final String STABLELM_ZEPHYR = "stablelm-zephyr";
public static final String OPENHERMES2_MISTRAL = "openhermes2-mistral";
public static final String DEEPSEEK_LLM = "deepseek-llm";
public static final String MISTRALLITE = "mistrallite";
public static final String DOLPHIN21_MISTRAL = "dolphin2.1-mistral";
public static final String WIZARDLM = "wizardlm";
public static final String CODEBOOGA = "codebooga";
public static final String MAGICODER = "magicoder";
public static final String GOLIATH = "goliath";
public static final String NEXUSRAVEN = "nexusraven";
public static final String ALFRED = "alfred";
public static final String XWINLM = "xwinlm";
public static final String BAKLLAVA = "bakllava";
public static final String GEMMA = "gemma";
public static final String LLAMA2 = "llama2";
public static final String LLAMA3 = "llama3";
public static final String MISTRAL = "mistral";
public static final String MIXTRAL = "mixtral";
public static final String LLAVA = "llava";
public static final String LLAVA_PHI3 = "llava-phi3";
public static final String NEURAL_CHAT = "neural-chat";
public static final String CODELLAMA = "codellama";
public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral";
public static final String MISTRAL_OPENORCA = "mistral-openorca";
public static final String LLAMA2_UNCENSORED = "llama2-uncensored";
public static final String PHI = "phi";
public static final String PHI3 = "phi3";
public static final String ORCA_MINI = "orca-mini";
public static final String DEEPSEEK_CODER = "deepseek-coder";
public static final String DOLPHIN_MISTRAL = "dolphin-mistral";
public static final String VICUNA = "vicuna";
public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored";
public static final String ZEPHYR = "zephyr";
public static final String OPENHERMES = "openhermes";
public static final String QWEN = "qwen";
public static final String WIZARDCODER = "wizardcoder";
public static final String LLAMA2_CHINESE = "llama2-chinese";
public static final String TINYLLAMA = "tinyllama";
public static final String PHIND_CODELLAMA = "phind-codellama";
public static final String OPENCHAT = "openchat";
public static final String ORCA2 = "orca2";
public static final String FALCON = "falcon";
public static final String WIZARD_MATH = "wizard-math";
public static final String TINYDOLPHIN = "tinydolphin";
public static final String NOUS_HERMES = "nous-hermes";
public static final String YI = "yi";
public static final String DOLPHIN_PHI = "dolphin-phi";
public static final String STARLING_LM = "starling-lm";
public static final String STARCODER = "starcoder";
public static final String CODEUP = "codeup";
public static final String MEDLLAMA2 = "medllama2";
public static final String STABLE_CODE = "stable-code";
public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored";
public static final String BAKLLAVA = "bakllava";
public static final String EVERYTHINGLM = "everythinglm";
public static final String SOLAR = "solar";
public static final String STABLE_BELUGA = "stable-beluga";
public static final String SQLCODER = "sqlcoder";
public static final String YARN_MISTRAL = "yarn-mistral";
public static final String NOUS_HERMES2_MIXTRAL = "nous-hermes2-mixtral";
public static final String SAMANTHA_MISTRAL = "samantha-mistral";
public static final String STABLELM_ZEPHYR = "stablelm-zephyr";
public static final String MEDITRON = "meditron";
public static final String WIZARD_VICUNA = "wizard-vicuna";
public static final String STABLELM2 = "stablelm2";
public static final String MAGICODER = "magicoder";
public static final String YARN_LLAMA2 = "yarn-llama2";
public static final String NOUS_HERMES2 = "nous-hermes2";
public static final String DEEPSEEK_LLM = "deepseek-llm";
public static final String LLAMA_PRO = "llama-pro";
public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2";
public static final String CODEBOOGA = "codebooga";
public static final String MISTRALLITE = "mistrallite";
public static final String NEXUSRAVEN = "nexusraven";
public static final String GOLIATH = "goliath";
public static final String NOMIC_EMBED_TEXT = "nomic-embed-text";
public static final String NOTUX = "notux";
public static final String ALFRED = "alfred";
public static final String MEGADOLPHIN = "megadolphin";
public static final String WIZARDLM = "wizardlm";
public static final String XWINLM = "xwinlm";
public static final String NOTUS = "notus";
public static final String DUCKDB_NSQL = "duckdb-nsql";
public static final String ALL_MINILM = "all-minilm";
}

View File

@@ -0,0 +1,21 @@
package io.github.amithkoujalgi.ollama4j.core.utils;
import java.io.IOException;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
/**
 * Serializes a {@code Boolean} "return JSON" flag as the literal string {@code "json"},
 * which is the value the Ollama API expects in the {@code format} request property.
 *
 * <p>{@link #isEmpty} reports a {@code false} (or {@code null}) flag as empty so that
 * mappers configured with {@code Include.NON_EMPTY} omit the property entirely instead
 * of emitting {@code "json"} for a disabled flag.
 */
public class BooleanToJsonFormatFlagSerializer extends JsonSerializer<Boolean> {

    @Override
    public void serialize(Boolean value, JsonGenerator gen, SerializerProvider serializers)
            throws IOException {
        // The API wants the string "json", not a boolean, whenever the flag is set.
        gen.writeString("json");
    }

    @Override
    public boolean isEmpty(SerializerProvider provider, Boolean value) {
        // Null guard: auto-unboxing a null Boolean in "!value" would throw an NPE.
        return value == null || !value;
    }
}

View File

@@ -0,0 +1,21 @@
package io.github.amithkoujalgi.ollama4j.core.utils;
import java.io.IOException;
import java.util.Base64;
import java.util.Collection;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
/**
 * Serializes a collection of raw file contents ({@code byte[]}) as a JSON array of
 * Base64-encoded strings — the wire format the Ollama API expects for attached images.
 */
public class FileToBase64Serializer extends JsonSerializer<Collection<byte[]>> {

    @Override
    public void serialize(Collection<byte[]> value, JsonGenerator jsonGenerator,
            SerializerProvider serializers) throws IOException {
        // One encoder instance suffices for the whole collection.
        Base64.Encoder encoder = Base64.getEncoder();
        jsonGenerator.writeStartArray();
        for (byte[] file : value) {
            jsonGenerator.writeString(encoder.encodeToString(file));
        }
        jsonGenerator.writeEndArray();
    }
}

View File

@@ -0,0 +1,28 @@
package io.github.amithkoujalgi.ollama4j.core.utils;
import java.net.http.HttpRequest.BodyPublisher;
import java.net.http.HttpRequest.BodyPublishers;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonProcessingException;
/**
 * Interface to represent a OllamaRequest as HTTP-Request Body via {@link BodyPublishers}.
 */
public interface OllamaRequestBody {

    /**
     * Transforms the OllamaRequest Object to a JSON Object via Jackson.
     *
     * @return JSON representation of a OllamaRequest
     */
    @JsonIgnore
    default BodyPublisher getBodyPublisher() {
        try {
            // Serialize the implementing request object and wrap it as a string body.
            String json = Utils.getObjectMapper().writeValueAsString(this);
            return BodyPublishers.ofString(json);
        } catch (JsonProcessingException e) {
            throw new IllegalArgumentException("Request not Body convertible.", e);
        }
    }
}

View File

@@ -1,9 +1,38 @@
package io.github.amithkoujalgi.ollama4j.core.utils;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
/**
 * Shared utility helpers: a lazily-created singleton Jackson {@link ObjectMapper} and a
 * helper for downloading raw bytes (e.g. images) from a URL.
 */
public class Utils {

    private static ObjectMapper objectMapper;

    /**
     * Returns the shared {@link ObjectMapper}, creating and configuring it on first use.
     *
     * <p>The mapper registers {@link JavaTimeModule} so {@code java.time} types such as
     * {@code OffsetDateTime} in model responses can be (de)serialized.
     *
     * <p>NOTE(review): lazy init is not thread-safe; concurrent first calls may create
     * two mappers. Harmless here (both are configured identically), but worth confirming.
     *
     * @return the singleton object mapper
     */
    public static ObjectMapper getObjectMapper() {
        // Fixed: a stray unconditional "return new ObjectMapper();" made the lazy-init
        // code below unreachable (a compile error in Java) and skipped JavaTimeModule.
        if (objectMapper == null) {
            objectMapper = new ObjectMapper();
            objectMapper.registerModule(new JavaTimeModule());
        }
        return objectMapper;
    }

    /**
     * Downloads the resource at the given URL and returns its raw bytes.
     *
     * @param imageUrl the URL to read from
     * @return the complete response body as a byte array
     * @throws IOException if the stream cannot be opened or read
     * @throws URISyntaxException if {@code imageUrl} is not a valid URI
     */
    public static byte[] loadImageBytesFromUrl(String imageUrl)
            throws IOException, URISyntaxException {
        URL url = new URI(imageUrl).toURL();
        try (InputStream in = url.openStream()) {
            // readAllBytes (Java 9+) replaces the manual 1 KiB buffer-copy loop.
            return in.readAllBytes();
        }
    }
}

View File

@@ -4,8 +4,14 @@ import static org.junit.jupiter.api.Assertions.*;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
import java.io.File;
import java.io.IOException;
@@ -16,26 +22,19 @@ import java.net.http.HttpConnectTimeoutException;
import java.util.List;
import java.util.Objects;
import java.util.Properties;
import lombok.Data;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
class TestRealAPIs {
OllamaAPI ollamaAPI;
private Properties loadProperties() {
Properties properties = new Properties();
try (InputStream input =
getClass().getClassLoader().getResourceAsStream("test-config.properties")) {
if (input == null) {
throw new RuntimeException("Sorry, unable to find test-config.properties");
}
properties.load(input);
return properties;
} catch (IOException e) {
throw new RuntimeException("Error loading properties", e);
}
}
private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class);
OllamaAPI ollamaAPI;
Config config;
private File getImageFileFromClasspath(String fileName) {
ClassLoader classLoader = getClass().getClassLoader();
@@ -44,9 +43,9 @@ class TestRealAPIs {
@BeforeEach
void setUp() {
Properties properties = loadProperties();
ollamaAPI = new OllamaAPI(properties.getProperty("ollama.api.url"));
ollamaAPI.setRequestTimeoutSeconds(20);
config = new Config();
ollamaAPI = new OllamaAPI(config.getOllamaURL());
ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds());
}
@Test
@@ -64,7 +63,7 @@ class TestRealAPIs {
} catch (HttpConnectTimeoutException e) {
fail(e.getMessage());
} catch (Exception e) {
throw new RuntimeException(e);
fail(e);
}
}
@@ -76,7 +75,7 @@ class TestRealAPIs {
assertNotNull(ollamaAPI.listModels());
ollamaAPI.listModels().forEach(System.out::println);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
throw new RuntimeException(e);
fail(e);
}
}
@@ -85,13 +84,26 @@ class TestRealAPIs {
void testPullModel() {
testEndpointReachability();
try {
ollamaAPI.pullModel(OllamaModelType.LLAMA2);
ollamaAPI.pullModel(config.getModel());
boolean found =
ollamaAPI.listModels().stream()
.anyMatch(model -> model.getModelName().equals(OllamaModelType.LLAMA2));
.anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel()));
assertTrue(found);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
throw new RuntimeException(e);
fail(e);
}
}
@Test
@Order(3)
void testListDtails() {
testEndpointReachability();
try {
ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel());
assertNotNull(modelDetails);
System.out.println(modelDetails);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
fail(e);
}
}
@@ -102,14 +114,40 @@ class TestRealAPIs {
try {
OllamaResult result =
ollamaAPI.generate(
OllamaModelType.LLAMA2,
config.getModel(),
"What is the capital of France? And what's France's connection with Mona Lisa?",
new OptionsBuilder().build());
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
} catch (IOException | OllamaBaseException | InterruptedException e) {
throw new RuntimeException(e);
fail(e);
}
}
@Test
@Order(3)
void testAskModelWithDefaultOptionsStreamed() {
testEndpointReachability();
try {
StringBuffer sb = new StringBuffer("");
OllamaResult result = ollamaAPI.generate(config.getModel(),
"What is the capital of France? And what's France's connection with Mona Lisa?",
new OptionsBuilder().build(), (s) -> {
LOG.info(s);
String substring = s.substring(sb.toString().length(), s.length());
LOG.info(substring);
sb.append(substring);
});
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
assertEquals(sb.toString().trim(), result.getResponse().trim());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@@ -120,14 +158,129 @@ class TestRealAPIs {
try {
OllamaResult result =
ollamaAPI.generate(
OllamaModelType.LLAMA2,
config.getModel(),
"What is the capital of France? And what's France's connection with Mona Lisa?",
new OptionsBuilder().setTemperature(0.9f).build());
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
} catch (IOException | OllamaBaseException | InterruptedException e) {
throw new RuntimeException(e);
fail(e);
}
}
@Test
@Order(3)
void testChat() {
testEndpointReachability();
try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
.withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!")
.withMessage(OllamaChatMessageRole.USER,"And what is the second larges city?")
.build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
assertNotNull(chatResult);
assertFalse(chatResult.getResponse().isBlank());
assertEquals(4,chatResult.getChatHistory().size());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testChatWithSystemPrompt() {
testEndpointReachability();
try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM,
"You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
.withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?")
.build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
assertNotNull(chatResult);
assertFalse(chatResult.getResponse().isBlank());
assertTrue(chatResult.getResponse().startsWith("NI"));
assertEquals(3, chatResult.getChatHistory().size());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testChatWithStream() {
testEndpointReachability();
try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?")
.build();
StringBuffer sb = new StringBuffer("");
OllamaChatResult chatResult = ollamaAPI.chat(requestModel,(s) -> {
LOG.info(s);
String substring = s.substring(sb.toString().length(), s.length());
LOG.info(substring);
sb.append(substring);
});
assertNotNull(chatResult);
assertEquals(sb.toString().trim(), chatResult.getResponse().trim());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testChatWithImageFromFileWithHistoryRecognition() {
testEndpointReachability();
try {
OllamaChatRequestBuilder builder =
OllamaChatRequestBuilder.getInstance(config.getImageModel());
OllamaChatRequestModel requestModel =
builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
assertNotNull(chatResult);
assertNotNull(chatResult.getResponse());
builder.reset();
requestModel =
builder.withMessages(chatResult.getChatHistory())
.withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();
chatResult = ollamaAPI.chat(requestModel);
assertNotNull(chatResult);
assertNotNull(chatResult.getResponse());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testChatWithImageFromURL() {
testEndpointReachability();
try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")
.build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
assertNotNull(chatResult);
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@@ -139,7 +292,7 @@ class TestRealAPIs {
try {
OllamaResult result =
ollamaAPI.generateWithImageFiles(
OllamaModelType.LLAVA,
config.getImageModel(),
"What is in this image?",
List.of(imageFile),
new OptionsBuilder().build());
@@ -147,7 +300,31 @@ class TestRealAPIs {
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
} catch (IOException | OllamaBaseException | InterruptedException e) {
throw new RuntimeException(e);
fail(e);
}
}
@Test
@Order(3)
void testAskModelWithOptionsAndImageFilesStreamed() {
testEndpointReachability();
File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
try {
StringBuffer sb = new StringBuffer("");
OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(),
"What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> {
LOG.info(s);
String substring = s.substring(sb.toString().length(), s.length());
LOG.info(substring);
sb.append(substring);
});
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
assertEquals(sb.toString().trim(), result.getResponse().trim());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@@ -158,7 +335,7 @@ class TestRealAPIs {
try {
OllamaResult result =
ollamaAPI.generateWithImageURLs(
OllamaModelType.LLAVA,
config.getImageModel(),
"What is in this image?",
List.of(
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
@@ -167,7 +344,50 @@ class TestRealAPIs {
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
throw new RuntimeException(e);
fail(e);
}
}
@Test
@Order(3)
public void testEmbedding() {
testEndpointReachability();
try {
OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder
.getInstance(config.getModel(), "What is the capital of France?").build();
List<Double> embeddings = ollamaAPI.generateEmbeddings(request);
assertNotNull(embeddings);
assertFalse(embeddings.isEmpty());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
}
/**
 * Test configuration loaded from {@code test-config.properties} on the classpath:
 * the Ollama server URL, the text and image model names, and the request timeout.
 */
@Data
class Config {
    private String ollamaURL;
    private String model;
    private String imageModel;
    private int requestTimeoutSeconds;

    public Config() {
        Properties props = new Properties();
        try (InputStream input =
                getClass().getClassLoader().getResourceAsStream("test-config.properties")) {
            if (input == null) {
                throw new RuntimeException("Sorry, unable to find test-config.properties");
            }
            props.load(input);
            ollamaURL = props.getProperty("ollama.url");
            model = props.getProperty("ollama.model");
            imageModel = props.getProperty("ollama.model.image");
            requestTimeoutSeconds =
                    Integer.parseInt(props.getProperty("ollama.request-timeout-seconds"));
        } catch (IOException e) {
            throw new RuntimeException("Error loading properties", e);
        }
    }
}

View File

@@ -0,0 +1,35 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
/**
 * Base class for Jackson (de)serialization round-trip tests. Wraps the shared
 * {@link ObjectMapper} from {@link Utils} and turns mapping failures into test failures.
 */
public abstract class AbstractSerializationTest<T> {

    protected ObjectMapper mapper = Utils.getObjectMapper();

    /** Serializes {@code obj} to a JSON string, failing the test on any mapping error. */
    protected String serialize(T obj) {
        try {
            return mapper.writeValueAsString(obj);
        } catch (JsonProcessingException ex) {
            fail("Could not serialize request!", ex);
            return null; // unreachable — fail(...) throws, but the compiler needs a return
        }
    }

    /** Deserializes {@code jsonObject} into {@code deserializationClass}, failing the test on error. */
    protected T deserialize(String jsonObject, Class<T> deserializationClass) {
        try {
            return mapper.readValue(jsonObject, deserializationClass);
        } catch (JsonProcessingException ex) {
            fail("Could not deserialize jsonObject!", ex);
            return null; // unreachable — fail(...) throws, but the compiler needs a return
        }
    }

    /** Asserts that the unmarshalled object equals the original object. */
    protected void assertEqualsAfterUnmarshalling(T unmarshalledObject, T req) {
        assertEquals(req, unmarshalledObject);
    }
}

View File

@@ -0,0 +1,113 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.File;
import java.util.List;
import org.json.JSONObject;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
/**
 * Round-trip serialization tests for {@code OllamaChatRequestModel}: requests assembled
 * via {@code OllamaChatRequestBuilder} must survive Jackson serialization followed by
 * deserialization unchanged, and special-format properties must serialize as expected.
 */
public class TestChatRequestSerialization extends AbstractSerializationTest<OllamaChatRequestModel> {

    // Re-created before each test so builder state never leaks between cases.
    private OllamaChatRequestBuilder builder;

    @BeforeEach
    public void init() {
        builder = OllamaChatRequestBuilder.getInstance("DummyModel");
    }

    // Minimal request: model name plus a single user message.
    @Test
    public void testRequestOnlyMandatoryFields() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build();
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req);
    }

    // A system message followed by a user message must both round-trip.
    @Test
    public void testRequestMultipleMessages() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt")
        .withMessage(OllamaChatMessageRole.USER, "Some prompt")
        .build();
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req);
    }

    // Attached image files (Base64-encoded on the wire) must round-trip too.
    @Test
    public void testRequestWithMessageAndImage() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
        List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req);
    }

    // Options map round-trip: ints stay ints, floats come back as doubles.
    @Test
    public void testRequestWithOptions() {
        OptionsBuilder b = new OptionsBuilder();
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
        .withOptions(b.setMirostat(1).build())
        .withOptions(b.setTemperature(1L).build())
        .withOptions(b.setMirostatEta(1L).build())
        .withOptions(b.setMirostatTau(1L).build())
        .withOptions(b.setNumGpu(1).build())
        .withOptions(b.setSeed(1).build())
        .withOptions(b.setTopK(1).build())
        .withOptions(b.setTopP(1).build())
        .build();
        String jsonRequest = serialize(req);
        OllamaChatRequestModel deserializeRequest = deserialize(jsonRequest, OllamaChatRequestModel.class);
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
        assertEquals(1.0, deserializeRequest.getOptions().get("temperature"));
        assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_eta"));
        assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_tau"));
        assertEquals(1, deserializeRequest.getOptions().get("num_gpu"));
        assertEquals(1, deserializeRequest.getOptions().get("seed"));
        assertEquals(1, deserializeRequest.getOptions().get("top_k"));
        assertEquals(1.0, deserializeRequest.getOptions().get("top_p"));
    }

    // The JSON-response flag serializes as the string "json" (see
    // BooleanToJsonFormatFlagSerializer), so it is checked via org.json instead of a
    // Jackson round trip.
    @Test
    public void testWithJsonFormat() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
        .withGetJsonResponse().build();
        String jsonRequest = serialize(req);
        // no jackson deserialization as format property is not boolean ==> omit as deserialization
        // of request is never used in real code anyways
        JSONObject jsonObject = new JSONObject(jsonRequest);
        String requestFormatProperty = jsonObject.getString("format");
        assertEquals("json", requestFormatProperty);
    }

    // A custom prompt template must round-trip.
    @Test
    public void testWithTemplate() {
        OllamaChatRequestModel req = builder.withTemplate("System Template")
        .build();
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req);
    }

    // The streaming flag must survive a round trip.
    @Test
    public void testWithStreaming() {
        OllamaChatRequestModel req = builder.withStreaming().build();
        String jsonRequest = serialize(req);
        assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).isStream(), true);
    }

    // The keep-alive duration string must survive a round trip.
    @Test
    public void testWithKeepAlive() {
        String expectedKeepAlive = "5m";
        OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive)
        .build();
        String jsonRequest = serialize(req);
        assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive);
    }
}

View File

@@ -0,0 +1,37 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
/**
 * Round-trip serialization tests for {@code OllamaEmbeddingsRequestModel}.
 */
public class TestEmbeddingsRequestSerialization extends AbstractSerializationTest<OllamaEmbeddingsRequestModel> {

    private OllamaEmbeddingsRequestBuilder builder;

    @BeforeEach
    public void init() {
        builder = OllamaEmbeddingsRequestBuilder.getInstance("DummyModel","DummyPrompt");
    }

    /** A request carrying only model and prompt must survive a JSON round trip. */
    @Test
    public void testRequestOnlyMandatoryFields() {
        OllamaEmbeddingsRequestModel request = builder.build();
        String json = serialize(request);
        OllamaEmbeddingsRequestModel roundTripped = deserialize(json, OllamaEmbeddingsRequestModel.class);
        assertEqualsAfterUnmarshalling(roundTripped, request);
    }

    /** Options set via the builder must round-trip and keep their raw values. */
    @Test
    public void testRequestWithOptions() {
        OptionsBuilder optionsBuilder = new OptionsBuilder();
        OllamaEmbeddingsRequestModel request =
                builder.withOptions(optionsBuilder.setMirostat(1).build()).build();
        String json = serialize(request);
        OllamaEmbeddingsRequestModel roundTripped = deserialize(json, OllamaEmbeddingsRequestModel.class);
        assertEqualsAfterUnmarshalling(roundTripped, request);
        assertEquals(1, roundTripped.getOptions().get("mirostat"));
    }
}

View File

@@ -0,0 +1,56 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.json.JSONObject;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
/**
 * Round-trip serialization tests for {@code OllamaGenerateRequestModel}.
 */
public class TestGenerateRequestSerialization extends AbstractSerializationTest<OllamaGenerateRequestModel> {

    private OllamaGenerateRequestBuilder builder;

    @BeforeEach
    public void init() {
        builder = OllamaGenerateRequestBuilder.getInstance("DummyModel");
    }

    /** A request carrying only model and prompt must survive a JSON round trip. */
    @Test
    public void testRequestOnlyMandatoryFields() {
        OllamaGenerateRequestModel request = builder.withPrompt("Some prompt").build();
        String json = serialize(request);
        assertEqualsAfterUnmarshalling(deserialize(json, OllamaGenerateRequestModel.class), request);
    }

    /** Options set via the builder must round-trip and keep their raw values. */
    @Test
    public void testRequestWithOptions() {
        OptionsBuilder optionsBuilder = new OptionsBuilder();
        OllamaGenerateRequestModel request =
                builder.withPrompt("Some prompt").withOptions(optionsBuilder.setMirostat(1).build()).build();
        String json = serialize(request);
        OllamaGenerateRequestModel roundTripped = deserialize(json, OllamaGenerateRequestModel.class);
        assertEqualsAfterUnmarshalling(roundTripped, request);
        assertEquals(1, roundTripped.getOptions().get("mirostat"));
    }

    /** The JSON-response flag serializes as the string "json", checked via org.json. */
    @Test
    public void testWithJsonFormat() {
        OllamaGenerateRequestModel request =
                builder.withPrompt("Some prompt").withGetJsonResponse().build();
        String json = serialize(request);
        // no jackson deserialization as format property is not boolean ==> omit as deserialization
        // of request is never used in real code anyways
        JSONObject jsonObject = new JSONObject(json);
        String requestFormatProperty = jsonObject.getString("format");
        assertEquals("json", requestFormatProperty);
    }
}

View File

@@ -0,0 +1,42 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;
import io.github.amithkoujalgi.ollama4j.core.models.Model;
import org.junit.jupiter.api.Test;
/**
 * Deserialization tests for {@code Model}: the {@code modified_at} timestamp must parse
 * both with a numeric UTC offset and with the Zulu ("Z") designator, exercising the
 * OffsetDateTime mapping via the registered JavaTimeModule.
 */
public class TestModelRequestSerialization extends AbstractSerializationTest<Model> {

    /** Builds a model-response JSON payload with the given {@code modified_at} value. */
    private static String modelJsonWithTimestamp(String modifiedAt) {
        return "{\n"
                + "\"name\": \"codellama:13b\",\n"
                + "\"modified_at\": \"" + modifiedAt + "\",\n"
                + "\"size\": 7365960935,\n"
                + "\"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n"
                + "\"details\": {\n"
                + "\"format\": \"gguf\",\n"
                + "\"family\": \"llama\",\n"
                + "\"families\": null,\n"
                + "\"parameter_size\": \"13B\",\n"
                + "\"quantization_level\": \"Q4_0\"\n"
                + "}}";
    }

    @Test
    public void testDeserializationOfModelResponseWithOffsetTime(){
        // Numeric-offset timestamps (e.g. -07:00) must deserialize without error.
        deserialize(modelJsonWithTimestamp("2023-11-04T14:56:49.277302595-07:00"), Model.class);
    }

    @Test
    public void testDeserializationOfModelResponseWithZuluTime(){
        // Zulu ("Z") UTC timestamps must also deserialize without error.
        deserialize(modelJsonWithTimestamp("2023-11-04T14:56:49.277302595Z"), Model.class);
    }
}

View File

@@ -1,2 +1,4 @@
ollama.api.url=http://192.168.29.223:11434
ollama.model=llava
ollama.url=http://localhost:11434
ollama.model=qwen:0.5b
ollama.model.image=llava
ollama.request-timeout-seconds=120