Compare commits

..

27 Commits

Author SHA1 Message Date
Amith Koujalgi
11a98a72a1 Updated docusaurus.config.js
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-08-06 11:22:37 +05:30
Amith Koujalgi
422601c0fc Updated ModelsProcessResponse class to support ps() API
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-08-06 11:11:15 +05:30
Amith Koujalgi
75e6576a13 Updated docs
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-08-06 11:00:54 +05:30
Amith Koujalgi
51dd3f3e1e Added Json ignore properties for ModelsProcessResponse
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-08-06 10:46:35 +05:30
Amith Koujalgi
30250f79d9 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-28 00:06:55 +05:30
Amith Koujalgi
d4ee9ed051 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 23:55:21 +05:30
Amith Koujalgi
4412ac683a updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 23:53:54 +05:30
Amith Koujalgi
b5b1a26941 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 23:52:00 +05:30
Amith Koujalgi
a84230bbd1 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 23:30:53 +05:30
Amith Koujalgi
00c9b16556 Refactored classes into request and response packages
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 23:20:18 +05:30
Amith Koujalgi
9a2194334f Added ps() API
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 20:58:53 +05:30
Amith Koujalgi
f9cf11ecdf Added ps() API
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 20:54:04 +05:30
Amith Koujalgi
0af80865c3 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:59:15 +05:30
Amith Koujalgi
a304c01194 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:58:43 +05:30
Amith Koujalgi
887708864e updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:56:19 +05:30
Amith Koujalgi
2f0c4fdcc9 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:54:34 +05:30
Amith Koujalgi
73aabd7ca6 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:48:05 +05:30
Amith Koujalgi
17ca2bdee3 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:46:53 +05:30
Amith Koujalgi
e43bd3acb4 Refactored the package structure to use io.github.ollama4j instead of the old naming io.github.amithkoujalgi.ollama4j.core
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:37:47 +05:30
Amith Koujalgi
0b041f4340 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 15:59:23 +05:30
Amith Koujalgi
6c6062b757 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 15:58:24 +05:30
Amith Koujalgi
68fd8b7cc8 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 15:57:38 +05:30
Amith Koujalgi
bb6f8aa343 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 15:56:36 +05:30
Amith Koujalgi
12802be0bc updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 15:35:35 +05:30
Amith Koujalgi
bd56ccfef7 Update README.md 2024-07-27 02:45:36 +05:30
Amith Koujalgi
51563f276f updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 00:57:55 +05:30
Amith Koujalgi
6e282124bf updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 00:56:57 +05:30
87 changed files with 672 additions and 349 deletions

6
.gitignore vendored
View File

@@ -37,6 +37,8 @@ build/
### Mac OS ### ### Mac OS ###
.DS_Store .DS_Store
/.idea/ /.idea/
/src/main/java/io/github/amithkoujalgi/ollama4j/core/localtests/
pom.xml.* pom.xml.*
release.properties release.properties
!.idea/icon.svg
src/main/java/io/github/ollama4j/localtests

18
.idea/icon.svg generated Normal file
View File

@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg version="1.1" viewBox="0 0 1478 2048" width="1280" height="1280" xmlns="http://www.w3.org/2000/svg">
<path transform="translate(0)" d="m0 0h1478v2048h-1478z" fill="#FEFEFE"/>
<path transform="translate(411,47)" d="m0 0h24l21 5 17 8 14 10 12 11 10 10 12 16 14 24 11 24 9 24 8 27 6 25 4 21 3 19 3 25 6-2 16-9 29-13 28-10 30-8 26-4 27-2h16l30 2 32 5 19 5 30 10 26 11 20 10 13 8 2-15 6-39 8-36 6-20 9-27 11-24 10-19 12-18 9-11 9-10 12-11 17-11 15-7 19-4h24l18 4 16 7 12 8 10 8 17 17 13 18 12 22 9 20 7 19 9 30 7 33 5 33 3 29 1 15v79l-3 30-4 29-4 20 16 15 17 17 8 7 18 18 9 11 10 12 14 21 9 16 8 16 5 17 7 19 10 34 5 27 3 24 1 14v42l-4 35-6 29-8 27-9 22-12 25-13 22-5 7 2 6 14 29 12 31 8 26 7 29 6 36 2 21 1 19v37l-3 34-4 25-5 24-8 27-8 21-7 16-11 21-15 24 2 5 7 10 8 15 11 29 8 29 6 31 3 22 2 24v57l-4 33-6 27-3 9-3 1h-89l-2-1v-11l2-13 6-21 3-19 1-9v-48l-3-31-4-22-7-27-6-16-8-16-12-21-4-11-3-17v-31l4-13 6-10 11-16 9-15 11-23 10-31 6-26 3-22 1-16v-33l-2-27-4-27-10-39-9-25-8-18-13-25-12-19-4-10-1-5v-13l3-11 4-8 9-10 13-17 8-13 8-14 11-27 7-25 4-21 2-20v-27l-2-22-5-27-6-21-8-22-12-25-8-14-11-16-8-10-11-13-13-13-8-7-17-13-18-11-17-9-15-6-23-7-14-3-17-2h-28l-18 2h-18l-10-3-6-5-16-32-8-14-11-15-8-10-9-10-7-7-14-11-12-9-16-10-19-10-13-6-20-8-17-5-24-5-15-2h-33l-25 4-24 6-22 8-20 9-20 11-19 13-10 8-11 9-13 13-13 17-10 15-10 18-8 18-9 10-6 3h-21l-19-2h-29l-20 3-14 3-27 9-21 10-18 11-16 12-15 13-15 15-11 14-12 17-10 17-8 16-10 25-7 24-5 24-3 25v31l4 30 5 21 9 27 12 25 10 16 7 9 16 15 6 12 3 9v15l-6 16-13 21-14 27-8 20-8 25-7 27-4 23-3 31v35l3 32 5 26 9 30 6 15 10 21 11 17 12 16 8 13 4 13v19l-4 13-12 22-9 15-8 16-7 19-7 26-5 30-2 23v42l3 26 5 22 3 12 1 9v10l-3 1h-81l-11-1-5-21-5-30-2-22v-52l2-25 5-34 5-23 7-25 8-21 11-23 9-12-1-5-14-22-10-19-11-25-10-30-6-24-5-29-3-27-1-17v-35l2-30 4-29 5-26 10-36 9-25 10-23 10-21-1-7-10-14-14-26-7-15-8-20-8-26-6-29-3-25v-66l3-27 7-33 9-29 10-25 8-16 9-17 11-17 11-15 11-13 7-8 56-56-1-6-2-5-4-26-3-32-1-17v-69l3-39 5-35 6-29 8-30 8-23 12-27 12-21 12-16 11-12 7-7 13-10 16-9 11-4z" fill="#010000"/>
<path transform="translate(856,1181)" d="m0 0h13l10 4 6 7 4 9 6 29 5 22 8 16 4-13 7-23 5-12 6-9 9-8 7-3 5-1h10l8 4 5 8v11l-6 17-6 15-4 16v22l8 38 1 9v11l-3 16-8 16-9 9-10 8-6 7-4 8-2 7-1 12v51l-2 17-4 13-11 20-5 15-3 17v21l3 17 6 16 11 28 13 38 10 37 7 33 5 33 3 28 1 18v49l-2 24-4 22-6 18-6 10-7 8-10 6-13 4h-17l-7-4-10-9-11-15-11-16-12-17-9-11-9-10-10-9-13-8-14-5-5-1h-26l-16 4-18 8-18 11-16 12-16 13-17 14-20 15-16 9-13 4h-11l-10-3-7-6-4-8-2-9v-39l2-25-6 8-2 1h-8l-13-4-8-7-4-7v-9l6-12 8-10 9-11 9-14 5-12 2-11v-17l-4-20-6-21-2-13v-16l2-12 8-16 9-13 12-16 13-21 8-17 9-27 4-20 4-39 3-39 3-63v-98l-3-35-3-13 5 2 16 11 13 10 11 9 14 12 17 16 33 33 7 8 12 13 9 11 12 14 8 10 10 13 12 16 13 18 18 27 12 19 6 8 6 4 9 1 12-3 10-6 8-11 4-11v-33l-3-17-4-11-5-7-6-3-15-4-16-9-16-8-4-1h-12l-23 5-8-1-7-6-4-10v-10l4-8 9-8 13-6 13-4 10-1-9-11-8-10-10-15-8-16-7-15-9-27-1-5v-13l3-8 8-8 9-4 6-1 8 3 7 9 15 31 8 12 8 9 2 1-6-21-4-20-1-8v-33l3-10 4-5z" fill="#020101"/>
<path transform="translate(735,724)" d="m0 0h30l24 2 27 4 20 5 27 9 29 14 18 11 16 12 11 9 15 14 12 14 10 14 9 15 7 14 7 19 5 20 2 14v34l-3 20-6 19-6 15-11 19-9 12-11 13-15 15-11 9-16 11-22 12-26 10-13 4-21 5-19 2h-117l-24-3-27-6-28-10-16-8-14-8-14-10-10-8-10-9-10-10-11-14-10-15-10-21-6-18-4-19-1-9v-31l2-15 5-20 8-21 10-19 8-12 10-13 12-13 13-13 11-9 15-11 15-9 14-8 21-9 16-6 22-6 29-5z" fill="#FEFEFE"/>
<path transform="translate(816,1496)" d="m0 0 5 1 13 21 10 18 14 27 15 31 17 40 10 27 12 36 8 28 7 30 5 28 3 28v60l-2 31-3 23-5 17-4 6-5 4-4 1h-14l-6-4-11-14-10-15-12-17-9-11-12-14-8-7-14-10-16-8-12-4-12-2h-20l-16 3-15 5-16 8-18 12-14 11-15 13-14 13-22 18-14 7-4 1h-7l-5-6-3-13v-29l3-32 6-45 11-66 20-100 13-61 2-6 11-7 4-2 7 11 10 10 13 8 18 6 6 1h25l17-4 16-7 13-9 7-6 9-11 8-14 5-15 2-10v-20l-3-11z" fill="#FEFEFE"/>
<path transform="translate(735,724)" d="m0 0h30l24 2 27 4 20 5 27 9 29 14 18 11 16 12 11 9 15 14 12 14 10 14 9 15 7 14 7 19 5 20 2 14v34l-3 20-6 19-6 15-11 19-9 12-11 13-15 15-11 9-16 11-22 12-26 10-13 4-21 5-19 2h-117l-24-3-27-6-28-10-16-8-14-8-14-10-10-8-10-9-10-10-11-14-10-15-10-21-6-18-4-19-1-9v-31l2-15 5-20 8-21 10-19 8-12 10-13 12-13 13-13 11-9 15-11 15-9 14-8 21-9 16-6 22-6 29-5zm0 63-20 2-20 4-29 10-17 8-17 10-17 13-15 14-9 11-9 14-9 19-6 20-2 14v11l3 16 6 18 7 14 8 11 11 12 10 9 18 12 16 8 15 6 25 6 15 2 14 1h89l21-3 25-6 26-11 15-9 10-8 10-9 8-8 12-18 6-13 5-16 2-12v-15l-2-14-5-16-5-12-7-13-12-16-12-13-8-7-16-12-14-8-15-8-28-10-21-5-14-2-13-1z" fill="#010101"/>
<path transform="translate(1081,140)" d="m0 0h5l5 4 9 11 11 19 11 28 6 21 7 32 4 27 3 42v49l-3 47-1 4-6-1-10-4-22-4-44-6-27-2-9-15-2-5v-40l2-34 5-38 8-38 5-20 11-29 11-23 7-10 11-13z" fill="#FEFEFE"/>
<path transform="translate(423,139)" d="m0 0 4 2 10 10 10 14 11 22 9 24 7 25 6 29 5 30 3 31 1 16v45l-6 14-5 6-29 2-31 4-35 6-11 4h-3l-3-28-1-27v-41l2-36 5-35 8-37 6-19 8-21 8-16 8-12 8-9z" fill="#FEFEFE"/>
<path transform="translate(745,1472)" d="m0 0h9l16 3 14 7 10 9 6 10 3 9 1 6v15l-4 14-8 16-9 10-9 8-15 8-12 4-10 2h-15l-13-3-16-8-11-10-6-10-5-12-2-11v-8l2-10h2l1-5 4-8 8-10 11-9 17-9 12-5 8-2z" fill="red"/>
<path transform="translate(436,735)" d="m0 0h16l15 4 12 7 10 9 7 9 5 11 2 8v21l-4 14-6 12-7 9-14 14-11 7-12 4h-15l-14-3-11-4-11-7-9-10-8-14-2-9v-21l4-14 8-16 6-9 10-10 14-8 9-3z" fill="#010101"/>
<path transform="translate(1055,735)" d="m0 0h15l16 4 11 6 10 8 7 9 8 15 5 14 1 6v20l-4 13-7 11-7 8-14 9-16 5-5 1h-16l-13-4-11-7-17-17-8-14-5-14-1-5v-20l4-13 6-10 9-10 11-8 11-5z" fill="#010101"/>
<path transform="translate(717,869)" d="m0 0h9l12 4 13 8 5-1 8-6 9-4 12-1 10 3 6 4 6 9 1 2v15l-5 10-8 7-11 8-6 4-1 6 3 17v19l-5 8-9 6-8 2h-10l-11-2-8-6-4-6-1-3v-15l3-19v-7l-16-10-11-11-3-5-1-4v-13l5-10 6-5z" fill="#020101"/>
<path transform="translate(717,1479)" d="m0 0 2 1-2 3h2v4 2l6 1 2 1 3 13-1 10-5 10h-2v2h-2v2h-2v2l-5 2-3 2-9 2v-2l-5 1-9-5-5-4v-2h-2l-2-2-6 3 1-7 5-10 8-10 11-9 17-9z" fill="pink"/>
<path transform="translate(599,1667)" d="m0 0 4 1v14l-9 48-3 19-2 1-8-20-3-11v-15l5-15 8-14 6-7z" fill="white"/>
<path transform="translate(937,1063)" d="m0 0 2 1-11 9-15 10-19 10-26 10-13 4-21 5-19 2h-117l-9-1v-1h82l37-1 18-2 32-7 14-5 16-6 10-4 17-9 11-7z" fill="#553D3C"/>
</svg>

After

Width:  |  Height:  |  Size: 6.1 KiB

122
README.md
View File

@@ -9,26 +9,33 @@ A Java library (wrapper/binding) for [Ollama](https://ollama.ai/) server.
Find more details on the [website](https://ollama4j.github.io/ollama4j/). Find more details on the [website](https://ollama4j.github.io/ollama4j/).
<div align="center">
![GitHub stars](https://img.shields.io/github/stars/ollama4j/ollama4j) ![GitHub stars](https://img.shields.io/github/stars/ollama4j/ollama4j)
![GitHub forks](https://img.shields.io/github/forks/ollama4j/ollama4j) ![GitHub forks](https://img.shields.io/github/forks/ollama4j/ollama4j)
![GitHub watchers](https://img.shields.io/github/watchers/ollama4j/ollama4j) ![GitHub watchers](https://img.shields.io/github/watchers/ollama4j/ollama4j)
![Contributors](https://img.shields.io/github/contributors/ollama4j/ollama4j?style=social) ![Contributors](https://img.shields.io/github/contributors/ollama4j/ollama4j?style=social)
![GitHub License](https://img.shields.io/github/license/ollama4j/ollama4j) ![GitHub License](https://img.shields.io/github/license/ollama4j/ollama4j)
[//]: # (![GitHub repo size]&#40;https://img.shields.io/github/repo-size/ollama4j/ollama4j&#41;) [//]: # (![GitHub repo size]&#40;https://img.shields.io/github/repo-size/ollama4j/ollama4j&#41;)
[//]: # (![GitHub top language]&#40;https://img.shields.io/github/languages/top/ollama4j/ollama4j&#41;) [//]: # (![GitHub top language]&#40;https://img.shields.io/github/languages/top/ollama4j/ollama4j&#41;)
[//]: # (![JitPack Downloads This Month Badge]&#40;https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fjitpack.io%2Fapi%2Fdownloads%2Fio.github.ollama4j%2Follama4j&query=%24.month&label=JitPack%20Downloads%20-%20This%20Month&#41;) [//]: # (![JitPack Downloads This Month Badge]&#40;https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fjitpack.io%2Fapi%2Fdownloads%2Fio.github.ollama4j%2Follama4j&query=%24.month&label=JitPack%20Downloads%20-%20This%20Month&#41;)
[//]: # (![JitPack Downloads This Week Badge]&#40;https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fjitpack.io%2Fapi%2Fdownloads%2Fio.github.ollama4j%2Follama4j&query=%24.week&label=JitPack%20Downloads%20-%20This%20Week&#41;) [//]: # (![JitPack Downloads This Week Badge]&#40;https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fjitpack.io%2Fapi%2Fdownloads%2Fio.github.ollama4j%2Follama4j&query=%24.week&label=JitPack%20Downloads%20-%20This%20Week&#41;)
[//]: # (![JitPack Downloads Per Month Badge]&#40;https://jitpack.io/v/ollama4j/ollama4j/month.svg&#41;) [//]: # (![JitPack Downloads Per Month Badge]&#40;https://jitpack.io/v/ollama4j/ollama4j/month.svg&#41;)
![GitHub Downloads (all assets, all releases)](https://img.shields.io/github/downloads/ollama4j/ollama4j/total?label=GitHub%20Package%20Downloads)
[//]: # (![GitHub Downloads &#40;all assets, all releases&#41;]&#40;https://img.shields.io/github/downloads/ollama4j/ollama4j/total?label=GitHub%20Package%20Downloads&#41;)
![GitHub last commit](https://img.shields.io/github/last-commit/ollama4j/ollama4j?color=green) ![GitHub last commit](https://img.shields.io/github/last-commit/ollama4j/ollama4j?color=green)
[![codecov](https://codecov.io/gh/ollama4j/ollama4j/graph/badge.svg?token=U0TE7BGP8L)](https://codecov.io/gh/ollama4j/ollama4j) [![codecov](https://codecov.io/gh/ollama4j/ollama4j/graph/badge.svg?token=U0TE7BGP8L)](https://codecov.io/gh/ollama4j/ollama4j)
![Build Status](https://github.com/ollama4j/ollama4j/actions/workflows/maven-publish.yml/badge.svg) ![Build Status](https://github.com/ollama4j/ollama4j/actions/workflows/maven-publish.yml/badge.svg)
</div>
[//]: # (![Hits]&#40;https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Follama4j%2Follama4j&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false&#41;) [//]: # (![Hits]&#40;https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Follama4j%2Follama4j&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false&#41;)
@@ -39,7 +46,7 @@ Find more details on the [website](https://ollama4j.github.io/ollama4j/).
- [How does it work?](#how-does-it-work) - [How does it work?](#how-does-it-work)
- [Requirements](#requirements) - [Requirements](#requirements)
- [Installation](#installation) - [Installation](#installation)
- [API Spec](https://ollama4j.github.io/ollama4j/docs/category/apis---model-management) - [API Spec](https://ollama4j.github.io/ollama4j/category/apis---model-management)
- [Javadoc](https://ollama4j.github.io/ollama4j/apidocs/) - [Javadoc](https://ollama4j.github.io/ollama4j/apidocs/)
- [Development](#development) - [Development](#development)
- [Contributions](#get-involved) - [Contributions](#get-involved)
@@ -63,21 +70,70 @@ Find more details on the [website](https://ollama4j.github.io/ollama4j/).
![Java](https://img.shields.io/badge/Java-11_+-green.svg?style=for-the-badge&labelColor=gray&label=Java&color=orange) ![Java](https://img.shields.io/badge/Java-11_+-green.svg?style=for-the-badge&labelColor=gray&label=Java&color=orange)
[![][ollama-shield]][ollama-link] **Or** [![][ollama-docker-shield]][ollama-docker]
[ollama-link]: https://ollama.ai/ <a href="https://ollama.com/" target="_blank">
<img src="https://img.shields.io/badge/v0.3.0-green.svg?style=for-the-badge&labelColor=gray&label=Ollama&color=blue" alt=""/>
</a>
[ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=for-the-badge&labelColor=gray <table>
<tr>
<td>
[ollama-docker]: https://hub.docker.com/r/ollama/ollama <a href="https://ollama.ai/" target="_blank">Local Installation</a>
[ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=for-the-badge&labelColor=gray </td>
<td>
<a href="https://hub.docker.com/r/ollama/ollama" target="_blank">Docker Installation</a>
</td>
</tr>
<tr>
<td>
<a href="https://ollama.com/download/Ollama-darwin.zip" target="_blank">Download for macOS</a>
<a href="https://ollama.com/download/OllamaSetup.exe" target="_blank">Download for Windows</a>
Install on Linux
```shell
curl -fsSL https://ollama.com/install.sh | sh
```
</td>
<td>
CPU only
```shell
docker run -d -p 11434:11434 \
-v ollama:/root/.ollama \
--name ollama \
ollama/ollama
```
NVIDIA GPU
```shell
docker run -d -p 11434:11434 \
--gpus=all \
-v ollama:/root/.ollama \
--name ollama \
ollama/ollama
```
</td>
</tr>
</table>
## Installation ## Installation
> [!NOTE] > [!NOTE]
> We have migrated the package repository from Maven Central to GitHub package repository due to technical issues with > We are now publishing the artifacts to both Maven Central and GitHub package repositories.
> publishing. Please update your repository settings to get latest version of Ollama4j.
> >
> Track the releases [here](https://github.com/ollama4j/ollama4j/releases) and update the dependency version > Track the releases [here](https://github.com/ollama4j/ollama4j/releases) and update the dependency version
> according to your requirements. > according to your requirements.
@@ -88,18 +144,18 @@ Find more details on the [website](https://ollama4j.github.io/ollama4j/).
[![][ollama4j-mvn-releases-shield]][ollama4j-mvn-releases-link] [![][ollama4j-mvn-releases-shield]][ollama4j-mvn-releases-link]
[ollama4j-mvn-releases-link]: https://github.com/ollama4j/ollama4j/releases [ollama4j-mvn-releases-link]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j/overview
[ollama4j-mvn-releases-shield]: https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20Maven%20Central%20 [ollama4j-mvn-releases-shield]: https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20Maven%20Central
In your Maven project, add this dependency: In your Maven project, add this dependency:
```xml ```xml
<dependency> <dependency>
<groupId>io.github.ollama4j</groupId> <groupId>io.github.ollama4j</groupId>
<artifactId>ollama4j</artifactId> <artifactId>ollama4j</artifactId>
<version>1.0.78</version> <version>1.0.79</version>
</dependency> </dependency>
``` ```
@@ -107,9 +163,9 @@ Find more details on the [website](https://ollama4j.github.io/ollama4j/).
[![][ollama4j-releases-shield]][ollama4j-releases-link] [![][ollama4j-releases-shield]][ollama4j-releases-link]
[ollama4j-releases-link]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j/overview [ollama4j-releases-link]: https://github.com/ollama4j/ollama4j/releases
[ollama4j-releases-shield]: https://img.shields.io/github/v/release/ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20GitHub%20Packages%20 [ollama4j-releases-shield]: https://img.shields.io/github/v/release/ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20GitHub%20Packages
1. Add `GitHub Maven Packages` repository to your project's `pom.xml` or your `settings.xml`: 1. Add `GitHub Maven Packages` repository to your project's `pom.xml` or your `settings.xml`:
@@ -155,17 +211,17 @@ Find more details on the [website](https://ollama4j.github.io/ollama4j/).
<dependency> <dependency>
<groupId>io.github.ollama4j</groupId> <groupId>io.github.ollama4j</groupId>
<artifactId>ollama4j</artifactId> <artifactId>ollama4j</artifactId>
<version>1.0.78</version> <version>1.0.79</version>
</dependency> </dependency>
``` ```
##### For Gradle ### For Gradle
1. Add the dependency 1. Add the dependency
```groovy ```groovy
dependencies { dependencies {
implementation 'com.github.ollama4j:ollama4j:1.0.78' implementation 'com.github.ollama4j:ollama4j:1.0.79'
} }
``` ```
@@ -221,37 +277,13 @@ Newer artifacts are published via GitHub Actions CI workflow when a new release
- `ollama-translator`: Minecraft 1.20.6 spigot plugin allows to easily break language barriers by using ollama on the - `ollama-translator`: Minecraft 1.20.6 spigot plugin allows to easily break language barriers by using ollama on the
server to translate all messages into a specific target language. server to translate all messages into a specific target language.
- https://github.com/liebki/ollama-translator - https://github.com/liebki/ollama-translator
- `Ollama4j Web UI`: A web UI for Ollama written in Java using Spring Boot and Vaadin framework and
Ollama4j. https://github.com/ollama4j/ollama4j-web-ui
#### Traction #### Traction
[![Star History Chart](https://api.star-history.com/svg?repos=ollama4j/ollama4j&type=Date)](https://star-history.com/#ollama4j/ollama4j&Date) [![Star History Chart](https://api.star-history.com/svg?repos=ollama4j/ollama4j&type=Date)](https://star-history.com/#ollama4j/ollama4j&Date)
### Areas of improvement
- [x] Use Java-naming conventions for attributes in the request/response models instead of the
snake-case conventions. (
possibly with Jackson-mapper's `@JsonProperty`)
- [x] Fix deprecated HTTP client code
- [x] Setup logging
- [x] Use lombok
- [x] Update request body creation with Java objects
- [ ] Async APIs for images
- [ ] Support for function calling with models like Mistral
- [x] generate in sync mode
- [ ] generate in async mode
- [ ] Add custom headers to requests
- [x] Add additional params for `ask` APIs such as:
- [x] `options`: additional model parameters for the Modelfile such as `temperature` -
Supported [params](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
- [x] `system`: system prompt to (overrides what is defined in the Modelfile)
- [x] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile)
- [x] `context`: the context parameter returned from a previous request, which can be used to keep a
short
conversational memory
- [x] `stream`: Add support for streaming responses from the model
- [ ] Add test cases
- [ ] Handle exceptions better (maybe throw more appropriate exceptions)
### Get Involved ### Get Involved
<div align="center"> <div align="center">
@@ -293,7 +325,7 @@ project.
<p align="center"> <p align="center">
<a href="https://github.com/ollama4j/ollama4j/graphs/contributors"> <a href="https://github.com/ollama4j/ollama4j/graphs/contributors">
<img src="https://contrib.rocks/image?repo=ollama4j/ollama4j" /> <img src="https://contrib.rocks/image?repo=ollama4j/ollama4j" alt=""/>
</a> </a>
</p> </p>

View File

@@ -58,9 +58,9 @@ elevate your projects.
I look forward to seeing the incredible applications/projects you'll build with Ollama4j! 🌟 I look forward to seeing the incredible applications/projects you'll build with Ollama4j! 🌟
Find the full API spec here: https://amithkoujalgi.github.io/ollama4j/ Find the full API spec here: https://ollama4j.github.io/ollama4j/
Find the Javadoc here: https://amithkoujalgi.github.io/ollama4j/apidocs/ Find the Javadoc here: https://ollama4j.github.io/ollama4j/apidocs/
Ollama4j Docs is powered by [Docusaurus](https://docusaurus.io). Ollama4j Docs is powered by [Docusaurus](https://docusaurus.io).

View File

@@ -10,6 +10,8 @@ Ollama server would be setup behind a gateway/reverse proxy with basic auth.
After configuring basic authentication, all subsequent requests will include the Basic Auth header. After configuring basic authentication, all subsequent requests will include the Basic Auth header.
```java ```java
import io.github.ollama4j.OllamaAPI;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -36,8 +36,9 @@ from [javadoc](https://ollama4j.github.io/ollama4j/apidocs/io/github/ollama4j/ol
## Build an empty `Options` object ## Build an empty `Options` object
```java ```java
import io.github.amithkoujalgi.ollama4j.core.utils.Options; import io.github.ollama4j.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main { public class Main {
@@ -55,8 +56,8 @@ public class Main {
## Build the `Options` object with values ## Build the `Options` object with values
```java ```java
import io.github.amithkoujalgi.ollama4j.core.utils.Options; import io.github.ollama4j.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; import io.github.ollama4j.utils.OptionsBuilder;
public class Main { public class Main {

View File

@@ -7,6 +7,8 @@ sidebar_position: 3
This API lets you check the reachability of Ollama server. This API lets you check the reachability of Ollama server.
```java ```java
import io.github.ollama4j.OllamaAPI;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -0,0 +1,30 @@
---
sidebar_position: 4
---
# PS
This API provides a list of running models and details about each model currently loaded into memory.
This API corresponds to the [PS](https://github.com/ollama/ollama/blob/main/docs/api.md#list-running-models) API.
```java
package io.github.ollama4j.localtests;
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.ps.ModelsProcessResponse;
import java.io.IOException;
// Example: query the Ollama server for models currently loaded into memory (ps API).
// ps() performs an HTTP call, so the checked exceptions it can raise must be declared
// (the snippet already imports OllamaBaseException and IOException for this purpose).
public class Main {
    public static void main(String[] args) throws OllamaBaseException, IOException, InterruptedException {
        // Point the client at a locally running Ollama server.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
        // Fetch details of every model currently loaded into memory.
        ModelsProcessResponse response = ollamaAPI.ps();
        System.out.println(response);
    }
}
```

View File

@@ -7,6 +7,8 @@ sidebar_position: 2
This API lets you set the request timeout for the Ollama client. This API lets you set the request timeout for the Ollama client.
```java ```java
import io.github.ollama4j.OllamaAPI;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -9,6 +9,8 @@ This API lets you set the verbosity of the Ollama client.
## Try asking a question about the model. ## Try asking a question about the model.
```java ```java
import io.github.ollama4j.OllamaAPI;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -10,6 +10,13 @@ information using the history of already asked questions and the respective answ
## Create a new conversation and use chat history to augment follow up questions ## Create a new conversation and use chat history to augment follow up questions
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.types.OllamaModelType;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -20,7 +27,7 @@ public class Main {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
// create first user question // create first user question
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
.build(); .build();
// start conversation with model // start conversation with model
@@ -78,6 +85,14 @@ You will get a response similar to:
## Create a conversation where the answer is streamed ## Create a conversation where the answer is streamed
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -86,7 +101,7 @@ public class Main {
OllamaAPI ollamaAPI = new OllamaAPI(host); OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?") "What is the capital of France? And what's France's connection with Mona Lisa?")
.build(); .build();
@@ -113,7 +128,13 @@ You will get a response similar to:
## Use a simple Console Output Stream Handler ## Use a simple Console Output Stream Handler
```java ```java
import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler; import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.impl.ConsoleOutputStreamHandler;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.types.OllamaModelType;
public class Main { public class Main {
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
@@ -121,7 +142,7 @@ public class Main {
OllamaAPI ollamaAPI = new OllamaAPI(host); OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!") OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
.build(); .build();
OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler(); OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
ollamaAPI.chat(requestModel, streamHandler); ollamaAPI.chat(requestModel, streamHandler);
@@ -132,6 +153,14 @@ public class Main {
## Create a new conversation with individual system prompt ## Create a new conversation with individual system prompt
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.types.OllamaModelType;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -142,7 +171,7 @@ public class Main {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
// create request with system-prompt (overriding the model defaults) and user question // create request with system-prompt (overriding the model defaults) and user question
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
.withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?") .withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
.build(); .build();
@@ -162,6 +191,16 @@ You will get a response similar to:
## Create a conversation about an image (requires model with image recognition skills) ## Create a conversation about an image (requires model with image recognition skills)
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.types.OllamaModelType;
import java.io.File;
import java.util.List;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -172,9 +211,10 @@ public class Main {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA);
// Load Image from File and attach to user message (alternatively images could also be added via URL) // Load Image from File and attach to user message (alternatively images could also be added via URL)
OllamaChatRequestModel requestModel = OllamaChatRequest requestModel =
builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); List.of(
new File("/path/to/image"))).build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel); OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
System.out.println("First answer: " + chatResult.getResponse()); System.out.println("First answer: " + chatResult.getResponse());

View File

@@ -12,6 +12,10 @@ This API corresponds to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
import io.github.ollama4j.types.OllamaModelType;
public class Main { public class Main {
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
@@ -38,9 +42,7 @@ public class Main {
System.out.println("Complete Response:"); System.out.println("Complete Response:");
System.out.println("------------------------"); System.out.println("------------------------");
System.out.println(streamer.getResult()); System.out.println(streamer.getCompleteResponse());
} }
} }
``` ```
You will get a steaming response.

View File

@@ -12,6 +12,10 @@ Parameters:
- `prompt`: text to generate embeddings for - `prompt`: text to generate embeddings for
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;
import java.util.List;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -22,6 +22,14 @@ If you have this image downloaded and you pass the path to the downloaded image
![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg) ![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg)
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import java.io.File;
import java.util.List;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -32,7 +40,9 @@ public class Main {
OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA, OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA,
"What's in this image?", "What's in this image?",
List.of( List.of(
new File("/path/to/image"))); new File("/path/to/image")),
new OptionsBuilder().build()
);
System.out.println(result.getResponse()); System.out.println(result.getResponse());
} }
} }

View File

@@ -22,6 +22,13 @@ Passing the link of this image the following code:
![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg) ![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg)
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import java.util.List;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -32,7 +39,9 @@ public class Main {
OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA, OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA,
"What's in this image?", "What's in this image?",
List.of( List.of(
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")); "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
new OptionsBuilder().build()
);
System.out.println(result.getResponse()); System.out.println(result.getResponse());
} }
} }

View File

@@ -29,6 +29,8 @@ You could do that with ease with the `function calling` capabilities of the mode
### Create Functions ### Create Functions
We can create static functions as our tools.
This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns
fuel price value. fuel price value.
@@ -50,6 +52,8 @@ public static String getCurrentWeather(Map<String, Object> arguments) {
} }
``` ```
Another way to create our tools is by creating classes by extending `ToolFunction`.
This function takes the argument `employee-name` and performs an operation with the argument and returns employee This function takes the argument `employee-name` and performs an operation with the argument and returns employee
details. details.
@@ -211,13 +215,13 @@ Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
### Full Example ### Full Example
```java ```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; import io.github.ollama4j.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolInvocationException; import io.github.ollama4j.exceptions.ToolInvocationException;
import io.github.amithkoujalgi.ollama4j.core.tools.OllamaToolsResult; import io.github.ollama4j.tools.OllamaToolsResult;
import io.github.amithkoujalgi.ollama4j.core.tools.ToolFunction; import io.github.ollama4j.tools.ToolFunction;
import io.github.amithkoujalgi.ollama4j.core.tools.Tools; import io.github.ollama4j.tools.Tools;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; import io.github.ollama4j.utils.OptionsBuilder;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
@@ -341,7 +345,7 @@ Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
:::: ::::
### Room for improvement ### Potential Improvements
Instead of explicitly registering `ollamaAPI.registerTool(toolSpecification)`, we could introduce annotation-based tool Instead of explicitly registering `ollamaAPI.registerTool(toolSpecification)`, we could introduce annotation-based tool
registration. For example: registration. For example:

View File

@@ -16,6 +16,11 @@ to [this](/apis-extras/options-builder).
## Try asking a question about the model. ## Try asking a question about the model.
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -44,6 +49,11 @@ You will get a response similar to:
## Try asking a question, receiving the answer streamed ## Try asking a question, receiving the answer streamed
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -80,6 +90,11 @@ You will get a response similar to:
## Try asking a question from general topics. ## Try asking a question from general topics.
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -123,6 +138,12 @@ You'd then get a response from the model:
## Try asking for a Database query for your data schema. ## Try asking for a Database query for your data schema.
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import io.github.ollama4j.utils.SamplePrompts;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -8,13 +8,13 @@ This is designed for prompt engineering. It allows you to easily build the promp
inferences. inferences.
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import io.github.ollama4j.utils.PromptBuilder;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; public class Main {
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.PromptBuilder;
public class AskPhi {
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
String host = "http://localhost:11434/"; String host = "http://localhost:11434/";
@@ -42,7 +42,8 @@ public class AskPhi {
.addSeparator() .addSeparator()
.add("How do I read a file in Go and print its contents to stdout?"); .add("How do I read a file in Go and print its contents to stdout?");
OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build()); boolean raw = false;
OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), raw, new OptionsBuilder().build());
System.out.println(response.getResponse()); System.out.println(response.getResponse());
} }
} }

View File

@@ -9,6 +9,8 @@ This API lets you create a custom model on the Ollama server.
### Create a model from an existing Modelfile in the Ollama server ### Create a model from an existing Modelfile in the Ollama server
```java title="CreateModel.java" ```java title="CreateModel.java"
import io.github.ollama4j.OllamaAPI;
public class CreateModel { public class CreateModel {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -7,6 +7,8 @@ sidebar_position: 5
This API lets you create a delete a model from the Ollama server. This API lets you create a delete a model from the Ollama server.
```java title="DeleteModel.java" ```java title="DeleteModel.java"
import io.github.ollama4j.OllamaAPI;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -7,6 +7,10 @@ sidebar_position: 3
This API lets you get the details of a model on the Ollama server. This API lets you get the details of a model on the Ollama server.
```java title="GetModelDetails.java" ```java title="GetModelDetails.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.ModelDetail;
import io.github.ollama4j.types.OllamaModelType;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -7,6 +7,11 @@ sidebar_position: 1
This API lets you list available models on the Ollama server. This API lets you list available models on the Ollama server.
```java title="ListModels.java" ```java title="ListModels.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.Model;
import java.util.List;
public class ListModels { public class ListModels {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -7,10 +7,13 @@ sidebar_position: 2
This API lets you pull a model on the Ollama server. This API lets you pull a model on the Ollama server.
```java title="PullModel.java" ```java title="PullModel.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
String host = "http://localhost:11434/"; String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host); OllamaAPI ollamaAPI = new OllamaAPI(host);

View File

@@ -78,9 +78,9 @@ Add the dependency to your project's `pom.xml`.
```xml ```xml
<dependency> <dependency>
<groupId>io.github.amithkoujalgi</groupId> <groupId>io.github.ollama4j</groupId>
<artifactId>ollama4j</artifactId> <artifactId>ollama4j</artifactId>
<version>1.0.27</version> <version>1.0.78</version>
</dependency> </dependency>
``` ```
@@ -116,6 +116,26 @@ or use other suitable implementations.
Create a new Java class in your project and add this code. Create a new Java class in your project and add this code.
```java ```java
import io.github.ollama4j.OllamaAPI;
public class OllamaAPITest {
public static void main(String[] args) {
OllamaAPI ollamaAPI = new OllamaAPI();
boolean isOllamaServerReachable = ollamaAPI.ping();
System.out.println("Is Ollama server running: " + isOllamaServerReachable);
}
}
```
This uses the default Ollama host as `http://localhost:11434`.
Specify a different Ollama host that you want to connect to.
```java
import io.github.ollama4j.OllamaAPI;
public class OllamaAPITest { public class OllamaAPITest {
public static void main(String[] args) { public static void main(String[] args) {
@@ -127,7 +147,7 @@ public class OllamaAPITest {
boolean isOllamaServerReachable = ollamaAPI.ping(); boolean isOllamaServerReachable = ollamaAPI.ping();
System.out.println("Is Ollama server alive: " + isOllamaServerReachable); System.out.println("Is Ollama server running: " + isOllamaServerReachable);
} }
} }
``` ```

View File

@@ -58,6 +58,10 @@ const config = {
theme: { theme: {
customCss: './src/css/custom.css', customCss: './src/css/custom.css',
}, },
gtag: {
trackingID: 'G-G7FLH6FNDC',
anonymizeIP: false,
},
}), }),
], ],
], ],

View File

@@ -9,6 +9,7 @@
"version": "0.0.0", "version": "0.0.0",
"dependencies": { "dependencies": {
"@docusaurus/core": "^3.4.0", "@docusaurus/core": "^3.4.0",
"@docusaurus/plugin-google-gtag": "^3.4.0",
"@docusaurus/preset-classic": "^3.4.0", "@docusaurus/preset-classic": "^3.4.0",
"@docusaurus/theme-mermaid": "^3.4.0", "@docusaurus/theme-mermaid": "^3.4.0",
"@mdx-js/react": "^3.0.0", "@mdx-js/react": "^3.0.0",

View File

@@ -15,6 +15,7 @@
}, },
"dependencies": { "dependencies": {
"@docusaurus/core": "^3.4.0", "@docusaurus/core": "^3.4.0",
"@docusaurus/plugin-google-gtag": "^3.4.0",
"@docusaurus/preset-classic": "^3.4.0", "@docusaurus/preset-classic": "^3.4.0",
"@docusaurus/theme-mermaid": "^3.4.0", "@docusaurus/theme-mermaid": "^3.4.0",
"@mdx-js/react": "^3.0.0", "@mdx-js/react": "^3.0.0",

View File

@@ -1,46 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
public class OllamaGenerateRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody{
private String prompt;
private List<String> images;
private String system;
private String context;
private boolean raw;
public OllamaGenerateRequestModel() {
}
public OllamaGenerateRequestModel(String model, String prompt) {
this.model = model;
this.prompt = prompt;
}
public OllamaGenerateRequestModel(String model, String prompt, List<String> images) {
this.model = model;
this.prompt = prompt;
this.images = images;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof OllamaGenerateRequestModel)) {
return false;
}
return this.toString().equals(o.toString());
}
}

View File

@@ -1,21 +1,22 @@
package io.github.amithkoujalgi.ollama4j.core; package io.github.ollama4j;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolInvocationException; import io.github.ollama4j.exceptions.ToolInvocationException;
import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolNotFoundException; import io.github.ollama4j.exceptions.ToolNotFoundException;
import io.github.amithkoujalgi.ollama4j.core.models.*; import io.github.ollama4j.models.chat.OllamaChatMessage;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage; import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; import io.github.ollama4j.models.embeddings.OllamaEmbeddingResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel; import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; import io.github.ollama4j.models.ps.ModelsProcessResponse;
import io.github.amithkoujalgi.ollama4j.core.models.request.*; import io.github.ollama4j.models.request.*;
import io.github.amithkoujalgi.ollama4j.core.tools.*; import io.github.ollama4j.models.response.*;
import io.github.amithkoujalgi.ollama4j.core.utils.Options; import io.github.ollama4j.tools.*;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.Utils;
import lombok.Setter; import lombok.Setter;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -57,7 +58,14 @@ public class OllamaAPI {
private final ToolRegistry toolRegistry = new ToolRegistry(); private final ToolRegistry toolRegistry = new ToolRegistry();
/** /**
* Instantiates the Ollama API. * Instantiates the Ollama API with default Ollama host: <a href="http://localhost:11434">http://localhost:11434</a>
**/
public OllamaAPI() {
this.host = "http://localhost:11434";
}
/**
* Instantiates the Ollama API with specified Ollama host address.
* *
* @param host the host address of Ollama server * @param host the host address of Ollama server
*/ */
@@ -110,6 +118,37 @@ public class OllamaAPI {
return statusCode == 200; return statusCode == 200;
} }
/**
* Provides a list of running models and details about each model currently loaded into memory.
*
* @return ModelsProcessResponse
*/
public ModelsProcessResponse ps() throws IOException, InterruptedException, OllamaBaseException {
String url = this.host + "/api/ps";
HttpClient httpClient = HttpClient.newHttpClient();
HttpRequest httpRequest = null;
try {
httpRequest =
getRequestBuilderDefault(new URI(url))
.header("Accept", "application/json")
.header("Content-type", "application/json")
.GET()
.build();
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
HttpResponse<String> response = null;
response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
int statusCode = response.statusCode();
String responseString = response.body();
if (statusCode == 200) {
return Utils.getObjectMapper()
.readValue(responseString, ModelsProcessResponse.class);
} else {
throw new OllamaBaseException(statusCode + " - " + responseString);
}
}
/** /**
* List available models from Ollama server. * List available models from Ollama server.
* *
@@ -351,7 +390,7 @@ public class OllamaAPI {
*/ */
public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler) public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler)
throws OllamaBaseException, IOException, InterruptedException { throws OllamaBaseException, IOException, InterruptedException {
OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt);
ollamaRequestModel.setRaw(raw); ollamaRequestModel.setRaw(raw);
ollamaRequestModel.setOptions(options.getOptionsMap()); ollamaRequestModel.setOptions(options.getOptionsMap());
return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
@@ -420,7 +459,7 @@ public class OllamaAPI {
* @return the ollama async result callback handle * @return the ollama async result callback handle
*/ */
public OllamaAsyncResultStreamer generateAsync(String model, String prompt, boolean raw) { public OllamaAsyncResultStreamer generateAsync(String model, String prompt, boolean raw) {
OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt);
ollamaRequestModel.setRaw(raw); ollamaRequestModel.setRaw(raw);
URI uri = URI.create(this.host + "/api/generate"); URI uri = URI.create(this.host + "/api/generate");
OllamaAsyncResultStreamer ollamaAsyncResultStreamer = OllamaAsyncResultStreamer ollamaAsyncResultStreamer =
@@ -450,7 +489,7 @@ public class OllamaAPI {
for (File imageFile : imageFiles) { for (File imageFile : imageFiles) {
images.add(encodeFileToBase64(imageFile)); images.add(encodeFileToBase64(imageFile));
} }
OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images);
ollamaRequestModel.setOptions(options.getOptionsMap()); ollamaRequestModel.setOptions(options.getOptionsMap());
return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
} }
@@ -486,7 +525,7 @@ public class OllamaAPI {
for (String imageURL : imageURLs) { for (String imageURL : imageURLs) {
images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL))); images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL)));
} }
OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images);
ollamaRequestModel.setOptions(options.getOptionsMap()); ollamaRequestModel.setOptions(options.getOptionsMap());
return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
} }
@@ -520,7 +559,7 @@ public class OllamaAPI {
} }
/** /**
* Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. * Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}.
* <p> * <p>
* Hint: the OllamaChatRequestModel#getStream() property is not implemented. * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
* *
@@ -530,12 +569,12 @@ public class OllamaAPI {
* @throws IOException in case the responseStream can not be read * @throws IOException in case the responseStream can not be read
* @throws InterruptedException in case the server is not reachable or network issues happen * @throws InterruptedException in case the server is not reachable or network issues happen
*/ */
public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException { public OllamaChatResult chat(OllamaChatRequest request) throws OllamaBaseException, IOException, InterruptedException {
return chat(request, null); return chat(request, null);
} }
/** /**
* Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. * Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}.
* <p> * <p>
* Hint: the OllamaChatRequestModel#getStream() property is not implemented. * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
* *
@@ -546,7 +585,7 @@ public class OllamaAPI {
* @throws IOException in case the responseStream can not be read * @throws IOException in case the responseStream can not be read
* @throws InterruptedException in case the server is not reachable or network issues happen * @throws InterruptedException in case the server is not reachable or network issues happen
*/ */
public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { public OllamaChatResult chat(OllamaChatRequest request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
OllamaResult result; OllamaResult result;
if (streamHandler != null) { if (streamHandler != null) {
@@ -573,7 +612,7 @@ public class OllamaAPI {
} }
private OllamaResult generateSyncForOllamaRequestModel( private OllamaResult generateSyncForOllamaRequestModel(
OllamaGenerateRequestModel ollamaRequestModel, OllamaStreamHandler streamHandler) OllamaGenerateRequest ollamaRequestModel, OllamaStreamHandler streamHandler)
throws OllamaBaseException, IOException, InterruptedException { throws OllamaBaseException, IOException, InterruptedException {
OllamaGenerateEndpointCaller requestCaller = OllamaGenerateEndpointCaller requestCaller =
new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.exceptions; package io.github.ollama4j.exceptions;
public class OllamaBaseException extends Exception { public class OllamaBaseException extends Exception {

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.exceptions; package io.github.ollama4j.exceptions;
public class ToolInvocationException extends Exception { public class ToolInvocationException extends Exception {

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.exceptions; package io.github.ollama4j.exceptions;
public class ToolNotFoundException extends Exception { public class ToolNotFoundException extends Exception {

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.impl; package io.github.ollama4j.impl;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; import io.github.ollama4j.models.generate.OllamaStreamHandler;
public class ConsoleOutputStreamHandler implements OllamaStreamHandler { public class ConsoleOutputStreamHandler implements OllamaStreamHandler {
private final StringBuffer response = new StringBuffer(); private final StringBuffer response = new StringBuffer();

View File

@@ -1,11 +1,11 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat; package io.github.ollama4j.models.chat;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; import static io.github.ollama4j.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.annotation.JsonSerialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.github.amithkoujalgi.ollama4j.core.utils.FileToBase64Serializer; import io.github.ollama4j.utils.FileToBase64Serializer;
import java.util.List; import java.util.List;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat; package io.github.ollama4j.models.chat;
import com.fasterxml.jackson.annotation.JsonValue; import com.fasterxml.jackson.annotation.JsonValue;

View File

@@ -1,8 +1,9 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat; package io.github.ollama4j.models.chat;
import java.util.List; import java.util.List;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; import io.github.ollama4j.models.request.OllamaCommonRequest;
import io.github.ollama4j.utils.OllamaRequestBody;
import lombok.Getter; import lombok.Getter;
import lombok.Setter; import lombok.Setter;
@@ -16,20 +17,20 @@ import lombok.Setter;
*/ */
@Getter @Getter
@Setter @Setter
public class OllamaChatRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody { public class OllamaChatRequest extends OllamaCommonRequest implements OllamaRequestBody {
private List<OllamaChatMessage> messages; private List<OllamaChatMessage> messages;
public OllamaChatRequestModel() {} public OllamaChatRequest() {}
public OllamaChatRequestModel(String model, List<OllamaChatMessage> messages) { public OllamaChatRequest(String model, List<OllamaChatMessage> messages) {
this.model = model; this.model = model;
this.messages = messages; this.messages = messages;
} }
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {
if (!(o instanceof OllamaChatRequestModel)) { if (!(o instanceof OllamaChatRequest)) {
return false; return false;
} }

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat; package io.github.ollama4j.models.chat;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
@@ -11,32 +11,32 @@ import java.util.stream.Collectors;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import io.github.amithkoujalgi.ollama4j.core.utils.Options; import io.github.ollama4j.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import io.github.ollama4j.utils.Utils;
/** /**
* Helper class for creating {@link OllamaChatRequestModel} objects using the builder-pattern. * Helper class for creating {@link OllamaChatRequest} objects using the builder-pattern.
*/ */
public class OllamaChatRequestBuilder { public class OllamaChatRequestBuilder {
private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class); private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class);
private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages){ private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages){
request = new OllamaChatRequestModel(model, messages); request = new OllamaChatRequest(model, messages);
} }
private OllamaChatRequestModel request; private OllamaChatRequest request;
public static OllamaChatRequestBuilder getInstance(String model){ public static OllamaChatRequestBuilder getInstance(String model){
return new OllamaChatRequestBuilder(model, new ArrayList<>()); return new OllamaChatRequestBuilder(model, new ArrayList<>());
} }
public OllamaChatRequestModel build(){ public OllamaChatRequest build(){
return request; return request;
} }
public void reset(){ public void reset(){
request = new OllamaChatRequestModel(request.getModel(), new ArrayList<>()); request = new OllamaChatRequest(request.getModel(), new ArrayList<>());
} }
public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images){ public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images){

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat; package io.github.ollama4j.models.chat;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data; import lombok.Data;

View File

@@ -1,8 +1,8 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat; package io.github.ollama4j.models.chat;
import java.util.List; import java.util.List;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; import io.github.ollama4j.models.response.OllamaResult;
/** /**
* Specific chat-API result that contains the chat history sent to the model and appends the answer as {@link OllamaChatResult} given by the * Specific chat-API result that contains the chat history sent to the model and appends the answer as {@link OllamaChatResult} given by the

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat; package io.github.ollama4j.models.chat;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; import io.github.ollama4j.models.generate.OllamaStreamHandler;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models.embeddings; package io.github.ollama4j.models.embeddings;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.embeddings; package io.github.ollama4j.models.embeddings;
import io.github.amithkoujalgi.ollama4j.core.utils.Options; import io.github.ollama4j.utils.Options;
public class OllamaEmbeddingsRequestBuilder { public class OllamaEmbeddingsRequestBuilder {

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.embeddings; package io.github.ollama4j.models.embeddings;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; import static io.github.ollama4j.utils.Utils.getObjectMapper;
import java.util.Map; import java.util.Map;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;

View File

@@ -0,0 +1,46 @@
package io.github.ollama4j.models.generate;
import io.github.ollama4j.models.request.OllamaCommonRequest;
import io.github.ollama4j.utils.OllamaRequestBody;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
public class OllamaGenerateRequest extends OllamaCommonRequest implements OllamaRequestBody{
private String prompt;
private List<String> images;
private String system;
private String context;
private boolean raw;
public OllamaGenerateRequest() {
}
public OllamaGenerateRequest(String model, String prompt) {
this.model = model;
this.prompt = prompt;
}
public OllamaGenerateRequest(String model, String prompt, List<String> images) {
this.model = model;
this.prompt = prompt;
this.images = images;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof OllamaGenerateRequest)) {
return false;
}
return this.toString().equals(o.toString());
}
}

View File

@@ -1,24 +1,24 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate; package io.github.ollama4j.models.generate;
import io.github.amithkoujalgi.ollama4j.core.utils.Options; import io.github.ollama4j.utils.Options;
/** /**
* Helper class for creating {@link io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel} * Helper class for creating {@link OllamaGenerateRequest}
* objects using the builder-pattern. * objects using the builder-pattern.
*/ */
public class OllamaGenerateRequestBuilder { public class OllamaGenerateRequestBuilder {
private OllamaGenerateRequestBuilder(String model, String prompt){ private OllamaGenerateRequestBuilder(String model, String prompt){
request = new OllamaGenerateRequestModel(model, prompt); request = new OllamaGenerateRequest(model, prompt);
} }
private OllamaGenerateRequestModel request; private OllamaGenerateRequest request;
public static OllamaGenerateRequestBuilder getInstance(String model){ public static OllamaGenerateRequestBuilder getInstance(String model){
return new OllamaGenerateRequestBuilder(model,""); return new OllamaGenerateRequestBuilder(model,"");
} }
public OllamaGenerateRequestModel build(){ public OllamaGenerateRequest build(){
return request; return request;
} }

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate; package io.github.ollama4j.models.generate;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate; package io.github.ollama4j.models.generate;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate; package io.github.ollama4j.models.generate;
import java.util.function.Consumer; import java.util.function.Consumer;

View File

@@ -0,0 +1,63 @@
package io.github.ollama4j.models.ps;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
@Data
@NoArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public class ModelsProcessResponse {
@JsonProperty("models")
private List<ModelProcess> models;
@Data
@NoArgsConstructor
public static class ModelProcess {
@JsonProperty("name")
private String name;
@JsonProperty("model")
private String model;
@JsonProperty("size")
private long size;
@JsonProperty("digest")
private String digest;
@JsonProperty("details")
private ModelDetails details;
@JsonProperty("expires_at")
private String expiresAt; // Consider using LocalDateTime if you need to process date/time
@JsonProperty("size_vram")
private long sizeVram;
}
@Data
@NoArgsConstructor
public static class ModelDetails {
@JsonProperty("parent_model")
private String parentModel;
@JsonProperty("format")
private String format;
@JsonProperty("family")
private String family;
@JsonProperty("families")
private List<String> families;
@JsonProperty("parameter_size")
private String parameterSize;
@JsonProperty("quantization_level")
private String quantizationLevel;
}
}

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.request;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Data; import lombok.Data;

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.request; package io.github.ollama4j.models.request;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; import static io.github.ollama4j.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.request; package io.github.ollama4j.models.request;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; import static io.github.ollama4j.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.request; package io.github.ollama4j.models.request;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; import static io.github.ollama4j.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;

View File

@@ -1,14 +1,13 @@
package io.github.amithkoujalgi.ollama4j.core.models.request; package io.github.ollama4j.models.request;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; import io.github.ollama4j.models.response.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; import io.github.ollama4j.models.chat.OllamaChatResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel; import io.github.ollama4j.models.chat.OllamaChatStreamObserver;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver; import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; import io.github.ollama4j.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; import io.github.ollama4j.utils.Utils;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.request;
import java.util.Map; import java.util.Map;
import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude;
@@ -6,13 +6,13 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.annotation.JsonSerialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.github.amithkoujalgi.ollama4j.core.utils.BooleanToJsonFormatFlagSerializer; import io.github.ollama4j.utils.BooleanToJsonFormatFlagSerializer;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import io.github.ollama4j.utils.Utils;
import lombok.Data; import lombok.Data;
@Data @Data
@JsonInclude(JsonInclude.Include.NON_NULL) @JsonInclude(JsonInclude.Include.NON_NULL)
public abstract class OllamaCommonRequestModel { public abstract class OllamaCommonRequest {
protected String model; protected String model;
@JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class) @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)

View File

@@ -1,12 +1,11 @@
package io.github.amithkoujalgi.ollama4j.core.models.request; package io.github.ollama4j.models.request;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; import io.github.ollama4j.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; import io.github.ollama4j.models.response.OllamaErrorResponse;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel; import io.github.ollama4j.models.response.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; import io.github.ollama4j.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; import io.github.ollama4j.utils.Utils;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -78,19 +77,19 @@ public abstract class OllamaEndpointCaller {
while ((line = reader.readLine()) != null) { while ((line = reader.readLine()) != null) {
if (statusCode == 404) { if (statusCode == 404) {
LOG.warn("Status code: 404 (Not Found)"); LOG.warn("Status code: 404 (Not Found)");
OllamaErrorResponseModel ollamaResponseModel = OllamaErrorResponse ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class); Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
responseBuffer.append(ollamaResponseModel.getError()); responseBuffer.append(ollamaResponseModel.getError());
} else if (statusCode == 401) { } else if (statusCode == 401) {
LOG.warn("Status code: 401 (Unauthorized)"); LOG.warn("Status code: 401 (Unauthorized)");
OllamaErrorResponseModel ollamaResponseModel = OllamaErrorResponse ollamaResponseModel =
Utils.getObjectMapper() Utils.getObjectMapper()
.readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class); .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponse.class);
responseBuffer.append(ollamaResponseModel.getError()); responseBuffer.append(ollamaResponseModel.getError());
} else if (statusCode == 400) { } else if (statusCode == 400) {
LOG.warn("Status code: 400 (Bad Request)"); LOG.warn("Status code: 400 (Bad Request)");
OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaErrorResponse ollamaResponseModel = Utils.getObjectMapper().readValue(line,
OllamaErrorResponseModel.class); OllamaErrorResponse.class);
responseBuffer.append(ollamaResponseModel.getError()); responseBuffer.append(ollamaResponseModel.getError());
} else { } else {
boolean finished = parseResponseAndAddToBuffer(line, responseBuffer); boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);

View File

@@ -1,14 +1,13 @@
package io.github.amithkoujalgi.ollama4j.core.models.request; package io.github.ollama4j.models.request;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; import io.github.ollama4j.models.response.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel; import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver; import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; import io.github.ollama4j.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; import io.github.ollama4j.utils.Utils;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;

View File

@@ -1,6 +1,7 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import java.util.List; import java.util.List;
import lombok.Data; import lombok.Data;
@Data @Data

View File

@@ -1,11 +1,10 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import java.time.LocalDateTime;
import java.time.OffsetDateTime; import java.time.OffsetDateTime;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import io.github.ollama4j.utils.Utils;
import lombok.Data; import lombok.Data;
@Data @Data

View File

@@ -1,9 +1,9 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import io.github.ollama4j.utils.Utils;
import lombok.Data; import lombok.Data;
@Data @Data

View File

@@ -1,9 +1,9 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import io.github.ollama4j.utils.Utils;
import lombok.Data; import lombok.Data;
@Data @Data

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data; import lombok.Data;

View File

@@ -1,10 +1,9 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import io.github.amithkoujalgi.ollama4j.core.OllamaResultStream; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel; import io.github.ollama4j.utils.Utils;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data; import lombok.Data;
import lombok.EqualsAndHashCode; import lombok.EqualsAndHashCode;
import lombok.Getter; import lombok.Getter;
@@ -25,7 +24,7 @@ import java.time.Duration;
@SuppressWarnings("unused") @SuppressWarnings("unused")
public class OllamaAsyncResultStreamer extends Thread { public class OllamaAsyncResultStreamer extends Thread {
private final HttpRequest.Builder requestBuilder; private final HttpRequest.Builder requestBuilder;
private final OllamaGenerateRequestModel ollamaRequestModel; private final OllamaGenerateRequest ollamaRequestModel;
private final OllamaResultStream stream = new OllamaResultStream(); private final OllamaResultStream stream = new OllamaResultStream();
private String completeResponse; private String completeResponse;
@@ -56,7 +55,7 @@ public class OllamaAsyncResultStreamer extends Thread {
public OllamaAsyncResultStreamer( public OllamaAsyncResultStreamer(
HttpRequest.Builder requestBuilder, HttpRequest.Builder requestBuilder,
OllamaGenerateRequestModel ollamaRequestModel, OllamaGenerateRequest ollamaRequestModel,
long requestTimeoutSeconds) { long requestTimeoutSeconds) {
this.requestBuilder = requestBuilder; this.requestBuilder = requestBuilder;
this.ollamaRequestModel = ollamaRequestModel; this.ollamaRequestModel = ollamaRequestModel;
@@ -91,8 +90,8 @@ public class OllamaAsyncResultStreamer extends Thread {
StringBuilder responseBuffer = new StringBuilder(); StringBuilder responseBuffer = new StringBuilder();
while ((line = reader.readLine()) != null) { while ((line = reader.readLine()) != null) {
if (statusCode == 404) { if (statusCode == 404) {
OllamaErrorResponseModel ollamaResponseModel = OllamaErrorResponse ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class); Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
stream.add(ollamaResponseModel.getError()); stream.add(ollamaResponseModel.getError());
responseBuffer.append(ollamaResponseModel.getError()); responseBuffer.append(ollamaResponseModel.getError());
} else { } else {

View File

@@ -1,11 +1,11 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data; import lombok.Data;
@Data @Data
@JsonIgnoreProperties(ignoreUnknown = true) @JsonIgnoreProperties(ignoreUnknown = true)
public class OllamaErrorResponseModel { public class OllamaErrorResponse {
private String error; private String error;
} }

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; import static io.github.ollama4j.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.Data; import lombok.Data;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core; package io.github.ollama4j.models.response;
import java.util.Iterator; import java.util.Iterator;
import java.util.LinkedList; import java.util.LinkedList;

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.tools; package io.github.ollama4j.tools;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; import io.github.ollama4j.models.response.OllamaResult;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Data; import lombok.Data;
import lombok.NoArgsConstructor; import lombok.NoArgsConstructor;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.tools; package io.github.ollama4j.tools;
import java.util.Map; import java.util.Map;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.tools; package io.github.ollama4j.tools;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Data; import lombok.Data;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.tools; package io.github.ollama4j.tools;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;

View File

@@ -1,11 +1,11 @@
package io.github.amithkoujalgi.ollama4j.core.tools; package io.github.ollama4j.tools;
import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import io.github.ollama4j.utils.Utils;
import lombok.Builder; import lombok.Builder;
import lombok.Data; import lombok.Data;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.types; package io.github.ollama4j.types;
/** /**
* A class to provide constants for all the supported models by Ollama. * A class to provide constants for all the supported models by Ollama.

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
import java.io.IOException; import java.io.IOException;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
import java.io.IOException; import java.io.IOException;
import java.util.Base64; import java.util.Base64;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
import java.net.http.HttpRequest.BodyPublisher; import java.net.http.HttpRequest.BodyPublisher;
import java.net.http.HttpRequest.BodyPublishers; import java.net.http.HttpRequest.BodyPublishers;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
import java.util.Map; import java.util.Map;
import lombok.Data; import lombok.Data;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
import java.util.HashMap; import java.util.HashMap;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
/** /**
* The {@code PromptBuilder} class is used to construct prompt texts for language models (LLMs). It * The {@code PromptBuilder} class is used to construct prompt texts for language models (LLMs). It

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; import io.github.ollama4j.OllamaAPI;
import java.io.InputStream; import java.io.InputStream;
import java.util.Scanner; import java.util.Scanner;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.io.IOException; import java.io.IOException;

View File

@@ -1,16 +1,16 @@
package io.github.amithkoujalgi.ollama4j.integrationtests; package io.github.ollama4j.integrationtests;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; import io.github.ollama4j.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; import io.github.ollama4j.models.response.ModelDetail;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole; import io.github.ollama4j.models.response.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; import io.github.ollama4j.utils.OptionsBuilder;
import lombok.Data; import lombok.Data;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Order;
@@ -177,7 +177,7 @@ class TestRealAPIs {
testEndpointReachability(); testEndpointReachability();
try { try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
.withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!") .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!")
.withMessage(OllamaChatMessageRole.USER, "And what is the second larges city?") .withMessage(OllamaChatMessageRole.USER, "And what is the second larges city?")
.build(); .build();
@@ -197,7 +197,7 @@ class TestRealAPIs {
testEndpointReachability(); testEndpointReachability();
try { try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM,
"You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
.withMessage(OllamaChatMessageRole.USER, .withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?") "What is the capital of France? And what's France's connection with Mona Lisa?")
@@ -219,7 +219,7 @@ class TestRealAPIs {
testEndpointReachability(); testEndpointReachability();
try { try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?") "What is the capital of France? And what's France's connection with Mona Lisa?")
.build(); .build();
@@ -245,7 +245,7 @@ class TestRealAPIs {
try { try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder builder =
OllamaChatRequestBuilder.getInstance(config.getImageModel()); OllamaChatRequestBuilder.getInstance(config.getImageModel());
OllamaChatRequestModel requestModel = OllamaChatRequest requestModel =
builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
@@ -275,7 +275,7 @@ class TestRealAPIs {
testEndpointReachability(); testEndpointReachability();
try { try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel()); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg") "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")
.build(); .build();

View File

@@ -1,12 +1,12 @@
package io.github.amithkoujalgi.ollama4j.unittests; package io.github.ollama4j.unittests;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; import io.github.ollama4j.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; import io.github.ollama4j.models.response.ModelDetail;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultStreamer; import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; import io.github.ollama4j.models.response.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType; import io.github.ollama4j.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; import io.github.ollama4j.utils.OptionsBuilder;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.mockito.Mockito; import org.mockito.Mockito;

View File

@@ -1,10 +1,10 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson; package io.github.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assertions.fail;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import io.github.ollama4j.utils.Utils;
public abstract class AbstractSerializationTest<T> { public abstract class AbstractSerializationTest<T> {

View File

@@ -1,20 +1,20 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson; package io.github.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.File; import java.io.File;
import java.util.List; import java.util.List;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import org.json.JSONObject; import org.json.JSONObject;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole; import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; import io.github.ollama4j.utils.OptionsBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
public class TestChatRequestSerialization extends AbstractSerializationTest<OllamaChatRequestModel> { public class TestChatRequestSerialization extends AbstractSerializationTest<OllamaChatRequest> {
private OllamaChatRequestBuilder builder; private OllamaChatRequestBuilder builder;
@@ -25,32 +25,32 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
@Test @Test
public void testRequestOnlyMandatoryFields() { public void testRequestOnlyMandatoryFields() {
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build(); OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req); assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
} }
@Test @Test
public void testRequestMultipleMessages() { public void testRequestMultipleMessages() {
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt") OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt")
.withMessage(OllamaChatMessageRole.USER, "Some prompt") .withMessage(OllamaChatMessageRole.USER, "Some prompt")
.build(); .build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req); assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
} }
@Test @Test
public void testRequestWithMessageAndImage() { public void testRequestWithMessageAndImage() {
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt", OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build(); List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req); assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
} }
@Test @Test
public void testRequestWithOptions() { public void testRequestWithOptions() {
OptionsBuilder b = new OptionsBuilder(); OptionsBuilder b = new OptionsBuilder();
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
.withOptions(b.setMirostat(1).build()) .withOptions(b.setMirostat(1).build())
.withOptions(b.setTemperature(1L).build()) .withOptions(b.setTemperature(1L).build())
.withOptions(b.setMirostatEta(1L).build()) .withOptions(b.setMirostatEta(1L).build())
@@ -62,7 +62,7 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
.build(); .build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
OllamaChatRequestModel deserializeRequest = deserialize(jsonRequest, OllamaChatRequestModel.class); OllamaChatRequest deserializeRequest = deserialize(jsonRequest, OllamaChatRequest.class);
assertEqualsAfterUnmarshalling(deserializeRequest, req); assertEqualsAfterUnmarshalling(deserializeRequest, req);
assertEquals(1, deserializeRequest.getOptions().get("mirostat")); assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
assertEquals(1.0, deserializeRequest.getOptions().get("temperature")); assertEquals(1.0, deserializeRequest.getOptions().get("temperature"));
@@ -76,7 +76,7 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
@Test @Test
public void testWithJsonFormat() { public void testWithJsonFormat() {
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
.withGetJsonResponse().build(); .withGetJsonResponse().build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
@@ -89,25 +89,25 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
@Test @Test
public void testWithTemplate() { public void testWithTemplate() {
OllamaChatRequestModel req = builder.withTemplate("System Template") OllamaChatRequest req = builder.withTemplate("System Template")
.build(); .build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req); assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
} }
@Test @Test
public void testWithStreaming() { public void testWithStreaming() {
OllamaChatRequestModel req = builder.withStreaming().build(); OllamaChatRequest req = builder.withStreaming().build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).isStream(), true); assertEquals(deserialize(jsonRequest, OllamaChatRequest.class).isStream(), true);
} }
@Test @Test
public void testWithKeepAlive() { public void testWithKeepAlive() {
String expectedKeepAlive = "5m"; String expectedKeepAlive = "5m";
OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive) OllamaChatRequest req = builder.withKeepAlive(expectedKeepAlive)
.build(); .build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive); assertEquals(deserialize(jsonRequest, OllamaChatRequest.class).getKeepAlive(), expectedKeepAlive);
} }
} }

View File

@@ -1,11 +1,11 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson; package io.github.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; import io.github.ollama4j.utils.OptionsBuilder;
public class TestEmbeddingsRequestSerialization extends AbstractSerializationTest<OllamaEmbeddingsRequestModel> { public class TestEmbeddingsRequestSerialization extends AbstractSerializationTest<OllamaEmbeddingsRequestModel> {

View File

@@ -1,17 +1,17 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson; package io.github.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import org.json.JSONObject; import org.json.JSONObject;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestBuilder; import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; import io.github.ollama4j.utils.OptionsBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
public class TestGenerateRequestSerialization extends AbstractSerializationTest<OllamaGenerateRequestModel> { public class TestGenerateRequestSerialization extends AbstractSerializationTest<OllamaGenerateRequest> {
private OllamaGenerateRequestBuilder builder; private OllamaGenerateRequestBuilder builder;
@@ -22,27 +22,27 @@ public class TestGenerateRequestSerialization extends AbstractSerializationTest<
@Test @Test
public void testRequestOnlyMandatoryFields() { public void testRequestOnlyMandatoryFields() {
OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build(); OllamaGenerateRequest req = builder.withPrompt("Some prompt").build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequestModel.class), req); assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequest.class), req);
} }
@Test @Test
public void testRequestWithOptions() { public void testRequestWithOptions() {
OptionsBuilder b = new OptionsBuilder(); OptionsBuilder b = new OptionsBuilder();
OllamaGenerateRequestModel req = OllamaGenerateRequest req =
builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build(); builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
OllamaGenerateRequestModel deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequestModel.class); OllamaGenerateRequest deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequest.class);
assertEqualsAfterUnmarshalling(deserializeRequest, req); assertEqualsAfterUnmarshalling(deserializeRequest, req);
assertEquals(1, deserializeRequest.getOptions().get("mirostat")); assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
} }
@Test @Test
public void testWithJsonFormat() { public void testWithJsonFormat() {
OllamaGenerateRequestModel req = OllamaGenerateRequest req =
builder.withPrompt("Some prompt").withGetJsonResponse().build(); builder.withPrompt("Some prompt").withGetJsonResponse().build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson; package io.github.ollama4j.unittests.jackson;
import io.github.amithkoujalgi.ollama4j.core.models.Model; import io.github.ollama4j.models.response.Model;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
public class TestModelRequestSerialization extends AbstractSerializationTest<Model> { public class TestModelRequestSerialization extends AbstractSerializationTest<Model> {