Compare commits

..

67 Commits

Author SHA1 Message Date
Amith Koujalgi
82be761b86 Merge pull request #75 from ollama4j/model-listing
`listModelsFromLibrary` API
2024-11-07 23:54:56 +05:30
amithkoujalgi
9c3fc49df1 added listModelsFromLibrary API 2024-11-07 23:53:11 +05:30
Amith Koujalgi
5f19eb17ac Update OllamaAPI.java 2024-11-07 21:53:41 +05:30
Amith Koujalgi
ecb04d6d82 Cleanup 2024-10-31 21:22:17 +05:30
Amith Koujalgi
3fc7e9423c Updated docs 2024-10-31 17:57:02 +05:30
Amith Koujalgi
405a08b330 Updated docs 2024-10-31 16:25:05 +05:30
Amith Koujalgi
921f745435 Custom roles support
Adds support for custom roles using `OllamaChatMessageRole`
2024-10-31 16:15:21 +05:30
Amith Koujalgi
bedfec6bf9 Update generate-embeddings.md 2024-10-30 11:07:40 +05:30
Amith Koujalgi
afa09e87a5 Update OllamaAPI.java 2024-10-30 11:02:37 +05:30
Amith Koujalgi
baf2320ea6 Updated javadoc 2024-10-30 11:01:23 +05:30
Amith Koujalgi
948a7444fb Update README.md 2024-10-30 00:54:33 +05:30
Amith Koujalgi
ec0eb8b469 Update README.md 2024-10-30 00:44:08 +05:30
Amith Koujalgi
8f33de7e59 Update README.md 2024-10-30 00:43:09 +05:30
Amith Koujalgi
8c59e6511b Update README.md 2024-10-30 00:41:55 +05:30
Amith Koujalgi
b93fc7623a Updated javadoc 2024-10-30 00:28:53 +05:30
Amith Koujalgi
bd1a57c7e0 Added support for new embed API /api/embed 2024-10-30 00:03:49 +05:30
Amith Koujalgi
7fabead249 Merge pull request #73 from daguava/tool-role
Add 'tool' role
2024-10-29 22:05:43 +05:30
Mitchell Lutzke
268a973d5e Add tool role 2024-10-27 17:06:25 -07:00
Amith Koujalgi
d949a3cb69 Merge pull request #72 from daguava/minp-custom-options
Add minp option and ability to set custom options
2024-10-27 20:05:24 +05:30
Mitchell Lutzke
e2443ed68a Add throws to the docs 2024-10-26 22:08:03 -07:00
Mitchell Lutzke
37193b1f5b slight cleanup 2024-10-26 21:36:43 -07:00
Mitchell Lutzke
e33071ae38 Add minp option and ability to set custom options 2024-10-26 21:22:46 -07:00
Amith Koujalgi
fffc8dc526 Update README.md 2024-10-16 00:55:01 +05:30
Amith Koujalgi
def950cc9c Update README.md 2024-10-15 14:27:07 +05:30
Amith Koujalgi
f4db7ca326 Update README.md 2024-10-15 11:51:20 +05:30
Amith Koujalgi
18760250ea Update README.md 2024-10-11 21:21:20 +05:30
Amith Koujalgi
233597efd1 Update README.md 2024-10-01 23:38:49 +05:30
Amith Koujalgi
cec9f29eb7 Update README.md 2024-09-15 09:43:37 +05:30
Amith Koujalgi
20cb92a418 Update README.md 2024-09-15 08:48:31 +05:30
Amith Koujalgi
b0dc38954b Update README.md 2024-09-15 08:47:22 +05:30
Amith Koujalgi
1479d0a494 Update README.md 2024-09-08 16:28:46 +05:30
Amith Koujalgi
b328daee43 Update README.md 2024-09-08 16:28:23 +05:30
Amith Koujalgi
b90c8bc622 Docs build fixes
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-09-05 01:41:24 +05:30
Amith Koujalgi
660a1b937a Added llama3.1
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-09-05 01:24:13 +05:30
Amith Koujalgi
fd961d7037 Added BMC
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-09-05 00:48:50 +05:30
Amith Koujalgi
b48f9550c3 Update README.md 2024-09-04 00:10:09 +05:30
Amith Koujalgi
363969a275 Update README.md 2024-09-04 00:05:30 +05:30
Amith Koujalgi
992625cf86 Merge pull request #63 from peaster/gradle-dependency-fix
Change Gradle dependency example to use correct declaration
2024-08-10 10:20:09 +05:30
peaster
bbebd26d07 correct Gradle dependency example to use correct declaration 2024-08-09 16:24:43 -04:00
Amith Koujalgi
3aa0fc77cb Updated withMessages method of OllamaChatRequestBuilder to reset the messages 2024-08-09 01:39:21 +05:30
Amith Koujalgi
11a98a72a1 Updated docusaurus.config.js
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-08-06 11:22:37 +05:30
Amith Koujalgi
422601c0fc Updated ModelsProcessResponse class to support ps() API
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-08-06 11:11:15 +05:30
Amith Koujalgi
75e6576a13 Updated docs
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-08-06 11:00:54 +05:30
Amith Koujalgi
51dd3f3e1e Added Json ignore properties for ModelsProcessResponse
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-08-06 10:46:35 +05:30
Amith Koujalgi
30250f79d9 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-28 00:06:55 +05:30
Amith Koujalgi
d4ee9ed051 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 23:55:21 +05:30
Amith Koujalgi
4412ac683a updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 23:53:54 +05:30
Amith Koujalgi
b5b1a26941 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 23:52:00 +05:30
Amith Koujalgi
a84230bbd1 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 23:30:53 +05:30
Amith Koujalgi
00c9b16556 Refactored classes into request and response packages
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 23:20:18 +05:30
Amith Koujalgi
9a2194334f Added ps() API
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 20:58:53 +05:30
Amith Koujalgi
f9cf11ecdf Added ps() API
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 20:54:04 +05:30
Amith Koujalgi
0af80865c3 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:59:15 +05:30
Amith Koujalgi
a304c01194 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:58:43 +05:30
Amith Koujalgi
887708864e updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:56:19 +05:30
Amith Koujalgi
2f0c4fdcc9 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:54:34 +05:30
Amith Koujalgi
73aabd7ca6 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:48:05 +05:30
Amith Koujalgi
17ca2bdee3 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:46:53 +05:30
Amith Koujalgi
e43bd3acb4 Refactored the package structure to use io.github.ollama4j instead of the old naming io.github.amithkoujalgi.ollama4j.core
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:37:47 +05:30
Amith Koujalgi
0b041f4340 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 15:59:23 +05:30
Amith Koujalgi
6c6062b757 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 15:58:24 +05:30
Amith Koujalgi
68fd8b7cc8 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 15:57:38 +05:30
Amith Koujalgi
bb6f8aa343 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 15:56:36 +05:30
Amith Koujalgi
12802be0bc updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 15:35:35 +05:30
Amith Koujalgi
bd56ccfef7 Update README.md 2024-07-27 02:45:36 +05:30
Amith Koujalgi
51563f276f updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 00:57:55 +05:30
Amith Koujalgi
6e282124bf updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 00:56:57 +05:30
99 changed files with 3115 additions and 739 deletions

6
.gitignore vendored
View File

@@ -37,6 +37,8 @@ build/
### Mac OS ### ### Mac OS ###
.DS_Store .DS_Store
/.idea/ /.idea/
/src/main/java/io/github/amithkoujalgi/ollama4j/core/localtests/
pom.xml.* pom.xml.*
release.properties release.properties
!.idea/icon.svg
src/main/java/io/github/ollama4j/localtests

18
.idea/icon.svg generated Normal file
View File

@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg version="1.1" viewBox="0 0 1478 2048" width="1280" height="1280" xmlns="http://www.w3.org/2000/svg">
<path transform="translate(0)" d="m0 0h1478v2048h-1478z" fill="#FEFEFE"/>
<path transform="translate(411,47)" d="m0 0h24l21 5 17 8 14 10 12 11 10 10 12 16 14 24 11 24 9 24 8 27 6 25 4 21 3 19 3 25 6-2 16-9 29-13 28-10 30-8 26-4 27-2h16l30 2 32 5 19 5 30 10 26 11 20 10 13 8 2-15 6-39 8-36 6-20 9-27 11-24 10-19 12-18 9-11 9-10 12-11 17-11 15-7 19-4h24l18 4 16 7 12 8 10 8 17 17 13 18 12 22 9 20 7 19 9 30 7 33 5 33 3 29 1 15v79l-3 30-4 29-4 20 16 15 17 17 8 7 18 18 9 11 10 12 14 21 9 16 8 16 5 17 7 19 10 34 5 27 3 24 1 14v42l-4 35-6 29-8 27-9 22-12 25-13 22-5 7 2 6 14 29 12 31 8 26 7 29 6 36 2 21 1 19v37l-3 34-4 25-5 24-8 27-8 21-7 16-11 21-15 24 2 5 7 10 8 15 11 29 8 29 6 31 3 22 2 24v57l-4 33-6 27-3 9-3 1h-89l-2-1v-11l2-13 6-21 3-19 1-9v-48l-3-31-4-22-7-27-6-16-8-16-12-21-4-11-3-17v-31l4-13 6-10 11-16 9-15 11-23 10-31 6-26 3-22 1-16v-33l-2-27-4-27-10-39-9-25-8-18-13-25-12-19-4-10-1-5v-13l3-11 4-8 9-10 13-17 8-13 8-14 11-27 7-25 4-21 2-20v-27l-2-22-5-27-6-21-8-22-12-25-8-14-11-16-8-10-11-13-13-13-8-7-17-13-18-11-17-9-15-6-23-7-14-3-17-2h-28l-18 2h-18l-10-3-6-5-16-32-8-14-11-15-8-10-9-10-7-7-14-11-12-9-16-10-19-10-13-6-20-8-17-5-24-5-15-2h-33l-25 4-24 6-22 8-20 9-20 11-19 13-10 8-11 9-13 13-13 17-10 15-10 18-8 18-9 10-6 3h-21l-19-2h-29l-20 3-14 3-27 9-21 10-18 11-16 12-15 13-15 15-11 14-12 17-10 17-8 16-10 25-7 24-5 24-3 25v31l4 30 5 21 9 27 12 25 10 16 7 9 16 15 6 12 3 9v15l-6 16-13 21-14 27-8 20-8 25-7 27-4 23-3 31v35l3 32 5 26 9 30 6 15 10 21 11 17 12 16 8 13 4 13v19l-4 13-12 22-9 15-8 16-7 19-7 26-5 30-2 23v42l3 26 5 22 3 12 1 9v10l-3 1h-81l-11-1-5-21-5-30-2-22v-52l2-25 5-34 5-23 7-25 8-21 11-23 9-12-1-5-14-22-10-19-11-25-10-30-6-24-5-29-3-27-1-17v-35l2-30 4-29 5-26 10-36 9-25 10-23 10-21-1-7-10-14-14-26-7-15-8-20-8-26-6-29-3-25v-66l3-27 7-33 9-29 10-25 8-16 9-17 11-17 11-15 11-13 7-8 56-56-1-6-2-5-4-26-3-32-1-17v-69l3-39 5-35 6-29 8-30 8-23 12-27 12-21 12-16 11-12 7-7 13-10 16-9 11-4z" fill="#010000"/>
<path transform="translate(856,1181)" d="m0 0h13l10 4 6 7 4 9 6 29 5 22 8 16 4-13 7-23 5-12 6-9 9-8 7-3 5-1h10l8 4 5 8v11l-6 17-6 15-4 16v22l8 38 1 9v11l-3 16-8 16-9 9-10 8-6 7-4 8-2 7-1 12v51l-2 17-4 13-11 20-5 15-3 17v21l3 17 6 16 11 28 13 38 10 37 7 33 5 33 3 28 1 18v49l-2 24-4 22-6 18-6 10-7 8-10 6-13 4h-17l-7-4-10-9-11-15-11-16-12-17-9-11-9-10-10-9-13-8-14-5-5-1h-26l-16 4-18 8-18 11-16 12-16 13-17 14-20 15-16 9-13 4h-11l-10-3-7-6-4-8-2-9v-39l2-25-6 8-2 1h-8l-13-4-8-7-4-7v-9l6-12 8-10 9-11 9-14 5-12 2-11v-17l-4-20-6-21-2-13v-16l2-12 8-16 9-13 12-16 13-21 8-17 9-27 4-20 4-39 3-39 3-63v-98l-3-35-3-13 5 2 16 11 13 10 11 9 14 12 17 16 33 33 7 8 12 13 9 11 12 14 8 10 10 13 12 16 13 18 18 27 12 19 6 8 6 4 9 1 12-3 10-6 8-11 4-11v-33l-3-17-4-11-5-7-6-3-15-4-16-9-16-8-4-1h-12l-23 5-8-1-7-6-4-10v-10l4-8 9-8 13-6 13-4 10-1-9-11-8-10-10-15-8-16-7-15-9-27-1-5v-13l3-8 8-8 9-4 6-1 8 3 7 9 15 31 8 12 8 9 2 1-6-21-4-20-1-8v-33l3-10 4-5z" fill="#020101"/>
<path transform="translate(735,724)" d="m0 0h30l24 2 27 4 20 5 27 9 29 14 18 11 16 12 11 9 15 14 12 14 10 14 9 15 7 14 7 19 5 20 2 14v34l-3 20-6 19-6 15-11 19-9 12-11 13-15 15-11 9-16 11-22 12-26 10-13 4-21 5-19 2h-117l-24-3-27-6-28-10-16-8-14-8-14-10-10-8-10-9-10-10-11-14-10-15-10-21-6-18-4-19-1-9v-31l2-15 5-20 8-21 10-19 8-12 10-13 12-13 13-13 11-9 15-11 15-9 14-8 21-9 16-6 22-6 29-5z" fill="#FEFEFE"/>
<path transform="translate(816,1496)" d="m0 0 5 1 13 21 10 18 14 27 15 31 17 40 10 27 12 36 8 28 7 30 5 28 3 28v60l-2 31-3 23-5 17-4 6-5 4-4 1h-14l-6-4-11-14-10-15-12-17-9-11-12-14-8-7-14-10-16-8-12-4-12-2h-20l-16 3-15 5-16 8-18 12-14 11-15 13-14 13-22 18-14 7-4 1h-7l-5-6-3-13v-29l3-32 6-45 11-66 20-100 13-61 2-6 11-7 4-2 7 11 10 10 13 8 18 6 6 1h25l17-4 16-7 13-9 7-6 9-11 8-14 5-15 2-10v-20l-3-11z" fill="#FEFEFE"/>
<path transform="translate(735,724)" d="m0 0h30l24 2 27 4 20 5 27 9 29 14 18 11 16 12 11 9 15 14 12 14 10 14 9 15 7 14 7 19 5 20 2 14v34l-3 20-6 19-6 15-11 19-9 12-11 13-15 15-11 9-16 11-22 12-26 10-13 4-21 5-19 2h-117l-24-3-27-6-28-10-16-8-14-8-14-10-10-8-10-9-10-10-11-14-10-15-10-21-6-18-4-19-1-9v-31l2-15 5-20 8-21 10-19 8-12 10-13 12-13 13-13 11-9 15-11 15-9 14-8 21-9 16-6 22-6 29-5zm0 63-20 2-20 4-29 10-17 8-17 10-17 13-15 14-9 11-9 14-9 19-6 20-2 14v11l3 16 6 18 7 14 8 11 11 12 10 9 18 12 16 8 15 6 25 6 15 2 14 1h89l21-3 25-6 26-11 15-9 10-8 10-9 8-8 12-18 6-13 5-16 2-12v-15l-2-14-5-16-5-12-7-13-12-16-12-13-8-7-16-12-14-8-15-8-28-10-21-5-14-2-13-1z" fill="#010101"/>
<path transform="translate(1081,140)" d="m0 0h5l5 4 9 11 11 19 11 28 6 21 7 32 4 27 3 42v49l-3 47-1 4-6-1-10-4-22-4-44-6-27-2-9-15-2-5v-40l2-34 5-38 8-38 5-20 11-29 11-23 7-10 11-13z" fill="#FEFEFE"/>
<path transform="translate(423,139)" d="m0 0 4 2 10 10 10 14 11 22 9 24 7 25 6 29 5 30 3 31 1 16v45l-6 14-5 6-29 2-31 4-35 6-11 4h-3l-3-28-1-27v-41l2-36 5-35 8-37 6-19 8-21 8-16 8-12 8-9z" fill="#FEFEFE"/>
<path transform="translate(745,1472)" d="m0 0h9l16 3 14 7 10 9 6 10 3 9 1 6v15l-4 14-8 16-9 10-9 8-15 8-12 4-10 2h-15l-13-3-16-8-11-10-6-10-5-12-2-11v-8l2-10h2l1-5 4-8 8-10 11-9 17-9 12-5 8-2z" fill="red"/>
<path transform="translate(436,735)" d="m0 0h16l15 4 12 7 10 9 7 9 5 11 2 8v21l-4 14-6 12-7 9-14 14-11 7-12 4h-15l-14-3-11-4-11-7-9-10-8-14-2-9v-21l4-14 8-16 6-9 10-10 14-8 9-3z" fill="#010101"/>
<path transform="translate(1055,735)" d="m0 0h15l16 4 11 6 10 8 7 9 8 15 5 14 1 6v20l-4 13-7 11-7 8-14 9-16 5-5 1h-16l-13-4-11-7-17-17-8-14-5-14-1-5v-20l4-13 6-10 9-10 11-8 11-5z" fill="#010101"/>
<path transform="translate(717,869)" d="m0 0h9l12 4 13 8 5-1 8-6 9-4 12-1 10 3 6 4 6 9 1 2v15l-5 10-8 7-11 8-6 4-1 6 3 17v19l-5 8-9 6-8 2h-10l-11-2-8-6-4-6-1-3v-15l3-19v-7l-16-10-11-11-3-5-1-4v-13l5-10 6-5z" fill="#020101"/>
<path transform="translate(717,1479)" d="m0 0 2 1-2 3h2v4 2l6 1 2 1 3 13-1 10-5 10h-2v2h-2v2h-2v2l-5 2-3 2-9 2v-2l-5 1-9-5-5-4v-2h-2l-2-2-6 3 1-7 5-10 8-10 11-9 17-9z" fill="pink"/>
<path transform="translate(599,1667)" d="m0 0 4 1v14l-9 48-3 19-2 1-8-20-3-11v-15l5-15 8-14 6-7z" fill="white"/>
<path transform="translate(937,1063)" d="m0 0 2 1-11 9-15 10-19 10-26 10-13 4-21 5-19 2h-117l-9-1v-1h82l37-1 18-2 32-7 14-5 16-6 10-4 17-9 11-7z" fill="#553D3C"/>
</svg>

After

Width:  |  Height:  |  Size: 6.1 KiB

View File

@@ -11,9 +11,9 @@ doxygen:
doxygen Doxyfile doxygen Doxyfile
list-releases: list-releases:
curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=asc&page=0&size=12&filter=namespace%3Aio.github.amithkoujalgi%2Cname%3Aollama4j' \ curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=desc&page=0&size=20&filter=namespace%3Aio.github.ollama4j%2Cname%3Aollama4j' \
--compressed \ --compressed \
--silent | jq '.components[].version' --silent | jq -r '.components[].version'
build-docs: build-docs:
npm i --prefix docs && npm run build --prefix docs npm i --prefix docs && npm run build --prefix docs

162
README.md
View File

@@ -4,31 +4,37 @@
<img src='https://raw.githubusercontent.com/ollama4j/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon"> <img src='https://raw.githubusercontent.com/ollama4j/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon">
</p> </p>
<div align="center">
A Java library (wrapper/binding) for [Ollama](https://ollama.ai/) server. A Java library (wrapper/binding) for Ollama server.
Find more details on the [website](https://ollama4j.github.io/ollama4j/). Find more details on the [website](https://ollama4j.github.io/ollama4j/).
![GitHub stars](https://img.shields.io/github/stars/ollama4j/ollama4j) ![GitHub stars](https://img.shields.io/github/stars/ollama4j/ollama4j)
![GitHub forks](https://img.shields.io/github/forks/ollama4j/ollama4j) ![GitHub forks](https://img.shields.io/github/forks/ollama4j/ollama4j)
![GitHub watchers](https://img.shields.io/github/watchers/ollama4j/ollama4j) ![GitHub watchers](https://img.shields.io/github/watchers/ollama4j/ollama4j)
![Contributors](https://img.shields.io/github/contributors/ollama4j/ollama4j?style=social) ![Contributors](https://img.shields.io/github/contributors/ollama4j/ollama4j?style=social)
![GitHub License](https://img.shields.io/github/license/ollama4j/ollama4j) ![GitHub License](https://img.shields.io/github/license/ollama4j/ollama4j)
[//]: # (![GitHub repo size]&#40;https://img.shields.io/github/repo-size/ollama4j/ollama4j&#41;) [//]: # (![GitHub repo size]&#40;https://img.shields.io/github/repo-size/ollama4j/ollama4j&#41;)
[//]: # (![GitHub top language]&#40;https://img.shields.io/github/languages/top/ollama4j/ollama4j&#41;) [//]: # (![GitHub top language]&#40;https://img.shields.io/github/languages/top/ollama4j/ollama4j&#41;)
[//]: # (![JitPack Downloads This Month Badge]&#40;https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fjitpack.io%2Fapi%2Fdownloads%2Fio.github.ollama4j%2Follama4j&query=%24.month&label=JitPack%20Downloads%20-%20This%20Month&#41;) [//]: # (![JitPack Downloads This Month Badge]&#40;https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fjitpack.io%2Fapi%2Fdownloads%2Fio.github.ollama4j%2Follama4j&query=%24.month&label=JitPack%20Downloads%20-%20This%20Month&#41;)
[//]: # (![JitPack Downloads This Week Badge]&#40;https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fjitpack.io%2Fapi%2Fdownloads%2Fio.github.ollama4j%2Follama4j&query=%24.week&label=JitPack%20Downloads%20-%20This%20Week&#41;) [//]: # (![JitPack Downloads This Week Badge]&#40;https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fjitpack.io%2Fapi%2Fdownloads%2Fio.github.ollama4j%2Follama4j&query=%24.week&label=JitPack%20Downloads%20-%20This%20Week&#41;)
[//]: # (![JitPack Downloads Per Month Badge]&#40;https://jitpack.io/v/ollama4j/ollama4j/month.svg&#41;) [//]: # (![JitPack Downloads Per Month Badge]&#40;https://jitpack.io/v/ollama4j/ollama4j/month.svg&#41;)
![GitHub Downloads (all assets, all releases)](https://img.shields.io/github/downloads/ollama4j/ollama4j/total?label=GitHub%20Package%20Downloads)
[//]: # (![GitHub Downloads &#40;all assets, all releases&#41;]&#40;https://img.shields.io/github/downloads/ollama4j/ollama4j/total?label=GitHub%20Package%20Downloads&#41;)
![GitHub last commit](https://img.shields.io/github/last-commit/ollama4j/ollama4j?color=green) ![GitHub last commit](https://img.shields.io/github/last-commit/ollama4j/ollama4j?color=green)
[![codecov](https://codecov.io/gh/ollama4j/ollama4j/graph/badge.svg?token=U0TE7BGP8L)](https://codecov.io/gh/ollama4j/ollama4j) [![codecov](https://codecov.io/gh/ollama4j/ollama4j/graph/badge.svg?token=U0TE7BGP8L)](https://codecov.io/gh/ollama4j/ollama4j)
![Build Status](https://github.com/ollama4j/ollama4j/actions/workflows/maven-publish.yml/badge.svg) ![Build Status](https://github.com/ollama4j/ollama4j/actions/workflows/maven-publish.yml/badge.svg)
</div>
[//]: # (![Hits]&#40;https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Follama4j%2Follama4j&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false&#41;) [//]: # (![Hits]&#40;https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Follama4j%2Follama4j&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false&#41;)
@@ -39,7 +45,7 @@ Find more details on the [website](https://ollama4j.github.io/ollama4j/).
- [How does it work?](#how-does-it-work) - [How does it work?](#how-does-it-work)
- [Requirements](#requirements) - [Requirements](#requirements)
- [Installation](#installation) - [Installation](#installation)
- [API Spec](https://ollama4j.github.io/ollama4j/docs/category/apis---model-management) - [API Spec](https://ollama4j.github.io/ollama4j/category/apis---model-management)
- [Javadoc](https://ollama4j.github.io/ollama4j/apidocs/) - [Javadoc](https://ollama4j.github.io/ollama4j/apidocs/)
- [Development](#development) - [Development](#development)
- [Contributions](#get-involved) - [Contributions](#get-involved)
@@ -63,21 +69,70 @@ Find more details on the [website](https://ollama4j.github.io/ollama4j/).
![Java](https://img.shields.io/badge/Java-11_+-green.svg?style=for-the-badge&labelColor=gray&label=Java&color=orange) ![Java](https://img.shields.io/badge/Java-11_+-green.svg?style=for-the-badge&labelColor=gray&label=Java&color=orange)
[![][ollama-shield]][ollama-link] **Or** [![][ollama-docker-shield]][ollama-docker]
[ollama-link]: https://ollama.ai/ <a href="https://ollama.com/" target="_blank">
<img src="https://img.shields.io/badge/v0.3.0-green.svg?style=for-the-badge&labelColor=gray&label=Ollama&color=blue" alt=""/>
</a>
[ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=for-the-badge&labelColor=gray <table>
<tr>
<td>
[ollama-docker]: https://hub.docker.com/r/ollama/ollama <a href="https://ollama.ai/" target="_blank">Local Installation</a>
[ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=for-the-badge&labelColor=gray </td>
<td>
<a href="https://hub.docker.com/r/ollama/ollama" target="_blank">Docker Installation</a>
</td>
</tr>
<tr>
<td>
<a href="https://ollama.com/download/Ollama-darwin.zip" target="_blank">Download for macOS</a>
<a href="https://ollama.com/download/OllamaSetup.exe" target="_blank">Download for Windows</a>
Install on Linux
```shell
curl -fsSL https://ollama.com/install.sh | sh
```
</td>
<td>
CPU only
```shell
docker run -d -p 11434:11434 \
-v ollama:/root/.ollama \
--name ollama \
ollama/ollama
```
NVIDIA GPU
```shell
docker run -d -p 11434:11434 \
--gpus=all \
-v ollama:/root/.ollama \
--name ollama \
ollama/ollama
```
</td>
</tr>
</table>
## Installation ## Installation
> [!NOTE] > [!NOTE]
> We have migrated the package repository from Maven Central to GitHub package repository due to technical issues with > We are now publishing the artifacts to both Maven Central and GitHub package repositories.
> publishing. Please update your repository settings to get latest version of Ollama4j.
> >
> Track the releases [here](https://github.com/ollama4j/ollama4j/releases) and update the dependency version > Track the releases [here](https://github.com/ollama4j/ollama4j/releases) and update the dependency version
> according to your requirements. > according to your requirements.
@@ -88,18 +143,18 @@ Find more details on the [website](https://ollama4j.github.io/ollama4j/).
[![][ollama4j-mvn-releases-shield]][ollama4j-mvn-releases-link] [![][ollama4j-mvn-releases-shield]][ollama4j-mvn-releases-link]
[ollama4j-mvn-releases-link]: https://github.com/ollama4j/ollama4j/releases [ollama4j-mvn-releases-link]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j/overview
[ollama4j-mvn-releases-shield]: https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20Maven%20Central%20 [ollama4j-mvn-releases-shield]: https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20Maven%20Central
1In your Maven project, add this dependency: In your Maven project, add this dependency:
```xml ```xml
<dependency> <dependency>
<groupId>io.github.ollama4j</groupId> <groupId>io.github.ollama4j</groupId>
<artifactId>ollama4j</artifactId> <artifactId>ollama4j</artifactId>
<version>1.0.78</version> <version>1.0.79</version>
</dependency> </dependency>
``` ```
@@ -107,9 +162,9 @@ Find more details on the [website](https://ollama4j.github.io/ollama4j/).
[![][ollama4j-releases-shield]][ollama4j-releases-link] [![][ollama4j-releases-shield]][ollama4j-releases-link]
[ollama4j-releases-link]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j/overview [ollama4j-releases-link]: https://github.com/ollama4j/ollama4j/releases
[ollama4j-releases-shield]: https://img.shields.io/github/v/release/ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20GitHub%20Packages%20 [ollama4j-releases-shield]: https://img.shields.io/github/v/release/ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20GitHub%20Packages
1. Add `GitHub Maven Packages` repository to your project's `pom.xml` or your `settings.xml`: 1. Add `GitHub Maven Packages` repository to your project's `pom.xml` or your `settings.xml`:
@@ -155,17 +210,17 @@ Find more details on the [website](https://ollama4j.github.io/ollama4j/).
<dependency> <dependency>
<groupId>io.github.ollama4j</groupId> <groupId>io.github.ollama4j</groupId>
<artifactId>ollama4j</artifactId> <artifactId>ollama4j</artifactId>
<version>1.0.78</version> <version>1.0.79</version>
</dependency> </dependency>
``` ```
##### For Gradle ### For Gradle
1. Add the dependency 1. Add the dependency
```groovy ```groovy
dependencies { dependencies {
implementation 'com.github.ollama4j:ollama4j:1.0.78' implementation 'io.github.ollama4j:ollama4j:1.0.79'
} }
``` ```
@@ -212,7 +267,10 @@ make integration-tests
Newer artifacts are published via GitHub Actions CI workflow when a new release is created from `main` branch. Newer artifacts are published via GitHub Actions CI workflow when a new release is created from `main` branch.
#### Who's using Ollama4j? ## ⭐ Give us a Star!
If you like or are using this project to build your own, please give us a star. It's a free way to show your support.
## Who's using Ollama4j?
- `Datafaker`: a library to generate fake data - `Datafaker`: a library to generate fake data
- https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api - https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api
@@ -221,38 +279,26 @@ Newer artifacts are published via GitHub Actions CI workflow when a new release
- `ollama-translator`: Minecraft 1.20.6 spigot plugin allows to easily break language barriers by using ollama on the - `ollama-translator`: Minecraft 1.20.6 spigot plugin allows to easily break language barriers by using ollama on the
server to translate all messages into a specfic target language. server to translate all messages into a specfic target language.
- https://github.com/liebki/ollama-translator - https://github.com/liebki/ollama-translator
- `AI Player`: A minecraft mod which aims to add a "second player" into the game which will actually be intelligent.
- https://github.com/shasankp000/AI-Player
- https://www.reddit.com/r/fabricmc/comments/1e65x5s/comment/ldr2vcf/
- `Ollama4j Web UI`: A web UI for Ollama written in Java using Spring Boot and Vaadin framework and
Ollama4j.
- https://github.com/ollama4j/ollama4j-web-ui
- `JnsCLI`: A command-line tool for Jenkins that manages jobs, builds, and configurations directly from the terminal while offering AI-powered error analysis for quick troubleshooting.
- https://github.com/mirum8/jnscli
- `Katie Backend`: An Open Source AI-based question-answering platform that helps companies and organizations make their private domain knowledge accessible and useful to their employees and customers.
- https://github.com/wyona/katie-backend
- `TeleLlama3 Bot`: A Question-Answering Telegram Bot.
- https://git.hiast.edu.sy/mohamadbashar.disoki/telellama3-bot
- `moqui-wechat`: A wechat plugin
- https://github.com/heguangyong/moqui-wechat
#### Traction ## Traction
[![Star History Chart](https://api.star-history.com/svg?repos=ollama4j/ollama4j&type=Date)](https://star-history.com/#ollama4j/ollama4j&Date) [![Star History Chart](https://api.star-history.com/svg?repos=ollama4j/ollama4j&type=Date)](https://star-history.com/#ollama4j/ollama4j&Date)
### Areas of improvement ## Get Involved
- [x] Use Java-naming conventions for attributes in the request/response models instead of the
snake-case conventions. (
possibly with Jackson-mapper's `@JsonProperty`)
- [x] Fix deprecated HTTP client code
- [x] Setup logging
- [x] Use lombok
- [x] Update request body creation with Java objects
- [ ] Async APIs for images
- [ ] Support for function calling with models like Mistral
- [x] generate in sync mode
- [ ] generate in async mode
- [ ] Add custom headers to requests
- [x] Add additional params for `ask` APIs such as:
- [x] `options`: additional model parameters for the Modelfile such as `temperature` -
Supported [params](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
- [x] `system`: system prompt to (overrides what is defined in the Modelfile)
- [x] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile)
- [x] `context`: the context parameter returned from a previous request, which can be used to keep a
short
conversational memory
- [x] `stream`: Add support for streaming responses from the model
- [ ] Add test cases
- [ ] Handle exceptions better (maybe throw more appropriate exceptions)
### Get Involved
<div align="center"> <div align="center">
@@ -280,6 +326,22 @@ Contributions are most welcome! Whether it's reporting a bug, proposing an enhan
with code - any sort with code - any sort
of contribution is much appreciated. of contribution is much appreciated.
## 🏷️ License and Citation
The code is available under [MIT License](./LICENSE).
If you find this project helpful in your research, please cite this work at
```
@misc{ollama4j2024,
author = {Amith Koujalgi},
title = {Ollama4j: A Java Library (Wrapper/Binding) for Ollama Server},
year = {2024},
month = {January},
url = {https://github.com/ollama4j/ollama4j}
}
```
### References ### References
- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md) - [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)
@@ -293,7 +355,7 @@ project.
<p align="center"> <p align="center">
<a href="https://github.com/ollama4j/ollama4j/graphs/contributors"> <a href="https://github.com/ollama4j/ollama4j/graphs/contributors">
<img src="https://contrib.rocks/image?repo=ollama4j/ollama4j" /> <img src="https://contrib.rocks/image?repo=ollama4j/ollama4j" alt=""/>
</a> </a>
</p> </p>

View File

@@ -58,9 +58,9 @@ elevate your projects.
I look forward to seeing the incredible applications/projects you'll build with Ollama4j! 🌟 I look forward to seeing the incredible applications/projects you'll build with Ollama4j! 🌟
Find the full API spec here: https://amithkoujalgi.github.io/ollama4j/ Find the full API spec here: https://ollama4j.github.io/ollama4j/
Find the Javadoc here: https://amithkoujalgi.github.io/ollama4j/apidocs/ Find the Javadoc here: https://ollama4j.github.io/ollama4j/apidocs/
Ollama4j Docs is powered by [Docusaurus](https://docusaurus.io). Ollama4j Docs is powered by [Docusaurus](https://docusaurus.io).

View File

@@ -10,6 +10,8 @@ Ollama server would be setup behind a gateway/reverse proxy with basic auth.
After configuring basic authentication, all subsequent requests will include the Basic Auth header. After configuring basic authentication, all subsequent requests will include the Basic Auth header.
```java ```java
import io.github.ollama4j.OllamaAPI;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -36,8 +36,9 @@ from [javadoc](https://ollama4j.github.io/ollama4j/apidocs/io/github/ollama4j/ol
## Build an empty `Options` object ## Build an empty `Options` object
```java ```java
import io.github.amithkoujalgi.ollama4j.core.utils.Options; import io.github.ollama4j.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main { public class Main {
@@ -55,8 +56,8 @@ public class Main {
## Build the `Options` object with values ## Build the `Options` object with values
```java ```java
import io.github.amithkoujalgi.ollama4j.core.utils.Options; import io.github.ollama4j.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; import io.github.ollama4j.utils.OptionsBuilder;
public class Main { public class Main {

View File

@@ -7,6 +7,8 @@ sidebar_position: 3
This API lets you check the reachability of Ollama server. This API lets you check the reachability of Ollama server.
```java ```java
import io.github.ollama4j.OllamaAPI;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -0,0 +1,30 @@
---
sidebar_position: 4
---
# PS
This API provides a list of running models and details about each model currently loaded into memory.
This API corresponds to the [PS](https://github.com/ollama/ollama/blob/main/docs/api.md#list-running-models) API.
```java
package io.github.ollama4j.localtests;
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.ps.ModelsProcessResponse;
import java.io.IOException;
public class Main {
public static void main(String[] args) {
OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
ModelsProcessResponse response = ollamaAPI.ps();
System.out.println(response);
}
}
```

View File

@@ -7,6 +7,8 @@ sidebar_position: 2
This API lets you set the request timeout for the Ollama client. This API lets you set the request timeout for the Ollama client.
```java ```java
import io.github.ollama4j.OllamaAPI;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -9,6 +9,8 @@ This API lets you set the verbosity of the Ollama client.
## Try asking a question about the model. ## Try asking a question about the model.
```java ```java
import io.github.ollama4j.OllamaAPI;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -10,6 +10,13 @@ information using the history of already asked questions and the respective answ
## Create a new conversation and use chat history to augment follow up questions ## Create a new conversation and use chat history to augment follow up questions
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.types.OllamaModelType;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -20,7 +27,7 @@ public class Main {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
// create first user question // create first user question
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
.build(); .build();
// start conversation with model // start conversation with model
@@ -75,9 +82,44 @@ You will get a response similar to:
] ]
``` ```
## Conversational loop
```java
public class Main {
public static void main(String[] args) {
OllamaAPI ollamaAPI = new OllamaAPI();
ollamaAPI.setRequestTimeoutSeconds(60);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("<your-model>");
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "<your-first-message>").build();
OllamaChatResult initialChatResult = ollamaAPI.chat(requestModel);
System.out.println(initialChatResult.getResponse());
List<OllamaChatMessage> history = initialChatResult.getChatHistory();
while (true) {
OllamaChatResult chatResult = ollamaAPI.chat(builder.withMessages(history).withMessage(OllamaChatMessageRole.USER, "<your-new-message").build());
System.out.println(chatResult.getResponse());
history = chatResult.getChatHistory();
}
}
}
```
## Create a conversation where the answer is streamed ## Create a conversation where the answer is streamed
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -86,7 +128,7 @@ public class Main {
OllamaAPI ollamaAPI = new OllamaAPI(host); OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?") "What is the capital of France? And what's France's connection with Mona Lisa?")
.build(); .build();
@@ -113,7 +155,13 @@ You will get a response similar to:
## Use a simple Console Output Stream Handler ## Use a simple Console Output Stream Handler
```java ```java
import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler; import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.impl.ConsoleOutputStreamHandler;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.types.OllamaModelType;
public class Main { public class Main {
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
@@ -121,7 +169,7 @@ public class Main {
OllamaAPI ollamaAPI = new OllamaAPI(host); OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!") OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
.build(); .build();
OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler(); OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
ollamaAPI.chat(requestModel, streamHandler); ollamaAPI.chat(requestModel, streamHandler);
@@ -132,6 +180,14 @@ public class Main {
## Create a new conversation with individual system prompt ## Create a new conversation with individual system prompt
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.types.OllamaModelType;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -142,7 +198,7 @@ public class Main {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
// create request with system-prompt (overriding the model defaults) and user question // create request with system-prompt (overriding the model defaults) and user question
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
.withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?") .withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
.build(); .build();
@@ -162,6 +218,16 @@ You will get a response similar to:
## Create a conversation about an image (requires model with image recognition skills) ## Create a conversation about an image (requires model with image recognition skills)
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.types.OllamaModelType;
import java.io.File;
import java.util.List;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -172,9 +238,10 @@ public class Main {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA);
// Load Image from File and attach to user message (alternatively images could also be added via URL) // Load Image from File and attach to user message (alternatively images could also be added via URL)
OllamaChatRequestModel requestModel = OllamaChatRequest requestModel =
builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); List.of(
new File("/path/to/image"))).build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel); OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
System.out.println("First answer: " + chatResult.getResponse()); System.out.println("First answer: " + chatResult.getResponse());

View File

@@ -0,0 +1,65 @@
---
sidebar_position: 8
---
# Custom Roles
Allows to manage custom roles (apart from the base roles) for chat interactions with the models.
_Particularly helpful when you would need to use different roles that the newer models support other than the base
roles._
_Base roles are `SYSTEM`, `USER`, `ASSISTANT`, `TOOL`._
### Usage
#### Add new role
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatMessageRole customRole = ollamaAPI.addCustomRole("custom-role");
}
}
```
#### List roles
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
List<OllamaChatMessageRole> roles = ollamaAPI.listRoles();
}
}
```
#### Get role
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
List<OllamaChatMessageRole> roles = ollamaAPI.getRole("custom-role");
}
}
```

View File

@@ -12,6 +12,10 @@ This API corresponds to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
import io.github.ollama4j.types.OllamaModelType;
public class Main { public class Main {
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
@@ -38,9 +42,7 @@ public class Main {
System.out.println("Complete Response:"); System.out.println("Complete Response:");
System.out.println("------------------------"); System.out.println("------------------------");
System.out.println(streamer.getResult()); System.out.println(streamer.getCompleteResponse());
} }
} }
``` ```
You will get a steaming response.

View File

@@ -8,10 +8,87 @@ Generate embeddings from a model.
Parameters: Parameters:
- `model`: name of model to generate embeddings from
- `input`: text/s to generate embeddings for
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel;
import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaEmbedResponseModel embeddings = ollamaAPI.embed("all-minilm", Arrays.asList("Why is the sky blue?", "Why is the grass green?"));
System.out.println(embeddings);
}
}
```
Or, using the `OllamaEmbedRequestModel`:
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel;
import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaEmbedResponseModel embeddings = ollamaAPI.embed(new OllamaEmbedRequestModel("all-minilm", Arrays.asList("Why is the sky blue?", "Why is the grass green?")));
System.out.println(embeddings);
}
}
```
You will get a response similar to:
```json
{
"model": "all-minilm",
"embeddings": [[-0.034674067, 0.030984823, 0.0067988685]],
"total_duration": 14173700,
"load_duration": 1198800,
"prompt_eval_count": 2
}
````
:::note
This is a deprecated API
:::
Parameters:
- `model`: name of model to generate embeddings from - `model`: name of model to generate embeddings from
- `prompt`: text to generate embeddings for - `prompt`: text to generate embeddings for
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;
import java.util.List;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -36,11 +113,6 @@ You will get a response similar to:
0.009260174818336964, 0.009260174818336964,
0.23178744316101074, 0.23178744316101074,
-0.2916173040866852, -0.2916173040866852,
-0.8924556970596313, -0.8924556970596313
0.8785552978515625,
-0.34576427936553955,
0.5742510557174683,
-0.04222835972905159,
-0.137906014919281
] ]
``` ```

View File

@@ -22,6 +22,14 @@ If you have this image downloaded and you pass the path to the downloaded image
![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg) ![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg)
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import java.io.File;
import java.util.List;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -32,7 +40,9 @@ public class Main {
OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA, OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA,
"What's in this image?", "What's in this image?",
List.of( List.of(
new File("/path/to/image"))); new File("/path/to/image")),
new OptionsBuilder().build()
);
System.out.println(result.getResponse()); System.out.println(result.getResponse());
} }
} }

View File

@@ -22,6 +22,13 @@ Passing the link of this image the following code:
![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg) ![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg)
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import java.util.List;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -32,7 +39,9 @@ public class Main {
OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA, OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA,
"What's in this image?", "What's in this image?",
List.of( List.of(
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")); "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
new OptionsBuilder().build()
);
System.out.println(result.getResponse()); System.out.println(result.getResponse());
} }
} }

View File

@@ -29,6 +29,8 @@ You could do that with ease with the `function calling` capabilities of the mode
### Create Functions ### Create Functions
We can create static functions as our tools.
This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns
fuel price value. fuel price value.
@@ -50,6 +52,8 @@ public static String getCurrentWeather(Map<String, Object> arguments) {
} }
``` ```
Another way to create our tools is by creating classes by extending `ToolFunction`.
This function takes the argument `employee-name` and performs an operation with the argument and returns employee This function takes the argument `employee-name` and performs an operation with the argument and returns employee
details. details.
@@ -211,13 +215,13 @@ Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
### Full Example ### Full Example
```java ```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; import io.github.ollama4j.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolInvocationException; import io.github.ollama4j.exceptions.ToolInvocationException;
import io.github.amithkoujalgi.ollama4j.core.tools.OllamaToolsResult; import io.github.ollama4j.tools.OllamaToolsResult;
import io.github.amithkoujalgi.ollama4j.core.tools.ToolFunction; import io.github.ollama4j.tools.ToolFunction;
import io.github.amithkoujalgi.ollama4j.core.tools.Tools; import io.github.ollama4j.tools.Tools;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; import io.github.ollama4j.utils.OptionsBuilder;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
@@ -341,7 +345,7 @@ Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
:::: ::::
### Room for improvement ### Potential Improvements
Instead of explicitly registering `ollamaAPI.registerTool(toolSpecification)`, we could introduce annotation-based tool Instead of explicitly registering `ollamaAPI.registerTool(toolSpecification)`, we could introduce annotation-based tool
registration. For example: registration. For example:

View File

@@ -16,6 +16,11 @@ to [this](/apis-extras/options-builder).
## Try asking a question about the model. ## Try asking a question about the model.
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -44,6 +49,11 @@ You will get a response similar to:
## Try asking a question, receiving the answer streamed ## Try asking a question, receiving the answer streamed
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -80,6 +90,11 @@ You will get a response similar to:
## Try asking a question from general topics. ## Try asking a question from general topics.
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
@@ -123,6 +138,12 @@ You'd then get a response from the model:
## Try asking for a Database query for your data schema. ## Try asking for a Database query for your data schema.
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import io.github.ollama4j.utils.SamplePrompts;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -8,13 +8,13 @@ This is designed for prompt engineering. It allows you to easily build the promp
inferences. inferences.
```java ```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import io.github.ollama4j.utils.PromptBuilder;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; public class Main {
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.PromptBuilder;
public class AskPhi {
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
String host = "http://localhost:11434/"; String host = "http://localhost:11434/";
@@ -42,7 +42,8 @@ public class AskPhi {
.addSeparator() .addSeparator()
.add("How do I read a file in Go and print its contents to stdout?"); .add("How do I read a file in Go and print its contents to stdout?");
OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build()); boolean raw = false;
OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), raw, new OptionsBuilder().build());
System.out.println(response.getResponse()); System.out.println(response.getResponse());
} }
} }

View File

@@ -9,6 +9,8 @@ This API lets you create a custom model on the Ollama server.
### Create a model from an existing Modelfile in the Ollama server ### Create a model from an existing Modelfile in the Ollama server
```java title="CreateModel.java" ```java title="CreateModel.java"
import io.github.ollama4j.OllamaAPI;
public class CreateModel { public class CreateModel {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -7,6 +7,8 @@ sidebar_position: 5
This API lets you create a delete a model from the Ollama server. This API lets you create a delete a model from the Ollama server.
```java title="DeleteModel.java" ```java title="DeleteModel.java"
import io.github.ollama4j.OllamaAPI;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -7,6 +7,10 @@ sidebar_position: 3
This API lets you get the details of a model on the Ollama server. This API lets you get the details of a model on the Ollama server.
```java title="GetModelDetails.java" ```java title="GetModelDetails.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.ModelDetail;
import io.github.ollama4j.types.OllamaModelType;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {

File diff suppressed because it is too large Load Diff

View File

@@ -7,6 +7,11 @@ sidebar_position: 1
This API lets you list available models on the Ollama server. This API lets you list available models on the Ollama server.
```java title="ListModels.java" ```java title="ListModels.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.Model;
import java.util.List;
public class ListModels { public class ListModels {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -7,10 +7,13 @@ sidebar_position: 2
This API lets you pull a model on the Ollama server. This API lets you pull a model on the Ollama server.
```java title="PullModel.java" ```java title="PullModel.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;
public class Main { public class Main {
public static void main(String[] args) { public static void main(String[] args) {
String host = "http://localhost:11434/"; String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host); OllamaAPI ollamaAPI = new OllamaAPI(host);

View File

@@ -78,9 +78,9 @@ Add the dependency to your project's `pom.xml`.
```xml ```xml
<dependency> <dependency>
<groupId>io.github.amithkoujalgi</groupId> <groupId>io.github.ollama4j</groupId>
<artifactId>ollama4j</artifactId> <artifactId>ollama4j</artifactId>
<version>1.0.27</version> <version>1.0.78</version>
</dependency> </dependency>
``` ```
@@ -116,6 +116,26 @@ or use other suitable implementations.
Create a new Java class in your project and add this code. Create a new Java class in your project and add this code.
```java ```java
import io.github.ollama4j.OllamaAPI;
public class OllamaAPITest {
public static void main(String[] args) {
OllamaAPI ollamaAPI = new OllamaAPI();
boolean isOllamaServerReachable = ollamaAPI.ping();
System.out.println("Is Ollama server running: " + isOllamaServerReachable);
}
}
```
This uses the default Ollama host as `http://localhost:11434`.
Specify a different Ollama host that you want to connect to.
```java
import io.github.ollama4j.OllamaAPI;
public class OllamaAPITest { public class OllamaAPITest {
public static void main(String[] args) { public static void main(String[] args) {
@@ -127,7 +147,7 @@ public class OllamaAPITest {
boolean isOllamaServerReachable = ollamaAPI.ping(); boolean isOllamaServerReachable = ollamaAPI.ping();
System.out.println("Is Ollama server alive: " + isOllamaServerReachable); System.out.println("Is Ollama server running: " + isOllamaServerReachable);
} }
} }
``` ```

View File

@@ -58,6 +58,10 @@ const config = {
theme: { theme: {
customCss: './src/css/custom.css', customCss: './src/css/custom.css',
}, },
gtag: {
trackingID: 'G-G7FLH6FNDC',
anonymizeIP: false,
},
}), }),
], ],
], ],

414
docs/package-lock.json generated
View File

@@ -9,6 +9,7 @@
"version": "0.0.0", "version": "0.0.0",
"dependencies": { "dependencies": {
"@docusaurus/core": "^3.4.0", "@docusaurus/core": "^3.4.0",
"@docusaurus/plugin-google-gtag": "^3.4.0",
"@docusaurus/preset-classic": "^3.4.0", "@docusaurus/preset-classic": "^3.4.0",
"@docusaurus/theme-mermaid": "^3.4.0", "@docusaurus/theme-mermaid": "^3.4.0",
"@mdx-js/react": "^3.0.0", "@mdx-js/react": "^3.0.0",
@@ -3382,24 +3383,6 @@
"@types/ms": "*" "@types/ms": "*"
} }
}, },
"node_modules/@types/eslint": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.56.0.tgz",
"integrity": "sha512-FlsN0p4FhuYRjIxpbdXovvHQhtlG05O1GG/RNWvdAxTboR438IOTwmrY/vLA+Xfgg06BTkP045M3vpFwTMv1dg==",
"dependencies": {
"@types/estree": "*",
"@types/json-schema": "*"
}
},
"node_modules/@types/eslint-scope": {
"version": "3.7.7",
"resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.7.tgz",
"integrity": "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==",
"dependencies": {
"@types/eslint": "*",
"@types/estree": "*"
}
},
"node_modules/@types/estree": { "node_modules/@types/estree": {
"version": "1.0.5", "version": "1.0.5",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz",
@@ -3690,9 +3673,10 @@
"integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==" "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ=="
}, },
"node_modules/@webassemblyjs/ast": { "node_modules/@webassemblyjs/ast": {
"version": "1.11.6", "version": "1.12.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.6.tgz", "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.12.1.tgz",
"integrity": "sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q==", "integrity": "sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg==",
"license": "MIT",
"dependencies": { "dependencies": {
"@webassemblyjs/helper-numbers": "1.11.6", "@webassemblyjs/helper-numbers": "1.11.6",
"@webassemblyjs/helper-wasm-bytecode": "1.11.6" "@webassemblyjs/helper-wasm-bytecode": "1.11.6"
@@ -3701,22 +3685,26 @@
"node_modules/@webassemblyjs/floating-point-hex-parser": { "node_modules/@webassemblyjs/floating-point-hex-parser": {
"version": "1.11.6", "version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz", "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz",
"integrity": "sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw==" "integrity": "sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw==",
"license": "MIT"
}, },
"node_modules/@webassemblyjs/helper-api-error": { "node_modules/@webassemblyjs/helper-api-error": {
"version": "1.11.6", "version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz", "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz",
"integrity": "sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q==" "integrity": "sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q==",
"license": "MIT"
}, },
"node_modules/@webassemblyjs/helper-buffer": { "node_modules/@webassemblyjs/helper-buffer": {
"version": "1.11.6", "version": "1.12.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz", "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz",
"integrity": "sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA==" "integrity": "sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw==",
"license": "MIT"
}, },
"node_modules/@webassemblyjs/helper-numbers": { "node_modules/@webassemblyjs/helper-numbers": {
"version": "1.11.6", "version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz", "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz",
"integrity": "sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g==", "integrity": "sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g==",
"license": "MIT",
"dependencies": { "dependencies": {
"@webassemblyjs/floating-point-hex-parser": "1.11.6", "@webassemblyjs/floating-point-hex-parser": "1.11.6",
"@webassemblyjs/helper-api-error": "1.11.6", "@webassemblyjs/helper-api-error": "1.11.6",
@@ -3726,23 +3714,26 @@
"node_modules/@webassemblyjs/helper-wasm-bytecode": { "node_modules/@webassemblyjs/helper-wasm-bytecode": {
"version": "1.11.6", "version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz", "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz",
"integrity": "sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA==" "integrity": "sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA==",
"license": "MIT"
}, },
"node_modules/@webassemblyjs/helper-wasm-section": { "node_modules/@webassemblyjs/helper-wasm-section": {
"version": "1.11.6", "version": "1.12.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz", "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz",
"integrity": "sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g==", "integrity": "sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g==",
"license": "MIT",
"dependencies": { "dependencies": {
"@webassemblyjs/ast": "1.11.6", "@webassemblyjs/ast": "1.12.1",
"@webassemblyjs/helper-buffer": "1.11.6", "@webassemblyjs/helper-buffer": "1.12.1",
"@webassemblyjs/helper-wasm-bytecode": "1.11.6", "@webassemblyjs/helper-wasm-bytecode": "1.11.6",
"@webassemblyjs/wasm-gen": "1.11.6" "@webassemblyjs/wasm-gen": "1.12.1"
} }
}, },
"node_modules/@webassemblyjs/ieee754": { "node_modules/@webassemblyjs/ieee754": {
"version": "1.11.6", "version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz", "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz",
"integrity": "sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg==", "integrity": "sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg==",
"license": "MIT",
"dependencies": { "dependencies": {
"@xtuc/ieee754": "^1.2.0" "@xtuc/ieee754": "^1.2.0"
} }
@@ -3751,6 +3742,7 @@
"version": "1.11.6", "version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.6.tgz", "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.6.tgz",
"integrity": "sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ==", "integrity": "sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ==",
"license": "Apache-2.0",
"dependencies": { "dependencies": {
"@xtuc/long": "4.2.2" "@xtuc/long": "4.2.2"
} }
@@ -3758,29 +3750,32 @@
"node_modules/@webassemblyjs/utf8": { "node_modules/@webassemblyjs/utf8": {
"version": "1.11.6", "version": "1.11.6",
"resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.6.tgz", "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.6.tgz",
"integrity": "sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA==" "integrity": "sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA==",
"license": "MIT"
}, },
"node_modules/@webassemblyjs/wasm-edit": { "node_modules/@webassemblyjs/wasm-edit": {
"version": "1.11.6", "version": "1.12.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz", "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz",
"integrity": "sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw==", "integrity": "sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g==",
"license": "MIT",
"dependencies": { "dependencies": {
"@webassemblyjs/ast": "1.11.6", "@webassemblyjs/ast": "1.12.1",
"@webassemblyjs/helper-buffer": "1.11.6", "@webassemblyjs/helper-buffer": "1.12.1",
"@webassemblyjs/helper-wasm-bytecode": "1.11.6", "@webassemblyjs/helper-wasm-bytecode": "1.11.6",
"@webassemblyjs/helper-wasm-section": "1.11.6", "@webassemblyjs/helper-wasm-section": "1.12.1",
"@webassemblyjs/wasm-gen": "1.11.6", "@webassemblyjs/wasm-gen": "1.12.1",
"@webassemblyjs/wasm-opt": "1.11.6", "@webassemblyjs/wasm-opt": "1.12.1",
"@webassemblyjs/wasm-parser": "1.11.6", "@webassemblyjs/wasm-parser": "1.12.1",
"@webassemblyjs/wast-printer": "1.11.6" "@webassemblyjs/wast-printer": "1.12.1"
} }
}, },
"node_modules/@webassemblyjs/wasm-gen": { "node_modules/@webassemblyjs/wasm-gen": {
"version": "1.11.6", "version": "1.12.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz", "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz",
"integrity": "sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA==", "integrity": "sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w==",
"license": "MIT",
"dependencies": { "dependencies": {
"@webassemblyjs/ast": "1.11.6", "@webassemblyjs/ast": "1.12.1",
"@webassemblyjs/helper-wasm-bytecode": "1.11.6", "@webassemblyjs/helper-wasm-bytecode": "1.11.6",
"@webassemblyjs/ieee754": "1.11.6", "@webassemblyjs/ieee754": "1.11.6",
"@webassemblyjs/leb128": "1.11.6", "@webassemblyjs/leb128": "1.11.6",
@@ -3788,22 +3783,24 @@
} }
}, },
"node_modules/@webassemblyjs/wasm-opt": { "node_modules/@webassemblyjs/wasm-opt": {
"version": "1.11.6", "version": "1.12.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz", "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz",
"integrity": "sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g==", "integrity": "sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg==",
"license": "MIT",
"dependencies": { "dependencies": {
"@webassemblyjs/ast": "1.11.6", "@webassemblyjs/ast": "1.12.1",
"@webassemblyjs/helper-buffer": "1.11.6", "@webassemblyjs/helper-buffer": "1.12.1",
"@webassemblyjs/wasm-gen": "1.11.6", "@webassemblyjs/wasm-gen": "1.12.1",
"@webassemblyjs/wasm-parser": "1.11.6" "@webassemblyjs/wasm-parser": "1.12.1"
} }
}, },
"node_modules/@webassemblyjs/wasm-parser": { "node_modules/@webassemblyjs/wasm-parser": {
"version": "1.11.6", "version": "1.12.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz", "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz",
"integrity": "sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ==", "integrity": "sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ==",
"license": "MIT",
"dependencies": { "dependencies": {
"@webassemblyjs/ast": "1.11.6", "@webassemblyjs/ast": "1.12.1",
"@webassemblyjs/helper-api-error": "1.11.6", "@webassemblyjs/helper-api-error": "1.11.6",
"@webassemblyjs/helper-wasm-bytecode": "1.11.6", "@webassemblyjs/helper-wasm-bytecode": "1.11.6",
"@webassemblyjs/ieee754": "1.11.6", "@webassemblyjs/ieee754": "1.11.6",
@@ -3812,23 +3809,26 @@
} }
}, },
"node_modules/@webassemblyjs/wast-printer": { "node_modules/@webassemblyjs/wast-printer": {
"version": "1.11.6", "version": "1.12.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz", "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz",
"integrity": "sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A==", "integrity": "sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA==",
"license": "MIT",
"dependencies": { "dependencies": {
"@webassemblyjs/ast": "1.11.6", "@webassemblyjs/ast": "1.12.1",
"@xtuc/long": "4.2.2" "@xtuc/long": "4.2.2"
} }
}, },
"node_modules/@xtuc/ieee754": { "node_modules/@xtuc/ieee754": {
"version": "1.2.0", "version": "1.2.0",
"resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz",
"integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==" "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==",
"license": "BSD-3-Clause"
}, },
"node_modules/@xtuc/long": { "node_modules/@xtuc/long": {
"version": "4.2.2", "version": "4.2.2",
"resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz",
"integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==" "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==",
"license": "Apache-2.0"
}, },
"node_modules/accepts": { "node_modules/accepts": {
"version": "1.3.8", "version": "1.3.8",
@@ -3872,10 +3872,11 @@
"node": ">=0.4.0" "node": ">=0.4.0"
} }
}, },
"node_modules/acorn-import-assertions": { "node_modules/acorn-import-attributes": {
"version": "1.9.0", "version": "1.9.5",
"resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz", "resolved": "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz",
"integrity": "sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==", "integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==",
"license": "MIT",
"peerDependencies": { "peerDependencies": {
"acorn": "^8" "acorn": "^8"
} }
@@ -4241,12 +4242,13 @@
} }
}, },
"node_modules/body-parser": { "node_modules/body-parser": {
"version": "1.20.1", "version": "1.20.2",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz",
"integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==",
"license": "MIT",
"dependencies": { "dependencies": {
"bytes": "3.1.2", "bytes": "3.1.2",
"content-type": "~1.0.4", "content-type": "~1.0.5",
"debug": "2.6.9", "debug": "2.6.9",
"depd": "2.0.0", "depd": "2.0.0",
"destroy": "1.2.0", "destroy": "1.2.0",
@@ -4254,7 +4256,7 @@
"iconv-lite": "0.4.24", "iconv-lite": "0.4.24",
"on-finished": "2.4.1", "on-finished": "2.4.1",
"qs": "6.11.0", "qs": "6.11.0",
"raw-body": "2.5.1", "raw-body": "2.5.2",
"type-is": "~1.6.18", "type-is": "~1.6.18",
"unpipe": "1.0.0" "unpipe": "1.0.0"
}, },
@@ -4267,6 +4269,7 @@
"version": "3.1.2", "version": "3.1.2",
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
"integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
"license": "MIT",
"engines": { "engines": {
"node": ">= 0.8" "node": ">= 0.8"
} }
@@ -4275,6 +4278,7 @@
"version": "2.6.9", "version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
"license": "MIT",
"dependencies": { "dependencies": {
"ms": "2.0.0" "ms": "2.0.0"
} }
@@ -4282,7 +4286,8 @@
"node_modules/body-parser/node_modules/ms": { "node_modules/body-parser/node_modules/ms": {
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
"license": "MIT"
}, },
"node_modules/bonjour-service": { "node_modules/bonjour-service": {
"version": "1.1.1", "version": "1.1.1",
@@ -4331,11 +4336,12 @@
} }
}, },
"node_modules/braces": { "node_modules/braces": {
"version": "3.0.2", "version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"license": "MIT",
"dependencies": { "dependencies": {
"fill-range": "^7.0.1" "fill-range": "^7.1.1"
}, },
"engines": { "engines": {
"node": ">=8" "node": ">=8"
@@ -4422,13 +4428,19 @@
} }
}, },
"node_modules/call-bind": { "node_modules/call-bind": {
"version": "1.0.5", "version": "1.0.7",
"resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz",
"integrity": "sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==", "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==",
"license": "MIT",
"dependencies": { "dependencies": {
"es-define-property": "^1.0.0",
"es-errors": "^1.3.0",
"function-bind": "^1.1.2", "function-bind": "^1.1.2",
"get-intrinsic": "^1.2.1", "get-intrinsic": "^1.2.4",
"set-function-length": "^1.1.1" "set-function-length": "^1.2.1"
},
"engines": {
"node": ">= 0.4"
}, },
"funding": { "funding": {
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
@@ -4922,6 +4934,7 @@
"version": "1.0.5", "version": "1.0.5",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
"license": "MIT",
"engines": { "engines": {
"node": ">= 0.6" "node": ">= 0.6"
} }
@@ -4932,9 +4945,10 @@
"integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="
}, },
"node_modules/cookie": { "node_modules/cookie": {
"version": "0.5.0", "version": "0.6.0",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
"integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==",
"license": "MIT",
"engines": { "engines": {
"node": ">= 0.6" "node": ">= 0.6"
} }
@@ -5961,16 +5975,20 @@
} }
}, },
"node_modules/define-data-property": { "node_modules/define-data-property": {
"version": "1.1.1", "version": "1.1.4",
"resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
"integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
"license": "MIT",
"dependencies": { "dependencies": {
"get-intrinsic": "^1.2.1", "es-define-property": "^1.0.0",
"gopd": "^1.0.1", "es-errors": "^1.3.0",
"has-property-descriptors": "^1.0.0" "gopd": "^1.0.1"
}, },
"engines": { "engines": {
"node": ">= 0.4" "node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
} }
}, },
"node_modules/define-lazy-prop": { "node_modules/define-lazy-prop": {
@@ -6301,9 +6319,10 @@
} }
}, },
"node_modules/enhanced-resolve": { "node_modules/enhanced-resolve": {
"version": "5.15.0", "version": "5.17.1",
"resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz", "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz",
"integrity": "sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg==", "integrity": "sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==",
"license": "MIT",
"dependencies": { "dependencies": {
"graceful-fs": "^4.2.4", "graceful-fs": "^4.2.4",
"tapable": "^2.2.0" "tapable": "^2.2.0"
@@ -6331,6 +6350,27 @@
"is-arrayish": "^0.2.1" "is-arrayish": "^0.2.1"
} }
}, },
"node_modules/es-define-property": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz",
"integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==",
"license": "MIT",
"dependencies": {
"get-intrinsic": "^1.2.4"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-errors": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-module-lexer": { "node_modules/es-module-lexer": {
"version": "1.4.1", "version": "1.4.1",
"resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.4.1.tgz", "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.4.1.tgz",
@@ -6579,16 +6619,17 @@
} }
}, },
"node_modules/express": { "node_modules/express": {
"version": "4.18.2", "version": "4.19.2",
"resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz",
"integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==",
"license": "MIT",
"dependencies": { "dependencies": {
"accepts": "~1.3.8", "accepts": "~1.3.8",
"array-flatten": "1.1.1", "array-flatten": "1.1.1",
"body-parser": "1.20.1", "body-parser": "1.20.2",
"content-disposition": "0.5.4", "content-disposition": "0.5.4",
"content-type": "~1.0.4", "content-type": "~1.0.4",
"cookie": "0.5.0", "cookie": "0.6.0",
"cookie-signature": "1.0.6", "cookie-signature": "1.0.6",
"debug": "2.6.9", "debug": "2.6.9",
"depd": "2.0.0", "depd": "2.0.0",
@@ -6825,9 +6866,10 @@
} }
}, },
"node_modules/fill-range": { "node_modules/fill-range": {
"version": "7.0.1", "version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"license": "MIT",
"dependencies": { "dependencies": {
"to-regex-range": "^5.0.1" "to-regex-range": "^5.0.1"
}, },
@@ -6904,15 +6946,16 @@
} }
}, },
"node_modules/follow-redirects": { "node_modules/follow-redirects": {
"version": "1.15.3", "version": "1.15.8",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.8.tgz",
"integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==", "integrity": "sha512-xgrmBhBToVKay1q2Tao5LI26B83UhrB/vM1avwVSDzt8rx3rO6AizBAaF46EgksTVr+rFTQaqZZ9MVBfUe4nig==",
"funding": [ "funding": [
{ {
"type": "individual", "type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh" "url": "https://github.com/sponsors/RubenVerborgh"
} }
], ],
"license": "MIT",
"engines": { "engines": {
"node": ">=4.0" "node": ">=4.0"
}, },
@@ -7139,15 +7182,20 @@
} }
}, },
"node_modules/get-intrinsic": { "node_modules/get-intrinsic": {
"version": "1.2.2", "version": "1.2.4",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz",
"integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==",
"license": "MIT",
"dependencies": { "dependencies": {
"es-errors": "^1.3.0",
"function-bind": "^1.1.2", "function-bind": "^1.1.2",
"has-proto": "^1.0.1", "has-proto": "^1.0.1",
"has-symbols": "^1.0.3", "has-symbols": "^1.0.3",
"hasown": "^2.0.0" "hasown": "^2.0.0"
}, },
"engines": {
"node": ">= 0.4"
},
"funding": { "funding": {
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
} }
@@ -7206,7 +7254,8 @@
"node_modules/glob-to-regexp": { "node_modules/glob-to-regexp": {
"version": "0.4.1", "version": "0.4.1",
"resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz",
"integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==" "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==",
"license": "BSD-2-Clause"
}, },
"node_modules/global-dirs": { "node_modules/global-dirs": {
"version": "3.0.1", "version": "3.0.1",
@@ -7405,11 +7454,12 @@
} }
}, },
"node_modules/has-property-descriptors": { "node_modules/has-property-descriptors": {
"version": "1.0.1", "version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
"integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
"license": "MIT",
"dependencies": { "dependencies": {
"get-intrinsic": "^1.2.2" "es-define-property": "^1.0.0"
}, },
"funding": { "funding": {
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
@@ -7949,6 +7999,7 @@
"version": "0.4.24", "version": "0.4.24",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
"license": "MIT",
"dependencies": { "dependencies": {
"safer-buffer": ">= 2.1.2 < 3" "safer-buffer": ">= 2.1.2 < 3"
}, },
@@ -8258,6 +8309,7 @@
"version": "7.0.0", "version": "7.0.0",
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
"license": "MIT",
"engines": { "engines": {
"node": ">=0.12.0" "node": ">=0.12.0"
} }
@@ -9105,6 +9157,7 @@
"version": "0.3.0", "version": "0.3.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
"integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==",
"license": "MIT",
"engines": { "engines": {
"node": ">= 0.6" "node": ">= 0.6"
} }
@@ -11285,11 +11338,12 @@
] ]
}, },
"node_modules/micromatch": { "node_modules/micromatch": {
"version": "4.0.5", "version": "4.0.8",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
"integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
"license": "MIT",
"dependencies": { "dependencies": {
"braces": "^3.0.2", "braces": "^3.0.3",
"picomatch": "^2.3.1" "picomatch": "^2.3.1"
}, },
"engines": { "engines": {
@@ -11543,9 +11597,13 @@
} }
}, },
"node_modules/object-inspect": { "node_modules/object-inspect": {
"version": "1.13.1", "version": "1.13.2",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz",
"integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": { "funding": {
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
} }
@@ -12684,6 +12742,7 @@
"version": "6.11.0", "version": "6.11.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz",
"integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==",
"license": "BSD-3-Clause",
"dependencies": { "dependencies": {
"side-channel": "^1.0.4" "side-channel": "^1.0.4"
}, },
@@ -12749,9 +12808,10 @@
} }
}, },
"node_modules/raw-body": { "node_modules/raw-body": {
"version": "2.5.1", "version": "2.5.2",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz",
"integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==",
"license": "MIT",
"dependencies": { "dependencies": {
"bytes": "3.1.2", "bytes": "3.1.2",
"http-errors": "2.0.0", "http-errors": "2.0.0",
@@ -12766,6 +12826,7 @@
"version": "3.1.2", "version": "3.1.2",
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
"integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
"license": "MIT",
"engines": { "engines": {
"node": ">= 0.8" "node": ">= 0.8"
} }
@@ -13886,14 +13947,17 @@
} }
}, },
"node_modules/set-function-length": { "node_modules/set-function-length": {
"version": "1.1.1", "version": "1.2.2",
"resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.1.1.tgz", "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz",
"integrity": "sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ==", "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==",
"license": "MIT",
"dependencies": { "dependencies": {
"define-data-property": "^1.1.1", "define-data-property": "^1.1.4",
"get-intrinsic": "^1.2.1", "es-errors": "^1.3.0",
"function-bind": "^1.1.2",
"get-intrinsic": "^1.2.4",
"gopd": "^1.0.1", "gopd": "^1.0.1",
"has-property-descriptors": "^1.0.0" "has-property-descriptors": "^1.0.2"
}, },
"engines": { "engines": {
"node": ">= 0.4" "node": ">= 0.4"
@@ -13964,13 +14028,18 @@
} }
}, },
"node_modules/side-channel": { "node_modules/side-channel": {
"version": "1.0.4", "version": "1.0.6",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz",
"integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==",
"license": "MIT",
"dependencies": { "dependencies": {
"call-bind": "^1.0.0", "call-bind": "^1.0.7",
"get-intrinsic": "^1.0.2", "es-errors": "^1.3.0",
"object-inspect": "^1.9.0" "get-intrinsic": "^1.2.4",
"object-inspect": "^1.13.1"
},
"engines": {
"node": ">= 0.4"
}, },
"funding": { "funding": {
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
@@ -14393,15 +14462,16 @@
} }
}, },
"node_modules/terser-webpack-plugin": { "node_modules/terser-webpack-plugin": {
"version": "5.3.9", "version": "5.3.10",
"resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.9.tgz", "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz",
"integrity": "sha512-ZuXsqE07EcggTWQjXUj+Aot/OMcD0bMKGgF63f7UxYcu5/AJF53aIpK1YoP5xR9l6s/Hy2b+t1AM0bLNPRuhwA==", "integrity": "sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w==",
"license": "MIT",
"dependencies": { "dependencies": {
"@jridgewell/trace-mapping": "^0.3.17", "@jridgewell/trace-mapping": "^0.3.20",
"jest-worker": "^27.4.5", "jest-worker": "^27.4.5",
"schema-utils": "^3.1.1", "schema-utils": "^3.1.1",
"serialize-javascript": "^6.0.1", "serialize-javascript": "^6.0.1",
"terser": "^5.16.8" "terser": "^5.26.0"
}, },
"engines": { "engines": {
"node": ">= 10.13.0" "node": ">= 10.13.0"
@@ -14534,6 +14604,7 @@
"version": "5.0.1", "version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"license": "MIT",
"dependencies": { "dependencies": {
"is-number": "^7.0.0" "is-number": "^7.0.0"
}, },
@@ -14603,6 +14674,7 @@
"version": "1.6.18", "version": "1.6.18",
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
"integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==",
"license": "MIT",
"dependencies": { "dependencies": {
"media-typer": "0.3.0", "media-typer": "0.3.0",
"mime-types": "~2.1.24" "mime-types": "~2.1.24"
@@ -14615,6 +14687,7 @@
"version": "1.52.0", "version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"license": "MIT",
"engines": { "engines": {
"node": ">= 0.6" "node": ">= 0.6"
} }
@@ -14623,6 +14696,7 @@
"version": "2.1.35", "version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"license": "MIT",
"dependencies": { "dependencies": {
"mime-db": "1.52.0" "mime-db": "1.52.0"
}, },
@@ -15154,9 +15228,10 @@
} }
}, },
"node_modules/watchpack": { "node_modules/watchpack": {
"version": "2.4.0", "version": "2.4.2",
"resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.0.tgz", "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.2.tgz",
"integrity": "sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==", "integrity": "sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw==",
"license": "MIT",
"dependencies": { "dependencies": {
"glob-to-regexp": "^0.4.1", "glob-to-regexp": "^0.4.1",
"graceful-fs": "^4.1.2" "graceful-fs": "^4.1.2"
@@ -15188,33 +15263,33 @@
"integrity": "sha512-PgF341avzqyx60neE9DD+XS26MMNMoUQRz9NOZwW32nPQrF6p77f1htcnjBSEV8BGMKZ16choqUG4hyI0Hx7mA==" "integrity": "sha512-PgF341avzqyx60neE9DD+XS26MMNMoUQRz9NOZwW32nPQrF6p77f1htcnjBSEV8BGMKZ16choqUG4hyI0Hx7mA=="
}, },
"node_modules/webpack": { "node_modules/webpack": {
"version": "5.89.0", "version": "5.94.0",
"resolved": "https://registry.npmjs.org/webpack/-/webpack-5.89.0.tgz", "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.94.0.tgz",
"integrity": "sha512-qyfIC10pOr70V+jkmud8tMfajraGCZMBWJtrmuBymQKCrLTRejBI8STDp1MCyZu/QTdZSeacCQYpYNQVOzX5kw==", "integrity": "sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg==",
"license": "MIT",
"dependencies": { "dependencies": {
"@types/eslint-scope": "^3.7.3", "@types/estree": "^1.0.5",
"@types/estree": "^1.0.0", "@webassemblyjs/ast": "^1.12.1",
"@webassemblyjs/ast": "^1.11.5", "@webassemblyjs/wasm-edit": "^1.12.1",
"@webassemblyjs/wasm-edit": "^1.11.5", "@webassemblyjs/wasm-parser": "^1.12.1",
"@webassemblyjs/wasm-parser": "^1.11.5",
"acorn": "^8.7.1", "acorn": "^8.7.1",
"acorn-import-assertions": "^1.9.0", "acorn-import-attributes": "^1.9.5",
"browserslist": "^4.14.5", "browserslist": "^4.21.10",
"chrome-trace-event": "^1.0.2", "chrome-trace-event": "^1.0.2",
"enhanced-resolve": "^5.15.0", "enhanced-resolve": "^5.17.1",
"es-module-lexer": "^1.2.1", "es-module-lexer": "^1.2.1",
"eslint-scope": "5.1.1", "eslint-scope": "5.1.1",
"events": "^3.2.0", "events": "^3.2.0",
"glob-to-regexp": "^0.4.1", "glob-to-regexp": "^0.4.1",
"graceful-fs": "^4.2.9", "graceful-fs": "^4.2.11",
"json-parse-even-better-errors": "^2.3.1", "json-parse-even-better-errors": "^2.3.1",
"loader-runner": "^4.2.0", "loader-runner": "^4.2.0",
"mime-types": "^2.1.27", "mime-types": "^2.1.27",
"neo-async": "^2.6.2", "neo-async": "^2.6.2",
"schema-utils": "^3.2.0", "schema-utils": "^3.2.0",
"tapable": "^2.1.1", "tapable": "^2.1.1",
"terser-webpack-plugin": "^5.3.7", "terser-webpack-plugin": "^5.3.10",
"watchpack": "^2.4.0", "watchpack": "^2.4.1",
"webpack-sources": "^3.2.3" "webpack-sources": "^3.2.3"
}, },
"bin": { "bin": {
@@ -15268,9 +15343,10 @@
} }
}, },
"node_modules/webpack-dev-middleware": { "node_modules/webpack-dev-middleware": {
"version": "5.3.3", "version": "5.3.4",
"resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz", "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz",
"integrity": "sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA==", "integrity": "sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q==",
"license": "MIT",
"dependencies": { "dependencies": {
"colorette": "^2.0.10", "colorette": "^2.0.10",
"memfs": "^3.4.3", "memfs": "^3.4.3",
@@ -15375,9 +15451,10 @@
} }
}, },
"node_modules/webpack-dev-server/node_modules/ws": { "node_modules/webpack-dev-server/node_modules/ws": {
"version": "8.15.1", "version": "8.18.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.15.1.tgz", "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz",
"integrity": "sha512-W5OZiCjXEmk0yZ66ZN82beM5Sz7l7coYxpRkzS+p9PP+ToQry8szKh+61eNktr7EA9DOwvFGhfC605jDHbP6QQ==", "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==",
"license": "MIT",
"engines": { "engines": {
"node": ">=10.0.0" "node": ">=10.0.0"
}, },
@@ -15619,9 +15696,10 @@
} }
}, },
"node_modules/ws": { "node_modules/ws": {
"version": "7.5.9", "version": "7.5.10",
"resolved": "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz", "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz",
"integrity": "sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==", "integrity": "sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==",
"license": "MIT",
"engines": { "engines": {
"node": ">=8.3.0" "node": ">=8.3.0"
}, },

View File

@@ -15,6 +15,7 @@
}, },
"dependencies": { "dependencies": {
"@docusaurus/core": "^3.4.0", "@docusaurus/core": "^3.4.0",
"@docusaurus/plugin-google-gtag": "^3.4.0",
"@docusaurus/preset-classic": "^3.4.0", "@docusaurus/preset-classic": "^3.4.0",
"@docusaurus/theme-mermaid": "^3.4.0", "@docusaurus/theme-mermaid": "^3.4.0",
"@mdx-js/react": "^3.0.0", "@mdx-js/react": "^3.0.0",

View File

@@ -0,0 +1,41 @@
import React from "react";
class BuyMeACoffee extends React.Component {
constructor(props) {
super(props)
let script = document.createElement("script");
script.src = 'https://cdnjs.buymeacoffee.com/1.0.0/widget.prod.min.js';
script.dataset.name = 'BMC-Widget';
script.dataset.cfasync = 'false';
script.dataset.id = 'amithkoujalgi';
script.dataset.description = 'Support me on Buy me a coffee!';
script.dataset.message = 'If you like my work and want to say thanks, or encourage me to do more, you can buy me a coffee! 😊';
script.dataset.color = '#2e8555';
script.dataset.position = 'Right';
script.dataset.x_margin = '18';
script.dataset.y_margin = '18';
script.async = true
script.onload = function () {
let evt = document.createEvent('Event');
evt.initEvent('DOMContentLoaded', false, false);
window.dispatchEvent(evt);
}
this.script = script
}
componentDidMount() {
document.head.appendChild(this.script)
}
// componentWillUnmount() {
// document.head.removeChild(this.script);
// document.body.removeChild(document.getElementById("bmc-wbtn"))
// }
render() {
return null
}
}
export default BuyMeACoffee;

View File

@@ -37,4 +37,12 @@ div > h1,
header > h1, header > h1,
h2 > a { h2 > a {
font-size: 2rem !important; font-size: 2rem !important;
}
#bmc-wbtn{
bottom: 15px;
}
#bmc-wbtn + div{
bottom:15px;
} }

View File

@@ -3,38 +3,43 @@ import Link from '@docusaurus/Link';
import useDocusaurusContext from '@docusaurus/useDocusaurusContext'; import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
import Layout from '@theme/Layout'; import Layout from '@theme/Layout';
import HomepageFeatures from '@site/src/components/HomepageFeatures'; import HomepageFeatures from '@site/src/components/HomepageFeatures';
import BuyMeACoffee from '@site/src/components/BuyMeACoffee';
import Heading from '@theme/Heading'; import Heading from '@theme/Heading';
import styles from './index.module.css'; import styles from './index.module.css';
import BrowserOnly from '@docusaurus/BrowserOnly';
function HomepageHeader() { function HomepageHeader() {
const {siteConfig} = useDocusaurusContext(); const {siteConfig} = useDocusaurusContext();
return (<header className={clsx('hero hero--primary', styles.heroBanner)}> return (<header className={clsx('hero hero--primary', styles.heroBanner)}>
<div className="container"> <div className="container">
<Heading as="h1" className="hero__title"> <Heading as="h1" className="hero__title">
{siteConfig.title} {siteConfig.title}
</Heading> </Heading>
<img src="img/logo.svg" alt="Ollama4j Logo" className={styles.logo} style={{maxWidth: '20vh'}}/> <img src="img/logo.svg" alt="Ollama4j Logo" className={styles.logo}
<p className="hero__subtitle">{siteConfig.tagline}</p> style={{maxWidth: '20vh'}}/>
<div className={styles.buttons}> <p className="hero__subtitle">{siteConfig.tagline}</p>
<Link <div className={styles.buttons}>
className="button button--secondary button--lg" <Link
to="/intro"> className="button button--secondary button--lg"
Getting Started to="/intro">
</Link> Getting Started
</div> </Link>
</div> </div>
</header>); </div>
</header>);
} }
export default function Home() { export default function Home() {
const {siteConfig} = useDocusaurusContext(); const {siteConfig} = useDocusaurusContext();
return (<Layout return (<Layout
title={`Hello from ${siteConfig.title}`} title={`Hello from ${siteConfig.title}`}
description="Description will go into a meta tag in <head />"> description="Description will go into a meta tag in <head />">
<HomepageHeader/> <HomepageHeader/>
<main> <main>
<HomepageFeatures/> <HomepageFeatures/>
</main> <BrowserOnly>
</Layout>); {() => <BuyMeACoffee />}
} </BrowserOnly>
</main>
</Layout>);
}

View File

@@ -63,6 +63,10 @@
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId> <artifactId>maven-javadoc-plugin</artifactId>
<version>3.5.0</version> <version>3.5.0</version>
<configuration>
<!-- to disable the "missing" warnings. Remove the doclint to enable warnings-->
<doclint>all,-missing</doclint>
</configuration>
<executions> <executions>
<execution> <execution>
<id>attach-javadocs</id> <id>attach-javadocs</id>
@@ -136,6 +140,11 @@
<version>${lombok.version}</version> <version>${lombok.version}</version>
<scope>provided</scope> <scope>provided</scope>
</dependency> </dependency>
<dependency>
<groupId>org.jsoup</groupId>
<artifactId>jsoup</artifactId>
<version>1.18.1</version>
</dependency>
<dependency> <dependency>
<groupId>com.fasterxml.jackson.core</groupId> <groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId> <artifactId>jackson-databind</artifactId>

View File

@@ -1,19 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import com.fasterxml.jackson.annotation.JsonValue;
/**
* Defines the possible Chat Message roles.
*/
public enum OllamaChatMessageRole {
SYSTEM("system"),
USER("user"),
ASSISTANT("assistant");
@JsonValue
private String roleName;
private OllamaChatMessageRole(String roleName){
this.roleName = roleName;
}
}

View File

@@ -1,46 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
public class OllamaGenerateRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody{
private String prompt;
private List<String> images;
private String system;
private String context;
private boolean raw;
public OllamaGenerateRequestModel() {
}
public OllamaGenerateRequestModel(String model, String prompt) {
this.model = model;
this.prompt = prompt;
}
public OllamaGenerateRequestModel(String model, String prompt, List<String> images) {
this.model = model;
this.prompt = prompt;
this.images = images;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof OllamaGenerateRequestModel)) {
return false;
}
return this.toString().equals(o.toString());
}
}

View File

@@ -1,24 +1,23 @@
package io.github.amithkoujalgi.ollama4j.core; package io.github.ollama4j;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolInvocationException; import io.github.ollama4j.exceptions.RoleNotFoundException;
import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolNotFoundException; import io.github.ollama4j.exceptions.ToolInvocationException;
import io.github.amithkoujalgi.ollama4j.core.models.*; import io.github.ollama4j.exceptions.ToolNotFoundException;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage; import io.github.ollama4j.models.chat.*;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; import io.github.ollama4j.models.embeddings.OllamaEmbeddingResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel; import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; import io.github.ollama4j.models.ps.ModelsProcessResponse;
import io.github.amithkoujalgi.ollama4j.core.models.request.*; import io.github.ollama4j.models.request.*;
import io.github.amithkoujalgi.ollama4j.core.tools.*; import io.github.ollama4j.models.response.*;
import io.github.amithkoujalgi.ollama4j.core.utils.Options; import io.github.ollama4j.tools.*;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.Utils;
import lombok.Setter; import lombok.Setter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*; import java.io.*;
import java.net.URI; import java.net.URI;
@@ -31,11 +30,19 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.Files; import java.nio.file.Files;
import java.time.Duration; import java.time.Duration;
import java.util.*; import java.util.*;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
/** /**
* The base Ollama API class. * The base Ollama API class.
*/ */
@SuppressWarnings("DuplicatedCode") @SuppressWarnings({"DuplicatedCode", "resource"})
public class OllamaAPI { public class OllamaAPI {
private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class); private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class);
@@ -57,7 +64,14 @@ public class OllamaAPI {
private final ToolRegistry toolRegistry = new ToolRegistry(); private final ToolRegistry toolRegistry = new ToolRegistry();
/** /**
* Instantiates the Ollama API. * Instantiates the Ollama API with default Ollama host: <a href="http://localhost:11434">http://localhost:11434</a>
**/
public OllamaAPI() {
this.host = "http://localhost:11434";
}
/**
* Instantiates the Ollama API with specified Ollama host address.
* *
* @param host the host address of Ollama server * @param host the host address of Ollama server
*/ */
@@ -89,12 +103,7 @@ public class OllamaAPI {
HttpClient httpClient = HttpClient.newHttpClient(); HttpClient httpClient = HttpClient.newHttpClient();
HttpRequest httpRequest = null; HttpRequest httpRequest = null;
try { try {
httpRequest = httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build();
getRequestBuilderDefault(new URI(url))
.header("Accept", "application/json")
.header("Content-type", "application/json")
.GET()
.build();
} catch (URISyntaxException e) { } catch (URISyntaxException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@@ -111,28 +120,93 @@ public class OllamaAPI {
} }
/** /**
* List available models from Ollama server. * Provides a list of running models and details about each model currently loaded into memory.
* *
* @return the list * @return ModelsProcessResponse containing details about the running models
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
* @throws OllamaBaseException if the response indicates an error status
*/ */
public List<Model> listModels() public ModelsProcessResponse ps() throws IOException, InterruptedException, OllamaBaseException {
throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { String url = this.host + "/api/ps";
String url = this.host + "/api/tags";
HttpClient httpClient = HttpClient.newHttpClient(); HttpClient httpClient = HttpClient.newHttpClient();
HttpRequest httpRequest = HttpRequest httpRequest = null;
getRequestBuilderDefault(new URI(url)) try {
.header("Accept", "application/json") httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build();
.header("Content-type", "application/json") } catch (URISyntaxException e) {
.GET() throw new RuntimeException(e);
.build(); }
HttpResponse<String> response = HttpResponse<String> response = null;
httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
int statusCode = response.statusCode(); int statusCode = response.statusCode();
String responseString = response.body(); String responseString = response.body();
if (statusCode == 200) { if (statusCode == 200) {
return Utils.getObjectMapper() return Utils.getObjectMapper().readValue(responseString, ModelsProcessResponse.class);
.readValue(responseString, ListModelsResponse.class) } else {
.getModels(); throw new OllamaBaseException(statusCode + " - " + responseString);
}
}
/**
* Lists available models from the Ollama server.
*
* @return a list of models available on the server
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
* @throws URISyntaxException if the URI for the request is malformed
*/
public List<Model> listModels() throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
String url = this.host + "/api/tags";
HttpClient httpClient = HttpClient.newHttpClient();
HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build();
HttpResponse<String> response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
int statusCode = response.statusCode();
String responseString = response.body();
if (statusCode == 200) {
return Utils.getObjectMapper().readValue(responseString, ListModelsResponse.class).getModels();
} else {
throw new OllamaBaseException(statusCode + " - " + responseString);
}
}
/**
* Retrieves a list of models from the Ollama library. This method fetches the available models directly from Ollama
* library page, including model details such as the name, pull count, popular tags, tag count, and the time when model was updated.
*
* @return A list of {@link LibraryModel} objects representing the models available in the Ollama library.
* @throws OllamaBaseException If the HTTP request fails or the response is not successful (non-200 status code).
* @throws IOException If an I/O error occurs during the HTTP request or response processing.
* @throws InterruptedException If the thread executing the request is interrupted.
* @throws URISyntaxException If there is an error creating the URI for the HTTP request.
*/
public List<LibraryModel> listModelsFromLibrary() throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
String url = "https://ollama.com/library";
HttpClient httpClient = HttpClient.newHttpClient();
HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build();
HttpResponse<String> response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
int statusCode = response.statusCode();
String responseString = response.body();
List<LibraryModel> models = new ArrayList<>();
if (statusCode == 200) {
Document doc = Jsoup.parse(responseString);
Elements modelSections = doc.selectXpath("//*[@id='repo']/ul/li/a");
for (Element e : modelSections) {
LibraryModel model = new LibraryModel();
Elements names = e.select("div > h2 > span");
Elements pullCounts = e.select("div:nth-of-type(2) > p > span:first-of-type > span:first-of-type");
Elements popularTags = e.select("div > div > span");
Elements tagCount = e.select("div:nth-of-type(2) > p > span:nth-of-type(2) > span:first-of-type");
Elements updatedAt = e.select("div:nth-of-type(2) > p > span:nth-of-type(3) > span:nth-of-type(2)");
model.setName(names.first().text());
model.setPullCount(pullCounts.first().text());
model.setPopularTags(popularTags.stream().map(Element::text).collect(Collectors.toList()));
model.setNumTags(Integer.parseInt(tagCount.first().text()));
model.setUpdatedAt(updatedAt.first().text());
models.add(model);
}
return models;
} else { } else {
throw new OllamaBaseException(statusCode + " - " + responseString); throw new OllamaBaseException(statusCode + " - " + responseString);
} }
@@ -143,29 +217,24 @@ public class OllamaAPI {
* href="https://ollama.ai/library">available models</a>. * href="https://ollama.ai/library">available models</a>.
* *
* @param modelName the name of the model * @param modelName the name of the model
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
* @throws URISyntaxException if the URI for the request is malformed
*/ */
public void pullModel(String modelName) public void pullModel(String modelName) throws OllamaBaseException, IOException, URISyntaxException, InterruptedException {
throws OllamaBaseException, IOException, URISyntaxException, InterruptedException {
String url = this.host + "/api/pull"; String url = this.host + "/api/pull";
String jsonData = new ModelRequest(modelName).toString(); String jsonData = new ModelRequest(modelName).toString();
HttpRequest request = HttpRequest request = getRequestBuilderDefault(new URI(url)).POST(HttpRequest.BodyPublishers.ofString(jsonData)).header("Accept", "application/json").header("Content-type", "application/json").build();
getRequestBuilderDefault(new URI(url))
.POST(HttpRequest.BodyPublishers.ofString(jsonData))
.header("Accept", "application/json")
.header("Content-type", "application/json")
.build();
HttpClient client = HttpClient.newHttpClient(); HttpClient client = HttpClient.newHttpClient();
HttpResponse<InputStream> response = HttpResponse<InputStream> response = client.send(request, HttpResponse.BodyHandlers.ofInputStream());
client.send(request, HttpResponse.BodyHandlers.ofInputStream());
int statusCode = response.statusCode(); int statusCode = response.statusCode();
InputStream responseBodyStream = response.body(); InputStream responseBodyStream = response.body();
String responseString = ""; String responseString = "";
try (BufferedReader reader = try (BufferedReader reader = new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
String line; String line;
while ((line = reader.readLine()) != null) { while ((line = reader.readLine()) != null) {
ModelPullResponse modelPullResponse = ModelPullResponse modelPullResponse = Utils.getObjectMapper().readValue(line, ModelPullResponse.class);
Utils.getObjectMapper().readValue(line, ModelPullResponse.class);
if (verbose) { if (verbose) {
logger.info(modelPullResponse.getStatus()); logger.info(modelPullResponse.getStatus());
} }
@@ -181,17 +250,15 @@ public class OllamaAPI {
* *
* @param modelName the model * @param modelName the model
* @return the model details * @return the model details
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
* @throws URISyntaxException if the URI for the request is malformed
*/ */
public ModelDetail getModelDetails(String modelName) public ModelDetail getModelDetails(String modelName) throws IOException, OllamaBaseException, InterruptedException, URISyntaxException {
throws IOException, OllamaBaseException, InterruptedException, URISyntaxException {
String url = this.host + "/api/show"; String url = this.host + "/api/show";
String jsonData = new ModelRequest(modelName).toString(); String jsonData = new ModelRequest(modelName).toString();
HttpRequest request = HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData)).build();
getRequestBuilderDefault(new URI(url))
.header("Accept", "application/json")
.header("Content-type", "application/json")
.POST(HttpRequest.BodyPublishers.ofString(jsonData))
.build();
HttpClient client = HttpClient.newHttpClient(); HttpClient client = HttpClient.newHttpClient();
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
int statusCode = response.statusCode(); int statusCode = response.statusCode();
@@ -209,17 +276,15 @@ public class OllamaAPI {
* *
* @param modelName the name of the custom model to be created. * @param modelName the name of the custom model to be created.
* @param modelFilePath the path to model file that exists on the Ollama server. * @param modelFilePath the path to model file that exists on the Ollama server.
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
* @throws URISyntaxException if the URI for the request is malformed
*/ */
public void createModelWithFilePath(String modelName, String modelFilePath) public void createModelWithFilePath(String modelName, String modelFilePath) throws IOException, InterruptedException, OllamaBaseException, URISyntaxException {
throws IOException, InterruptedException, OllamaBaseException, URISyntaxException {
String url = this.host + "/api/create"; String url = this.host + "/api/create";
String jsonData = new CustomModelFilePathRequest(modelName, modelFilePath).toString(); String jsonData = new CustomModelFilePathRequest(modelName, modelFilePath).toString();
HttpRequest request = HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-Type", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build();
getRequestBuilderDefault(new URI(url))
.header("Accept", "application/json")
.header("Content-Type", "application/json")
.POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8))
.build();
HttpClient client = HttpClient.newHttpClient(); HttpClient client = HttpClient.newHttpClient();
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
int statusCode = response.statusCode(); int statusCode = response.statusCode();
@@ -243,17 +308,15 @@ public class OllamaAPI {
* *
* @param modelName the name of the custom model to be created. * @param modelName the name of the custom model to be created.
* @param modelFileContents the path to model file that exists on the Ollama server. * @param modelFileContents the path to model file that exists on the Ollama server.
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
* @throws URISyntaxException if the URI for the request is malformed
*/ */
public void createModelWithModelFileContents(String modelName, String modelFileContents) public void createModelWithModelFileContents(String modelName, String modelFileContents) throws IOException, InterruptedException, OllamaBaseException, URISyntaxException {
throws IOException, InterruptedException, OllamaBaseException, URISyntaxException {
String url = this.host + "/api/create"; String url = this.host + "/api/create";
String jsonData = new CustomModelFileContentsRequest(modelName, modelFileContents).toString(); String jsonData = new CustomModelFileContentsRequest(modelName, modelFileContents).toString();
HttpRequest request = HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-Type", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build();
getRequestBuilderDefault(new URI(url))
.header("Accept", "application/json")
.header("Content-Type", "application/json")
.POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8))
.build();
HttpClient client = HttpClient.newHttpClient(); HttpClient client = HttpClient.newHttpClient();
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
int statusCode = response.statusCode(); int statusCode = response.statusCode();
@@ -274,17 +337,15 @@ public class OllamaAPI {
* *
* @param modelName the name of the model to be deleted. * @param modelName the name of the model to be deleted.
* @param ignoreIfNotPresent ignore errors if the specified model is not present on Ollama server. * @param ignoreIfNotPresent ignore errors if the specified model is not present on Ollama server.
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
* @throws URISyntaxException if the URI for the request is malformed
*/ */
public void deleteModel(String modelName, boolean ignoreIfNotPresent) public void deleteModel(String modelName, boolean ignoreIfNotPresent) throws IOException, InterruptedException, OllamaBaseException, URISyntaxException {
throws IOException, InterruptedException, OllamaBaseException, URISyntaxException {
String url = this.host + "/api/delete"; String url = this.host + "/api/delete";
String jsonData = new ModelRequest(modelName).toString(); String jsonData = new ModelRequest(modelName).toString();
HttpRequest request = HttpRequest request = getRequestBuilderDefault(new URI(url)).method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).header("Accept", "application/json").header("Content-type", "application/json").build();
getRequestBuilderDefault(new URI(url))
.method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8))
.header("Accept", "application/json")
.header("Content-type", "application/json")
.build();
HttpClient client = HttpClient.newHttpClient(); HttpClient client = HttpClient.newHttpClient();
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
int statusCode = response.statusCode(); int statusCode = response.statusCode();
@@ -303,9 +364,13 @@ public class OllamaAPI {
* @param model name of model to generate embeddings from * @param model name of model to generate embeddings from
* @param prompt text to generate embeddings for * @param prompt text to generate embeddings for
* @return embeddings * @return embeddings
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
* @deprecated Use {@link #embed(String, List)} instead.
*/ */
public List<Double> generateEmbeddings(String model, String prompt) @Deprecated
throws IOException, InterruptedException, OllamaBaseException { public List<Double> generateEmbeddings(String model, String prompt) throws IOException, InterruptedException, OllamaBaseException {
return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt)); return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt));
} }
@@ -314,28 +379,69 @@ public class OllamaAPI {
* *
* @param modelRequest request for '/api/embeddings' endpoint * @param modelRequest request for '/api/embeddings' endpoint
* @return embeddings * @return embeddings
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
* @deprecated Use {@link #embed(OllamaEmbedRequestModel)} instead.
*/ */
@Deprecated
public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException { public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException {
URI uri = URI.create(this.host + "/api/embeddings"); URI uri = URI.create(this.host + "/api/embeddings");
String jsonData = modelRequest.toString(); String jsonData = modelRequest.toString();
HttpClient httpClient = HttpClient.newHttpClient(); HttpClient httpClient = HttpClient.newHttpClient();
HttpRequest.Builder requestBuilder = HttpRequest.Builder requestBuilder = getRequestBuilderDefault(uri).header("Accept", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData));
getRequestBuilderDefault(uri)
.header("Accept", "application/json")
.POST(HttpRequest.BodyPublishers.ofString(jsonData));
HttpRequest request = requestBuilder.build(); HttpRequest request = requestBuilder.build();
HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
int statusCode = response.statusCode(); int statusCode = response.statusCode();
String responseBody = response.body(); String responseBody = response.body();
if (statusCode == 200) { if (statusCode == 200) {
OllamaEmbeddingResponseModel embeddingResponse = OllamaEmbeddingResponseModel embeddingResponse = Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class);
Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class);
return embeddingResponse.getEmbedding(); return embeddingResponse.getEmbedding();
} else { } else {
throw new OllamaBaseException(statusCode + " - " + responseBody); throw new OllamaBaseException(statusCode + " - " + responseBody);
} }
} }
/**
* Generate embeddings for a given text from a model
*
* @param model name of model to generate embeddings from
* @param inputs text/s to generate embeddings for
* @return embeddings
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
*/
public OllamaEmbedResponseModel embed(String model, List<String> inputs) throws IOException, InterruptedException, OllamaBaseException {
return embed(new OllamaEmbedRequestModel(model, inputs));
}
/**
* Generate embeddings using a {@link OllamaEmbedRequestModel}.
*
* @param modelRequest request for '/api/embed' endpoint
* @return embeddings
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
*/
public OllamaEmbedResponseModel embed(OllamaEmbedRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException {
URI uri = URI.create(this.host + "/api/embed");
String jsonData = Utils.getObjectMapper().writeValueAsString(modelRequest);
HttpClient httpClient = HttpClient.newHttpClient();
HttpRequest request = HttpRequest.newBuilder(uri).header("Accept", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData)).build();
HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
int statusCode = response.statusCode();
String responseBody = response.body();
if (statusCode == 200) {
return Utils.getObjectMapper().readValue(responseBody, OllamaEmbedResponseModel.class);
} else {
throw new OllamaBaseException(statusCode + " - " + responseBody);
}
}
/** /**
* Generate response for a question to a model running on Ollama server. This is a sync/blocking * Generate response for a question to a model running on Ollama server. This is a sync/blocking
@@ -348,10 +454,12 @@ public class OllamaAPI {
* details on the options</a> * details on the options</a>
* @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
* @return OllamaResult that includes response text and time taken for response * @return OllamaResult that includes response text and time taken for response
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
*/ */
public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler) public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
throws OllamaBaseException, IOException, InterruptedException { OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt);
OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
ollamaRequestModel.setRaw(raw); ollamaRequestModel.setRaw(raw);
ollamaRequestModel.setOptions(options.getOptionsMap()); ollamaRequestModel.setOptions(options.getOptionsMap());
return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
@@ -367,13 +475,14 @@ public class OllamaAPI {
* @param raw In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the raw parameter to disable templating. Also note that raw mode will not return a context. * @param raw In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the raw parameter to disable templating. Also note that raw mode will not return a context.
* @param options Additional options or configurations to use when generating the response. * @param options Additional options or configurations to use when generating the response.
* @return {@link OllamaResult} * @return {@link OllamaResult}
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
*/ */
public OllamaResult generate(String model, String prompt, boolean raw, Options options) public OllamaResult generate(String model, String prompt, boolean raw, Options options) throws OllamaBaseException, IOException, InterruptedException {
throws OllamaBaseException, IOException, InterruptedException {
return generate(model, prompt, raw, options, null); return generate(model, prompt, raw, options, null);
} }
/** /**
* Generates response using the specified AI model and prompt (in blocking mode), and then invokes a set of tools * Generates response using the specified AI model and prompt (in blocking mode), and then invokes a set of tools
* on the generated response. * on the generated response.
@@ -382,13 +491,11 @@ public class OllamaAPI {
* @param prompt The input text or prompt to provide to the AI model. * @param prompt The input text or prompt to provide to the AI model.
* @param options Additional options or configurations to use when generating the response. * @param options Additional options or configurations to use when generating the response.
* @return {@link OllamaToolsResult} An OllamaToolsResult object containing the response from the AI model and the results of invoking the tools on that output. * @return {@link OllamaToolsResult} An OllamaToolsResult object containing the response from the AI model and the results of invoking the tools on that output.
* @throws OllamaBaseException If there is an error related to the Ollama API or service. * @throws OllamaBaseException if the response indicates an error status
* @throws IOException If there is an error related to input/output operations. * @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException If the method is interrupted while waiting for the AI model * @throws InterruptedException if the operation is interrupted
* to generate the response or for the tools to be invoked.
*/ */
public OllamaToolsResult generateWithTools(String model, String prompt, Options options) public OllamaToolsResult generateWithTools(String model, String prompt, Options options) throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException {
throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException {
boolean raw = true; boolean raw = true;
OllamaToolsResult toolResult = new OllamaToolsResult(); OllamaToolsResult toolResult = new OllamaToolsResult();
Map<ToolFunctionCallSpec, Object> toolResults = new HashMap<>(); Map<ToolFunctionCallSpec, Object> toolResults = new HashMap<>();
@@ -409,7 +516,6 @@ public class OllamaAPI {
return toolResult; return toolResult;
} }
/** /**
* Generate response for a question to a model running on Ollama server and get a callback handle * Generate response for a question to a model running on Ollama server and get a callback handle
* that can be used to check for status and get the response from the model later. This would be * that can be used to check for status and get the response from the model later. This would be
@@ -420,12 +526,10 @@ public class OllamaAPI {
* @return the ollama async result callback handle * @return the ollama async result callback handle
*/ */
public OllamaAsyncResultStreamer generateAsync(String model, String prompt, boolean raw) { public OllamaAsyncResultStreamer generateAsync(String model, String prompt, boolean raw) {
OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt);
ollamaRequestModel.setRaw(raw); ollamaRequestModel.setRaw(raw);
URI uri = URI.create(this.host + "/api/generate"); URI uri = URI.create(this.host + "/api/generate");
OllamaAsyncResultStreamer ollamaAsyncResultStreamer = OllamaAsyncResultStreamer ollamaAsyncResultStreamer = new OllamaAsyncResultStreamer(getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds);
new OllamaAsyncResultStreamer(
getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds);
ollamaAsyncResultStreamer.start(); ollamaAsyncResultStreamer.start();
return ollamaAsyncResultStreamer; return ollamaAsyncResultStreamer;
} }
@@ -442,15 +546,16 @@ public class OllamaAPI {
* details on the options</a> * details on the options</a>
* @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
* @return OllamaResult that includes response text and time taken for response * @return OllamaResult that includes response text and time taken for response
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
*/ */
public OllamaResult generateWithImageFiles( public OllamaResult generateWithImageFiles(String model, String prompt, List<File> imageFiles, Options options, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
String model, String prompt, List<File> imageFiles, Options options, OllamaStreamHandler streamHandler)
throws OllamaBaseException, IOException, InterruptedException {
List<String> images = new ArrayList<>(); List<String> images = new ArrayList<>();
for (File imageFile : imageFiles) { for (File imageFile : imageFiles) {
images.add(encodeFileToBase64(imageFile)); images.add(encodeFileToBase64(imageFile));
} }
OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images);
ollamaRequestModel.setOptions(options.getOptionsMap()); ollamaRequestModel.setOptions(options.getOptionsMap());
return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
} }
@@ -459,10 +564,12 @@ public class OllamaAPI {
* Convenience method to call Ollama API without streaming responses. * Convenience method to call Ollama API without streaming responses.
* <p> * <p>
* Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)} * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)}
*
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
*/ */
public OllamaResult generateWithImageFiles( public OllamaResult generateWithImageFiles(String model, String prompt, List<File> imageFiles, Options options) throws OllamaBaseException, IOException, InterruptedException {
String model, String prompt, List<File> imageFiles, Options options)
throws OllamaBaseException, IOException, InterruptedException {
return generateWithImageFiles(model, prompt, imageFiles, options, null); return generateWithImageFiles(model, prompt, imageFiles, options, null);
} }
@@ -478,15 +585,17 @@ public class OllamaAPI {
* details on the options</a> * details on the options</a>
* @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
* @return OllamaResult that includes response text and time taken for response * @return OllamaResult that includes response text and time taken for response
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
* @throws URISyntaxException if the URI for the request is malformed
*/ */
public OllamaResult generateWithImageURLs( public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs, Options options, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
String model, String prompt, List<String> imageURLs, Options options, OllamaStreamHandler streamHandler)
throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
List<String> images = new ArrayList<>(); List<String> images = new ArrayList<>();
for (String imageURL : imageURLs) { for (String imageURL : imageURLs) {
images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL))); images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL)));
} }
OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images);
ollamaRequestModel.setOptions(options.getOptionsMap()); ollamaRequestModel.setOptions(options.getOptionsMap());
return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
} }
@@ -495,14 +604,16 @@ public class OllamaAPI {
* Convenience method to call Ollama API without streaming responses. * Convenience method to call Ollama API without streaming responses.
* <p> * <p>
* Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)} * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)}
*
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
* @throws URISyntaxException if the URI for the request is malformed
*/ */
public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs, public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs, Options options) throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
Options options)
throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
return generateWithImageURLs(model, prompt, imageURLs, options, null); return generateWithImageURLs(model, prompt, imageURLs, options, null);
} }
/** /**
* Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the api * Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the api
* 'api/chat'. * 'api/chat'.
@@ -513,6 +624,9 @@ public class OllamaAPI {
* @throws OllamaBaseException any response code than 200 has been returned * @throws OllamaBaseException any response code than 200 has been returned
* @throws IOException in case the responseStream can not be read * @throws IOException in case the responseStream can not be read
* @throws InterruptedException in case the server is not reachable or network issues happen * @throws InterruptedException in case the server is not reachable or network issues happen
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
*/ */
public OllamaChatResult chat(String model, List<OllamaChatMessage> messages) throws OllamaBaseException, IOException, InterruptedException { public OllamaChatResult chat(String model, List<OllamaChatMessage> messages) throws OllamaBaseException, IOException, InterruptedException {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model);
@@ -520,7 +634,7 @@ public class OllamaAPI {
} }
/** /**
* Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. * Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}.
* <p> * <p>
* Hint: the OllamaChatRequestModel#getStream() property is not implemented. * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
* *
@@ -529,13 +643,16 @@ public class OllamaAPI {
* @throws OllamaBaseException any response code than 200 has been returned * @throws OllamaBaseException any response code than 200 has been returned
* @throws IOException in case the responseStream can not be read * @throws IOException in case the responseStream can not be read
* @throws InterruptedException in case the server is not reachable or network issues happen * @throws InterruptedException in case the server is not reachable or network issues happen
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
*/ */
public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException { public OllamaChatResult chat(OllamaChatRequest request) throws OllamaBaseException, IOException, InterruptedException {
return chat(request, null); return chat(request, null);
} }
/** /**
* Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. * Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}.
* <p> * <p>
* Hint: the OllamaChatRequestModel#getStream() property is not implemented. * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
* *
@@ -545,8 +662,11 @@ public class OllamaAPI {
* @throws OllamaBaseException any response code than 200 has been returned * @throws OllamaBaseException any response code than 200 has been returned
* @throws IOException in case the responseStream can not be read * @throws IOException in case the responseStream can not be read
* @throws InterruptedException in case the server is not reachable or network issues happen * @throws InterruptedException in case the server is not reachable or network issues happen
* @throws OllamaBaseException if the response indicates an error status
* @throws IOException if an I/O error occurs during the HTTP request
* @throws InterruptedException if the operation is interrupted
*/ */
public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { public OllamaChatResult chat(OllamaChatRequest request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
OllamaResult result; OllamaResult result;
if (streamHandler != null) { if (streamHandler != null) {
@@ -562,6 +682,37 @@ public class OllamaAPI {
toolRegistry.addFunction(toolSpecification.getFunctionName(), toolSpecification.getToolDefinition()); toolRegistry.addFunction(toolSpecification.getFunctionName(), toolSpecification.getToolDefinition());
} }
/**
* Adds a custom role.
*
* @param roleName the name of the custom role to be added
* @return the newly created OllamaChatMessageRole
*/
public OllamaChatMessageRole addCustomRole(String roleName) {
return OllamaChatMessageRole.newCustomRole(roleName);
}
/**
* Lists all available roles.
*
* @return a list of available OllamaChatMessageRole objects
*/
public List<OllamaChatMessageRole> listRoles() {
return OllamaChatMessageRole.getRoles();
}
/**
* Retrieves a specific role by name.
*
* @param roleName the name of the role to retrieve
* @return the OllamaChatMessageRole associated with the given name
* @throws RoleNotFoundException if the role with the specified name does not exist
*/
public OllamaChatMessageRole getRole(String roleName) throws RoleNotFoundException {
return OllamaChatMessageRole.getRole(roleName);
}
// technical private methods // // technical private methods //
private static String encodeFileToBase64(File file) throws IOException { private static String encodeFileToBase64(File file) throws IOException {
@@ -572,11 +723,8 @@ public class OllamaAPI {
return Base64.getEncoder().encodeToString(bytes); return Base64.getEncoder().encodeToString(bytes);
} }
private OllamaResult generateSyncForOllamaRequestModel( private OllamaResult generateSyncForOllamaRequestModel(OllamaGenerateRequest ollamaRequestModel, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
OllamaGenerateRequestModel ollamaRequestModel, OllamaStreamHandler streamHandler) OllamaGenerateEndpointCaller requestCaller = new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
throws OllamaBaseException, IOException, InterruptedException {
OllamaGenerateEndpointCaller requestCaller =
new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
OllamaResult result; OllamaResult result;
if (streamHandler != null) { if (streamHandler != null) {
ollamaRequestModel.setStream(true); ollamaRequestModel.setStream(true);
@@ -594,10 +742,7 @@ public class OllamaAPI {
* @return HttpRequest.Builder * @return HttpRequest.Builder
*/ */
private HttpRequest.Builder getRequestBuilderDefault(URI uri) { private HttpRequest.Builder getRequestBuilderDefault(URI uri) {
HttpRequest.Builder requestBuilder = HttpRequest.Builder requestBuilder = HttpRequest.newBuilder(uri).header("Content-Type", "application/json").timeout(Duration.ofSeconds(requestTimeoutSeconds));
HttpRequest.newBuilder(uri)
.header("Content-Type", "application/json")
.timeout(Duration.ofSeconds(requestTimeoutSeconds));
if (isBasicAuthCredentialsSet()) { if (isBasicAuthCredentialsSet()) {
requestBuilder.header("Authorization", getBasicAuthHeaderValue()); requestBuilder.header("Authorization", getBasicAuthHeaderValue());
} }
@@ -623,7 +768,6 @@ public class OllamaAPI {
return basicAuth != null; return basicAuth != null;
} }
private Object invokeTool(ToolFunctionCallSpec toolFunctionCallSpec) throws ToolInvocationException { private Object invokeTool(ToolFunctionCallSpec toolFunctionCallSpec) throws ToolInvocationException {
try { try {
String methodName = toolFunctionCallSpec.getName(); String methodName = toolFunctionCallSpec.getName();

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.exceptions; package io.github.ollama4j.exceptions;
public class OllamaBaseException extends Exception { public class OllamaBaseException extends Exception {

View File

@@ -0,0 +1,8 @@
package io.github.ollama4j.exceptions;
public class RoleNotFoundException extends Exception {
public RoleNotFoundException(String s) {
super(s);
}
}

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.exceptions; package io.github.ollama4j.exceptions;
public class ToolInvocationException extends Exception { public class ToolInvocationException extends Exception {

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.exceptions; package io.github.ollama4j.exceptions;
public class ToolNotFoundException extends Exception { public class ToolNotFoundException extends Exception {

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.impl; package io.github.ollama4j.impl;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; import io.github.ollama4j.models.generate.OllamaStreamHandler;
public class ConsoleOutputStreamHandler implements OllamaStreamHandler { public class ConsoleOutputStreamHandler implements OllamaStreamHandler {
private final StringBuffer response = new StringBuffer(); private final StringBuffer response = new StringBuffer();

View File

@@ -1,13 +1,14 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat; package io.github.ollama4j.models.chat;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; import static io.github.ollama4j.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.annotation.JsonSerialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.github.amithkoujalgi.ollama4j.core.utils.FileToBase64Serializer; import io.github.ollama4j.utils.FileToBase64Serializer;
import java.util.List; import java.util.List;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Data; import lombok.Data;
import lombok.NoArgsConstructor; import lombok.NoArgsConstructor;
@@ -33,13 +34,13 @@ public class OllamaChatMessage {
@JsonSerialize(using = FileToBase64Serializer.class) @JsonSerialize(using = FileToBase64Serializer.class)
private List<byte[]> images; private List<byte[]> images;
@Override @Override
public String toString() { public String toString() {
try { try {
return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
} catch (JsonProcessingException e) { } catch (JsonProcessingException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
}
} }
}
} }

View File

@@ -0,0 +1,53 @@
package io.github.ollama4j.models.chat;
import com.fasterxml.jackson.annotation.JsonValue;
import io.github.ollama4j.exceptions.RoleNotFoundException;
import lombok.Getter;
import java.util.ArrayList;
import java.util.List;
/**
 * Defines the possible Chat Message roles.
 * <p>
 * The built-in roles (system/user/assistant/tool) are registered once via the
 * private constructor; additional roles can be created at runtime with
 * {@link #newCustomRole(String)}.
 */
@Getter
public class OllamaChatMessageRole {
    // Registry of every known role (built-in + custom).
    // NOTE(review): access is unsynchronized — concurrent newCustomRole/getRole
    // calls are not thread-safe; confirm whether that matters for callers.
    private static final List<OllamaChatMessageRole> roles = new ArrayList<>();

    public static final OllamaChatMessageRole SYSTEM = new OllamaChatMessageRole("system");
    public static final OllamaChatMessageRole USER = new OllamaChatMessageRole("user");
    public static final OllamaChatMessageRole ASSISTANT = new OllamaChatMessageRole("assistant");
    public static final OllamaChatMessageRole TOOL = new OllamaChatMessageRole("tool");

    // Serialized as the bare role string in JSON payloads.
    @JsonValue
    private final String roleName;

    private OllamaChatMessageRole(String roleName) {
        this.roleName = roleName;
        // Every role registers itself exactly once here.
        roles.add(this);
    }

    /**
     * Creates and registers a custom role with the given name.
     *
     * @param roleName the name of the new role
     * @return the newly created role
     */
    public static OllamaChatMessageRole newCustomRole(String roleName) {
        // Bug fix: the constructor already adds the role to the registry;
        // the previous extra roles.add(customRole) here registered every
        // custom role twice, producing duplicates in getRoles().
        return new OllamaChatMessageRole(roleName);
    }

    /**
     * Returns all registered roles.
     *
     * @return a defensive copy of the role registry
     */
    public static List<OllamaChatMessageRole> getRoles() {
        return new ArrayList<>(roles);
    }

    /**
     * Looks up a registered role by its name.
     *
     * @param roleName the role name to search for
     * @return the matching role
     * @throws RoleNotFoundException if no role with the given name is registered
     */
    public static OllamaChatMessageRole getRole(String roleName) throws RoleNotFoundException {
        for (OllamaChatMessageRole role : roles) {
            if (role.roleName.equals(roleName)) {
                return role;
            }
        }
        throw new RoleNotFoundException("Invalid role name: " + roleName);
    }

    @Override
    public String toString() {
        return roleName;
    }
}

View File

@@ -1,8 +1,9 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat; package io.github.ollama4j.models.chat;
import java.util.List; import java.util.List;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; import io.github.ollama4j.models.request.OllamaCommonRequest;
import io.github.ollama4j.utils.OllamaRequestBody;
import lombok.Getter; import lombok.Getter;
import lombok.Setter; import lombok.Setter;
@@ -16,20 +17,20 @@ import lombok.Setter;
*/ */
@Getter @Getter
@Setter @Setter
public class OllamaChatRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody { public class OllamaChatRequest extends OllamaCommonRequest implements OllamaRequestBody {
private List<OllamaChatMessage> messages; private List<OllamaChatMessage> messages;
public OllamaChatRequestModel() {} public OllamaChatRequest() {}
public OllamaChatRequestModel(String model, List<OllamaChatMessage> messages) { public OllamaChatRequest(String model, List<OllamaChatMessage> messages) {
this.model = model; this.model = model;
this.messages = messages; this.messages = messages;
} }
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {
if (!(o instanceof OllamaChatRequestModel)) { if (!(o instanceof OllamaChatRequest)) {
return false; return false;
} }

View File

@@ -1,4 +1,9 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat; package io.github.ollama4j.models.chat;
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
@@ -8,101 +13,92 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
/** /**
* Helper class for creating {@link OllamaChatRequestModel} objects using the builder-pattern. * Helper class for creating {@link OllamaChatRequest} objects using the builder-pattern.
*/ */
public class OllamaChatRequestBuilder { public class OllamaChatRequestBuilder {
private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class); private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class);
private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages){ private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages) {
request = new OllamaChatRequestModel(model, messages); request = new OllamaChatRequest(model, messages);
} }
private OllamaChatRequestModel request; private OllamaChatRequest request;
public static OllamaChatRequestBuilder getInstance(String model){ public static OllamaChatRequestBuilder getInstance(String model) {
return new OllamaChatRequestBuilder(model, new ArrayList<>()); return new OllamaChatRequestBuilder(model, new ArrayList<>());
} }
public OllamaChatRequestModel build(){ public OllamaChatRequest build() {
return request; return request;
} }
public void reset(){ public void reset() {
request = new OllamaChatRequestModel(request.getModel(), new ArrayList<>()); request = new OllamaChatRequest(request.getModel(), new ArrayList<>());
} }
public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images){ public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images) {
List<OllamaChatMessage> messages = this.request.getMessages(); List<OllamaChatMessage> messages = this.request.getMessages();
List<byte[]> binaryImages = images.stream().map(file -> { List<byte[]> binaryImages = images.stream().map(file -> {
try { try {
return Files.readAllBytes(file.toPath()); return Files.readAllBytes(file.toPath());
} catch (IOException e) { } catch (IOException e) {
LOG.warn(String.format("File '%s' could not be accessed, will not add to message!",file.toPath()), e); LOG.warn(String.format("File '%s' could not be accessed, will not add to message!", file.toPath()), e);
return new byte[0]; return new byte[0];
} }
}).collect(Collectors.toList()); }).collect(Collectors.toList());
messages.add(new OllamaChatMessage(role,content,binaryImages)); messages.add(new OllamaChatMessage(role, content, binaryImages));
return this; return this;
} }
public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, String... imageUrls){ public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, String... imageUrls) {
List<OllamaChatMessage> messages = this.request.getMessages(); List<OllamaChatMessage> messages = this.request.getMessages();
List<byte[]> binaryImages = null; List<byte[]> binaryImages = null;
if(imageUrls.length>0){ if (imageUrls.length > 0) {
binaryImages = new ArrayList<>(); binaryImages = new ArrayList<>();
for (String imageUrl : imageUrls) { for (String imageUrl : imageUrls) {
try{ try {
binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl)); binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl));
} } catch (URISyntaxException e) {
catch (URISyntaxException e){ LOG.warn(String.format("URL '%s' could not be accessed, will not add to message!", imageUrl), e);
LOG.warn(String.format("URL '%s' could not be accessed, will not add to message!",imageUrl), e); } catch (IOException e) {
} LOG.warn(String.format("Content of URL '%s' could not be read, will not add to message!", imageUrl), e);
catch (IOException e){
LOG.warn(String.format("Content of URL '%s' could not be read, will not add to message!",imageUrl), e);
} }
} }
} }
messages.add(new OllamaChatMessage(role,content,binaryImages)); messages.add(new OllamaChatMessage(role, content, binaryImages));
return this; return this;
} }
public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages){ public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages) {
this.request.getMessages().addAll(messages); return new OllamaChatRequestBuilder(request.getModel(), messages);
return this;
} }
public OllamaChatRequestBuilder withOptions(Options options){ public OllamaChatRequestBuilder withOptions(Options options) {
this.request.setOptions(options.getOptionsMap()); this.request.setOptions(options.getOptionsMap());
return this; return this;
} }
public OllamaChatRequestBuilder withGetJsonResponse(){ public OllamaChatRequestBuilder withGetJsonResponse() {
this.request.setReturnFormatJson(true); this.request.setReturnFormatJson(true);
return this; return this;
} }
public OllamaChatRequestBuilder withTemplate(String template){ public OllamaChatRequestBuilder withTemplate(String template) {
this.request.setTemplate(template); this.request.setTemplate(template);
return this; return this;
} }
public OllamaChatRequestBuilder withStreaming(){ public OllamaChatRequestBuilder withStreaming() {
this.request.setStream(true); this.request.setStream(true);
return this; return this;
} }
public OllamaChatRequestBuilder withKeepAlive(String keepAlive){ public OllamaChatRequestBuilder withKeepAlive(String keepAlive) {
this.request.setKeepAlive(keepAlive); this.request.setKeepAlive(keepAlive);
return this; return this;
} }

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat; package io.github.ollama4j.models.chat;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data; import lombok.Data;

View File

@@ -1,19 +1,18 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat; package io.github.ollama4j.models.chat;
import java.util.List; import java.util.List;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; import io.github.ollama4j.models.response.OllamaResult;
/** /**
* Specific chat-API result that contains the chat history sent to the model and appends the answer as {@link OllamaChatResult} given by the * Specific chat-API result that contains the chat history sent to the model and appends the answer as {@link OllamaChatResult} given by the
* {@link OllamaChatMessageRole#ASSISTANT} role. * {@link OllamaChatMessageRole#ASSISTANT} role.
*/ */
public class OllamaChatResult extends OllamaResult{ public class OllamaChatResult extends OllamaResult {
private List<OllamaChatMessage> chatHistory; private List<OllamaChatMessage> chatHistory;
public OllamaChatResult(String response, long responseTime, int httpStatusCode, public OllamaChatResult(String response, long responseTime, int httpStatusCode, List<OllamaChatMessage> chatHistory) {
List<OllamaChatMessage> chatHistory) {
super(response, responseTime, httpStatusCode); super(response, responseTime, httpStatusCode);
this.chatHistory = chatHistory; this.chatHistory = chatHistory;
appendAnswerToChatHistory(response); appendAnswerToChatHistory(response);
@@ -21,12 +20,10 @@ public class OllamaChatResult extends OllamaResult{
public List<OllamaChatMessage> getChatHistory() { public List<OllamaChatMessage> getChatHistory() {
return chatHistory; return chatHistory;
} }
private void appendAnswerToChatHistory(String answer){ private void appendAnswerToChatHistory(String answer) {
OllamaChatMessage assistantMessage = new OllamaChatMessage(OllamaChatMessageRole.ASSISTANT, answer); OllamaChatMessage assistantMessage = new OllamaChatMessage(OllamaChatMessageRole.ASSISTANT, answer);
this.chatHistory.add(assistantMessage); this.chatHistory.add(assistantMessage);
} }
} }

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat; package io.github.ollama4j.models.chat;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; import io.github.ollama4j.models.generate.OllamaStreamHandler;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;

View File

@@ -0,0 +1,41 @@
package io.github.ollama4j.models.embeddings;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import java.util.List;
import java.util.Map;
import static io.github.ollama4j.utils.Utils.getObjectMapper;
@Data
@RequiredArgsConstructor
@NoArgsConstructor
public class OllamaEmbedRequestModel {
    // Name of the model to embed with; required (@NonNull via Lombok).
    @NonNull
    private String model;

    // Batch of input texts to embed; required (@NonNull via Lombok).
    @NonNull
    private List<String> input;

    // Optional model parameters, serialized as-is into the request body.
    private Map<String, Object> options;

    // Mapped to the "keep_alive" JSON field.
    // NOTE(review): presumably the Ollama keep-alive duration string (e.g. "5m")
    // — confirm against the /api/embed documentation.
    @JsonProperty(value = "keep_alive")
    private String keepAlive;

    // Mapped to the "truncate" JSON field; defaults to true.
    // NOTE(review): presumably controls truncation of over-long input — confirm.
    @JsonProperty(value = "truncate")
    private Boolean truncate = true;

    /**
     * Returns this request as pretty-printed JSON.
     */
    @Override
    public String toString() {
        try {
            return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
}

View File

@@ -0,0 +1,25 @@
package io.github.ollama4j.models.embeddings;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.util.List;
@SuppressWarnings("unused")
@Data
public class OllamaEmbedResponseModel {
    // Name of the model that produced the embeddings.
    @JsonProperty("model")
    private String model;

    // One embedding vector per input text, in input order.
    @JsonProperty("embeddings")
    private List<List<Double>> embeddings;

    // Timing/accounting fields reported by the server.
    // NOTE(review): presumably durations in nanoseconds, as elsewhere in the
    // Ollama API — confirm before relying on the unit.
    @JsonProperty("total_duration")
    private long totalDuration;

    @JsonProperty("load_duration")
    private long loadDuration;

    // Number of prompt tokens evaluated for this request.
    @JsonProperty("prompt_eval_count")
    private int promptEvalCount;
}

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models.embeddings; package io.github.ollama4j.models.embeddings;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.embeddings; package io.github.ollama4j.models.embeddings;
import io.github.amithkoujalgi.ollama4j.core.utils.Options; import io.github.ollama4j.utils.Options;
public class OllamaEmbeddingsRequestBuilder { public class OllamaEmbeddingsRequestBuilder {

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.embeddings; package io.github.ollama4j.models.embeddings;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; import static io.github.ollama4j.utils.Utils.getObjectMapper;
import java.util.Map; import java.util.Map;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;

View File

@@ -0,0 +1,46 @@
package io.github.ollama4j.models.generate;
import io.github.ollama4j.models.request.OllamaCommonRequest;
import io.github.ollama4j.utils.OllamaRequestBody;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
public class OllamaGenerateRequest extends OllamaCommonRequest implements OllamaRequestBody {

    // The prompt to generate a response for.
    private String prompt;
    // Optional images for multimodal models.
    // NOTE(review): presumably base64-encoded strings — confirm against callers.
    private List<String> images;
    // Optional system prompt overriding the model's default.
    private String system;
    // Conversation context from a previous generate call.
    // NOTE(review): the Ollama API returns context as an array of ints —
    // confirm String is the intended representation here.
    private String context;
    // Raw-mode flag passed through to the API.
    private boolean raw;

    public OllamaGenerateRequest() {
    }

    public OllamaGenerateRequest(String model, String prompt) {
        this.model = model;
        this.prompt = prompt;
    }

    public OllamaGenerateRequest(String model, String prompt, List<String> images) {
        this.model = model;
        this.prompt = prompt;
        this.images = images;
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof OllamaGenerateRequest)) {
            return false;
        }
        // Equality is defined on the serialized form of the request.
        return this.toString().equals(o.toString());
    }

    // Bug fix: equals was overridden without hashCode, breaking the
    // equals/hashCode contract (equal objects must produce equal hash codes).
    // Derive the hash from the same serialized form equals compares.
    @Override
    public int hashCode() {
        return this.toString().hashCode();
    }
}

View File

@@ -1,24 +1,24 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate; package io.github.ollama4j.models.generate;
import io.github.amithkoujalgi.ollama4j.core.utils.Options; import io.github.ollama4j.utils.Options;
/** /**
* Helper class for creating {@link io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel} * Helper class for creating {@link OllamaGenerateRequest}
* objects using the builder-pattern. * objects using the builder-pattern.
*/ */
public class OllamaGenerateRequestBuilder { public class OllamaGenerateRequestBuilder {
private OllamaGenerateRequestBuilder(String model, String prompt){ private OllamaGenerateRequestBuilder(String model, String prompt){
request = new OllamaGenerateRequestModel(model, prompt); request = new OllamaGenerateRequest(model, prompt);
} }
private OllamaGenerateRequestModel request; private OllamaGenerateRequest request;
public static OllamaGenerateRequestBuilder getInstance(String model){ public static OllamaGenerateRequestBuilder getInstance(String model){
return new OllamaGenerateRequestBuilder(model,""); return new OllamaGenerateRequestBuilder(model,"");
} }
public OllamaGenerateRequestModel build(){ public OllamaGenerateRequest build(){
return request; return request;
} }

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate; package io.github.ollama4j.models.generate;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate; package io.github.ollama4j.models.generate;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate; package io.github.ollama4j.models.generate;
import java.util.function.Consumer; import java.util.function.Consumer;

View File

@@ -0,0 +1,63 @@
package io.github.ollama4j.models.ps;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
@Data
@NoArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public class ModelsProcessResponse {
    // List of models currently loaded/running, as reported by the server.
    @JsonProperty("models")
    private List<ModelProcess> models;

    /**
     * One running model entry.
     */
    @Data
    @NoArgsConstructor
    public static class ModelProcess {
        @JsonProperty("name")
        private String name;

        @JsonProperty("model")
        private String model;

        // Size of the model in bytes.
        @JsonProperty("size")
        private long size;

        @JsonProperty("digest")
        private String digest;

        @JsonProperty("details")
        private ModelDetails details;

        @JsonProperty("expires_at")
        private String expiresAt; // Consider using LocalDateTime if you need to process date/time

        // Amount of the model resident in VRAM, in bytes.
        @JsonProperty("size_vram")
        private long sizeVram;
    }

    /**
     * Static metadata about a model (format, family, quantization, ...).
     */
    @Data
    @NoArgsConstructor
    public static class ModelDetails {
        @JsonProperty("parent_model")
        private String parentModel;

        @JsonProperty("format")
        private String format;

        @JsonProperty("family")
        private String family;

        @JsonProperty("families")
        private List<String> families;

        // e.g. a parameter-count label; kept as String as reported by the API.
        @JsonProperty("parameter_size")
        private String parameterSize;

        @JsonProperty("quantization_level")
        private String quantizationLevel;
    }
}

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.request;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Data; import lombok.Data;

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.request; package io.github.ollama4j.models.request;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; import static io.github.ollama4j.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.request; package io.github.ollama4j.models.request;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; import static io.github.ollama4j.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.request; package io.github.ollama4j.models.request;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; import static io.github.ollama4j.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;

View File

@@ -1,14 +1,13 @@
package io.github.amithkoujalgi.ollama4j.core.models.request; package io.github.ollama4j.models.request;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; import io.github.ollama4j.models.response.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; import io.github.ollama4j.models.chat.OllamaChatResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel; import io.github.ollama4j.models.chat.OllamaChatStreamObserver;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver; import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; import io.github.ollama4j.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; import io.github.ollama4j.utils.Utils;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.request;
import java.util.Map; import java.util.Map;
import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude;
@@ -6,13 +6,13 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.annotation.JsonSerialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.github.amithkoujalgi.ollama4j.core.utils.BooleanToJsonFormatFlagSerializer; import io.github.ollama4j.utils.BooleanToJsonFormatFlagSerializer;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import io.github.ollama4j.utils.Utils;
import lombok.Data; import lombok.Data;
@Data @Data
@JsonInclude(JsonInclude.Include.NON_NULL) @JsonInclude(JsonInclude.Include.NON_NULL)
public abstract class OllamaCommonRequestModel { public abstract class OllamaCommonRequest {
protected String model; protected String model;
@JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class) @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)

View File

@@ -1,12 +1,11 @@
package io.github.amithkoujalgi.ollama4j.core.models.request; package io.github.ollama4j.models.request;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; import io.github.ollama4j.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; import io.github.ollama4j.models.response.OllamaErrorResponse;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel; import io.github.ollama4j.models.response.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; import io.github.ollama4j.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; import io.github.ollama4j.utils.Utils;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -78,19 +77,19 @@ public abstract class OllamaEndpointCaller {
while ((line = reader.readLine()) != null) { while ((line = reader.readLine()) != null) {
if (statusCode == 404) { if (statusCode == 404) {
LOG.warn("Status code: 404 (Not Found)"); LOG.warn("Status code: 404 (Not Found)");
OllamaErrorResponseModel ollamaResponseModel = OllamaErrorResponse ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class); Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
responseBuffer.append(ollamaResponseModel.getError()); responseBuffer.append(ollamaResponseModel.getError());
} else if (statusCode == 401) { } else if (statusCode == 401) {
LOG.warn("Status code: 401 (Unauthorized)"); LOG.warn("Status code: 401 (Unauthorized)");
OllamaErrorResponseModel ollamaResponseModel = OllamaErrorResponse ollamaResponseModel =
Utils.getObjectMapper() Utils.getObjectMapper()
.readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class); .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponse.class);
responseBuffer.append(ollamaResponseModel.getError()); responseBuffer.append(ollamaResponseModel.getError());
} else if (statusCode == 400) { } else if (statusCode == 400) {
LOG.warn("Status code: 400 (Bad Request)"); LOG.warn("Status code: 400 (Bad Request)");
OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaErrorResponse ollamaResponseModel = Utils.getObjectMapper().readValue(line,
OllamaErrorResponseModel.class); OllamaErrorResponse.class);
responseBuffer.append(ollamaResponseModel.getError()); responseBuffer.append(ollamaResponseModel.getError());
} else { } else {
boolean finished = parseResponseAndAddToBuffer(line, responseBuffer); boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);

View File

@@ -1,14 +1,13 @@
package io.github.amithkoujalgi.ollama4j.core.models.request; package io.github.ollama4j.models.request;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; import io.github.ollama4j.models.response.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel; import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver; import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; import io.github.ollama4j.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; import io.github.ollama4j.utils.Utils;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;

View File

@@ -0,0 +1,15 @@
package io.github.ollama4j.models.response;
import java.util.ArrayList;
import java.util.List;
import lombok.Data;
// A model entry scraped/listed from the Ollama model library
// (see the listModelsFromLibrary API added in this change set).
@Data
public class LibraryModel {

    private String name;
    // Pull count as displayed by the library (kept as String, e.g. "1.2M").
    private String pullCount;
    private int numTags;
    // Popular tags for the model; defaults to empty rather than null.
    private List<String> popularTags = new ArrayList<>();
    // Human-readable last-updated label.
    // NOTE(review): format not visible here — confirm whether it is a
    // relative string ("2 weeks ago") or a timestamp.
    private String updatedAt;
}

View File

@@ -1,6 +1,7 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import java.util.List; import java.util.List;
import lombok.Data; import lombok.Data;
@Data @Data

View File

@@ -1,11 +1,10 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import java.time.LocalDateTime;
import java.time.OffsetDateTime; import java.time.OffsetDateTime;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import io.github.ollama4j.utils.Utils;
import lombok.Data; import lombok.Data;
@Data @Data

View File

@@ -1,9 +1,9 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import io.github.ollama4j.utils.Utils;
import lombok.Data; import lombok.Data;
@Data @Data

View File

@@ -1,9 +1,9 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import io.github.ollama4j.utils.Utils;
import lombok.Data; import lombok.Data;
@Data @Data

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data; import lombok.Data;

View File

@@ -1,10 +1,9 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import io.github.amithkoujalgi.ollama4j.core.OllamaResultStream; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel; import io.github.ollama4j.utils.Utils;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data; import lombok.Data;
import lombok.EqualsAndHashCode; import lombok.EqualsAndHashCode;
import lombok.Getter; import lombok.Getter;
@@ -25,7 +24,7 @@ import java.time.Duration;
@SuppressWarnings("unused") @SuppressWarnings("unused")
public class OllamaAsyncResultStreamer extends Thread { public class OllamaAsyncResultStreamer extends Thread {
private final HttpRequest.Builder requestBuilder; private final HttpRequest.Builder requestBuilder;
private final OllamaGenerateRequestModel ollamaRequestModel; private final OllamaGenerateRequest ollamaRequestModel;
private final OllamaResultStream stream = new OllamaResultStream(); private final OllamaResultStream stream = new OllamaResultStream();
private String completeResponse; private String completeResponse;
@@ -56,7 +55,7 @@ public class OllamaAsyncResultStreamer extends Thread {
public OllamaAsyncResultStreamer( public OllamaAsyncResultStreamer(
HttpRequest.Builder requestBuilder, HttpRequest.Builder requestBuilder,
OllamaGenerateRequestModel ollamaRequestModel, OllamaGenerateRequest ollamaRequestModel,
long requestTimeoutSeconds) { long requestTimeoutSeconds) {
this.requestBuilder = requestBuilder; this.requestBuilder = requestBuilder;
this.ollamaRequestModel = ollamaRequestModel; this.ollamaRequestModel = ollamaRequestModel;
@@ -91,8 +90,8 @@ public class OllamaAsyncResultStreamer extends Thread {
StringBuilder responseBuffer = new StringBuilder(); StringBuilder responseBuffer = new StringBuilder();
while ((line = reader.readLine()) != null) { while ((line = reader.readLine()) != null) {
if (statusCode == 404) { if (statusCode == 404) {
OllamaErrorResponseModel ollamaResponseModel = OllamaErrorResponse ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class); Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
stream.add(ollamaResponseModel.getError()); stream.add(ollamaResponseModel.getError());
responseBuffer.append(ollamaResponseModel.getError()); responseBuffer.append(ollamaResponseModel.getError());
} else { } else {

View File

@@ -1,11 +1,11 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data; import lombok.Data;
@Data @Data
@JsonIgnoreProperties(ignoreUnknown = true) @JsonIgnoreProperties(ignoreUnknown = true)
public class OllamaErrorResponseModel { public class OllamaErrorResponse {
private String error; private String error;
} }

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models; package io.github.ollama4j.models.response;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; import static io.github.ollama4j.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.Data; import lombok.Data;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core; package io.github.ollama4j.models.response;
import java.util.Iterator; import java.util.Iterator;
import java.util.LinkedList; import java.util.LinkedList;

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.tools; package io.github.ollama4j.tools;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; import io.github.ollama4j.models.response.OllamaResult;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Data; import lombok.Data;
import lombok.NoArgsConstructor; import lombok.NoArgsConstructor;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.tools; package io.github.ollama4j.tools;
import java.util.Map; import java.util.Map;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.tools; package io.github.ollama4j.tools;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Data; import lombok.Data;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.tools; package io.github.ollama4j.tools;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;

View File

@@ -1,11 +1,11 @@
package io.github.amithkoujalgi.ollama4j.core.tools; package io.github.ollama4j.tools;
import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import io.github.ollama4j.utils.Utils;
import lombok.Builder; import lombok.Builder;
import lombok.Data; import lombok.Data;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.types; package io.github.ollama4j.types;
/** /**
* A class to provide constants for all the supported models by Ollama. * A class to provide constants for all the supported models by Ollama.
@@ -10,10 +10,9 @@ package io.github.amithkoujalgi.ollama4j.core.types;
public class OllamaModelType { public class OllamaModelType {
public static final String GEMMA = "gemma"; public static final String GEMMA = "gemma";
public static final String GEMMA2 = "gemma2"; public static final String GEMMA2 = "gemma2";
public static final String LLAMA2 = "llama2"; public static final String LLAMA2 = "llama2";
public static final String LLAMA3 = "llama3"; public static final String LLAMA3 = "llama3";
public static final String LLAMA3_1 = "llama3.1";
public static final String MISTRAL = "mistral"; public static final String MISTRAL = "mistral";
public static final String MIXTRAL = "mixtral"; public static final String MIXTRAL = "mixtral";
public static final String LLAVA = "llava"; public static final String LLAVA = "llava";
@@ -33,7 +32,6 @@ public class OllamaModelType {
public static final String ZEPHYR = "zephyr"; public static final String ZEPHYR = "zephyr";
public static final String OPENHERMES = "openhermes"; public static final String OPENHERMES = "openhermes";
public static final String QWEN = "qwen"; public static final String QWEN = "qwen";
public static final String QWEN2 = "qwen2"; public static final String QWEN2 = "qwen2";
public static final String WIZARDCODER = "wizardcoder"; public static final String WIZARDCODER = "wizardcoder";
public static final String LLAMA2_CHINESE = "llama2-chinese"; public static final String LLAMA2_CHINESE = "llama2-chinese";

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
import java.io.IOException; import java.io.IOException;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
import java.io.IOException; import java.io.IOException;
import java.util.Base64; import java.util.Base64;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
import java.net.http.HttpRequest.BodyPublisher; import java.net.http.HttpRequest.BodyPublisher;
import java.net.http.HttpRequest.BodyPublishers; import java.net.http.HttpRequest.BodyPublishers;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
import java.util.Map; import java.util.Map;
import lombok.Data; import lombok.Data;

View File

@@ -1,5 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
import java.io.IOException;
import java.util.HashMap; import java.util.HashMap;
/** Builder class for creating options for Ollama model. */ /** Builder class for creating options for Ollama model. */
@@ -207,6 +208,34 @@ public class OptionsBuilder {
return this; return this;
} }
/**
* Alternative to the top_p, and aims to ensure a balance of qualityand variety. The parameter p
* represents the minimum probability for a token to be considered, relative to the probability
* of the most likely token. For example, with p=0.05 and the most likely token having a
* probability of 0.9, logits with a value less than 0.045 are filtered out. (Default: 0.0)
*/
public OptionsBuilder setMinP(float value) {
options.getOptionsMap().put("min_p", value);
return this;
}
/**
* Allows passing an option not formally supported by the library
* @param name The option name for the parameter.
* @param value The value for the "{name}" parameter.
* @return The updated OptionsBuilder.
* @throws IllegalArgumentException if parameter has an unsupported type
*/
public OptionsBuilder setCustomOption(String name, Object value) throws IllegalArgumentException {
if (!(value instanceof Integer || value instanceof Float || value instanceof String)) {
throw new IllegalArgumentException("Invalid type for parameter. Allowed types are: Integer, Float, or String.");
}
options.getOptionsMap().put(name, value);
return this;
}
/** /**
* Builds the options map. * Builds the options map.
* *
@@ -215,4 +244,6 @@ public class OptionsBuilder {
public Options build() { public Options build() {
return options; return options;
} }
} }

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
/** /**
* The {@code PromptBuilder} class is used to construct prompt texts for language models (LLMs). It * The {@code PromptBuilder} class is used to construct prompt texts for language models (LLMs). It

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; import io.github.ollama4j.OllamaAPI;
import java.io.InputStream; import java.io.InputStream;
import java.util.Scanner; import java.util.Scanner;

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.utils; package io.github.ollama4j.utils;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.io.IOException; import java.io.IOException;

View File

@@ -1,16 +1,16 @@
package io.github.amithkoujalgi.ollama4j.integrationtests; package io.github.ollama4j.integrationtests;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; import io.github.ollama4j.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; import io.github.ollama4j.models.response.ModelDetail;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole; import io.github.ollama4j.models.response.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; import io.github.ollama4j.utils.OptionsBuilder;
import lombok.Data; import lombok.Data;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Order;
@@ -177,7 +177,7 @@ class TestRealAPIs {
testEndpointReachability(); testEndpointReachability();
try { try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
.withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!") .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!")
.withMessage(OllamaChatMessageRole.USER, "And what is the second larges city?") .withMessage(OllamaChatMessageRole.USER, "And what is the second larges city?")
.build(); .build();
@@ -197,7 +197,7 @@ class TestRealAPIs {
testEndpointReachability(); testEndpointReachability();
try { try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM,
"You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
.withMessage(OllamaChatMessageRole.USER, .withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?") "What is the capital of France? And what's France's connection with Mona Lisa?")
@@ -219,7 +219,7 @@ class TestRealAPIs {
testEndpointReachability(); testEndpointReachability();
try { try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?") "What is the capital of France? And what's France's connection with Mona Lisa?")
.build(); .build();
@@ -245,7 +245,7 @@ class TestRealAPIs {
try { try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder builder =
OllamaChatRequestBuilder.getInstance(config.getImageModel()); OllamaChatRequestBuilder.getInstance(config.getImageModel());
OllamaChatRequestModel requestModel = OllamaChatRequest requestModel =
builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
@@ -275,7 +275,7 @@ class TestRealAPIs {
testEndpointReachability(); testEndpointReachability();
try { try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel()); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg") "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")
.build(); .build();

View File

@@ -1,12 +1,16 @@
package io.github.amithkoujalgi.ollama4j.unittests; package io.github.ollama4j.unittests;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; import io.github.ollama4j.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; import io.github.ollama4j.exceptions.RoleNotFoundException;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultStreamer; import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType; import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; import io.github.ollama4j.models.response.ModelDetail;
import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.mockito.Mockito; import org.mockito.Mockito;
@@ -14,7 +18,9 @@ import java.io.IOException;
import java.net.URISyntaxException; import java.net.URISyntaxException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.List;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*; import static org.mockito.Mockito.*;
class TestMockedAPIs { class TestMockedAPIs {
@@ -97,6 +103,34 @@ class TestMockedAPIs {
} }
} }
@Test
void testEmbed() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
List<String> inputs = List.of("some prompt text");
try {
when(ollamaAPI.embed(model, inputs)).thenReturn(new OllamaEmbedResponseModel());
ollamaAPI.embed(model, inputs);
verify(ollamaAPI, times(1)).embed(model, inputs);
} catch (IOException | OllamaBaseException | InterruptedException e) {
throw new RuntimeException(e);
}
}
@Test
void testEmbedWithEmbedRequestModel() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
List<String> inputs = List.of("some prompt text");
try {
when(ollamaAPI.embed(new OllamaEmbedRequestModel(model, inputs))).thenReturn(new OllamaEmbedResponseModel());
ollamaAPI.embed(new OllamaEmbedRequestModel(model, inputs));
verify(ollamaAPI, times(1)).embed(new OllamaEmbedRequestModel(model, inputs));
} catch (IOException | OllamaBaseException | InterruptedException e) {
throw new RuntimeException(e);
}
}
@Test @Test
void testAsk() { void testAsk() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
@@ -161,4 +195,68 @@ class TestMockedAPIs {
ollamaAPI.generateAsync(model, prompt, false); ollamaAPI.generateAsync(model, prompt, false);
verify(ollamaAPI, times(1)).generateAsync(model, prompt, false); verify(ollamaAPI, times(1)).generateAsync(model, prompt, false);
} }
@Test
void testAddCustomRole() {
OllamaAPI ollamaAPI = mock(OllamaAPI.class);
String roleName = "custom-role";
OllamaChatMessageRole expectedRole = OllamaChatMessageRole.newCustomRole(roleName);
when(ollamaAPI.addCustomRole(roleName)).thenReturn(expectedRole);
OllamaChatMessageRole customRole = ollamaAPI.addCustomRole(roleName);
assertEquals(expectedRole, customRole);
verify(ollamaAPI, times(1)).addCustomRole(roleName);
}
@Test
void testListRoles() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
OllamaChatMessageRole role1 = OllamaChatMessageRole.newCustomRole("role1");
OllamaChatMessageRole role2 = OllamaChatMessageRole.newCustomRole("role2");
List<OllamaChatMessageRole> expectedRoles = List.of(role1, role2);
when(ollamaAPI.listRoles()).thenReturn(expectedRoles);
List<OllamaChatMessageRole> actualRoles = ollamaAPI.listRoles();
assertEquals(expectedRoles, actualRoles);
verify(ollamaAPI, times(1)).listRoles();
}
@Test
void testGetRoleNotFound() {
OllamaAPI ollamaAPI = mock(OllamaAPI.class);
String roleName = "non-existing-role";
try {
when(ollamaAPI.getRole(roleName)).thenThrow(new RoleNotFoundException("Role not found"));
} catch (RoleNotFoundException exception) {
throw new RuntimeException("Failed to run test: testGetRoleNotFound");
}
try {
ollamaAPI.getRole(roleName);
fail("Expected RoleNotFoundException not thrown");
} catch (RoleNotFoundException exception) {
assertEquals("Role not found", exception.getMessage());
}
try {
verify(ollamaAPI, times(1)).getRole(roleName);
} catch (RoleNotFoundException exception) {
throw new RuntimeException("Failed to run test: testGetRoleNotFound");
}
}
@Test
void testGetRoleFound() {
OllamaAPI ollamaAPI = mock(OllamaAPI.class);
String roleName = "existing-role";
OllamaChatMessageRole expectedRole = OllamaChatMessageRole.newCustomRole(roleName);
try {
when(ollamaAPI.getRole(roleName)).thenReturn(expectedRole);
} catch (RoleNotFoundException exception) {
throw new RuntimeException("Failed to run test: testGetRoleFound");
}
try {
OllamaChatMessageRole actualRole = ollamaAPI.getRole(roleName);
assertEquals(expectedRole, actualRole);
verify(ollamaAPI, times(1)).getRole(roleName);
} catch (RoleNotFoundException exception) {
throw new RuntimeException("Failed to run test: testGetRoleFound");
}
}
} }

View File

@@ -1,10 +1,10 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson; package io.github.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assertions.fail;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import io.github.ollama4j.utils.Utils;
public abstract class AbstractSerializationTest<T> { public abstract class AbstractSerializationTest<T> {

View File

@@ -1,20 +1,21 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson; package io.github.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrowsExactly;
import java.io.File; import java.io.File;
import java.util.List; import java.util.List;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import org.json.JSONObject; import org.json.JSONObject;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole; import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; import io.github.ollama4j.utils.OptionsBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
public class TestChatRequestSerialization extends AbstractSerializationTest<OllamaChatRequestModel> { public class TestChatRequestSerialization extends AbstractSerializationTest<OllamaChatRequest> {
private OllamaChatRequestBuilder builder; private OllamaChatRequestBuilder builder;
@@ -25,32 +26,32 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
@Test @Test
public void testRequestOnlyMandatoryFields() { public void testRequestOnlyMandatoryFields() {
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build(); OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req); assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
} }
@Test @Test
public void testRequestMultipleMessages() { public void testRequestMultipleMessages() {
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt") OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt")
.withMessage(OllamaChatMessageRole.USER, "Some prompt") .withMessage(OllamaChatMessageRole.USER, "Some prompt")
.build(); .build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req); assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
} }
@Test @Test
public void testRequestWithMessageAndImage() { public void testRequestWithMessageAndImage() {
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt", OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build(); List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req); assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
} }
@Test @Test
public void testRequestWithOptions() { public void testRequestWithOptions() {
OptionsBuilder b = new OptionsBuilder(); OptionsBuilder b = new OptionsBuilder();
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
.withOptions(b.setMirostat(1).build()) .withOptions(b.setMirostat(1).build())
.withOptions(b.setTemperature(1L).build()) .withOptions(b.setTemperature(1L).build())
.withOptions(b.setMirostatEta(1L).build()) .withOptions(b.setMirostatEta(1L).build())
@@ -59,10 +60,14 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
.withOptions(b.setSeed(1).build()) .withOptions(b.setSeed(1).build())
.withOptions(b.setTopK(1).build()) .withOptions(b.setTopK(1).build())
.withOptions(b.setTopP(1).build()) .withOptions(b.setTopP(1).build())
.withOptions(b.setMinP(1).build())
.withOptions(b.setCustomOption("cust_float", 1.0f).build())
.withOptions(b.setCustomOption("cust_int", 1).build())
.withOptions(b.setCustomOption("cust_str", "custom").build())
.build(); .build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
OllamaChatRequestModel deserializeRequest = deserialize(jsonRequest, OllamaChatRequestModel.class); OllamaChatRequest deserializeRequest = deserialize(jsonRequest, OllamaChatRequest.class);
assertEqualsAfterUnmarshalling(deserializeRequest, req); assertEqualsAfterUnmarshalling(deserializeRequest, req);
assertEquals(1, deserializeRequest.getOptions().get("mirostat")); assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
assertEquals(1.0, deserializeRequest.getOptions().get("temperature")); assertEquals(1.0, deserializeRequest.getOptions().get("temperature"));
@@ -72,11 +77,25 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
assertEquals(1, deserializeRequest.getOptions().get("seed")); assertEquals(1, deserializeRequest.getOptions().get("seed"));
assertEquals(1, deserializeRequest.getOptions().get("top_k")); assertEquals(1, deserializeRequest.getOptions().get("top_k"));
assertEquals(1.0, deserializeRequest.getOptions().get("top_p")); assertEquals(1.0, deserializeRequest.getOptions().get("top_p"));
assertEquals(1.0, deserializeRequest.getOptions().get("min_p"));
assertEquals(1.0, deserializeRequest.getOptions().get("cust_float"));
assertEquals(1, deserializeRequest.getOptions().get("cust_int"));
assertEquals("custom", deserializeRequest.getOptions().get("cust_str"));
}
@Test
public void testRequestWithInvalidCustomOption() {
OptionsBuilder b = new OptionsBuilder();
assertThrowsExactly(IllegalArgumentException.class, () -> {
OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
.withOptions(b.setCustomOption("cust_obj", new Object()).build())
.build();
});
} }
@Test @Test
public void testWithJsonFormat() { public void testWithJsonFormat() {
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
.withGetJsonResponse().build(); .withGetJsonResponse().build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
@@ -89,25 +108,25 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
@Test @Test
public void testWithTemplate() { public void testWithTemplate() {
OllamaChatRequestModel req = builder.withTemplate("System Template") OllamaChatRequest req = builder.withTemplate("System Template")
.build(); .build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req); assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
} }
@Test @Test
public void testWithStreaming() { public void testWithStreaming() {
OllamaChatRequestModel req = builder.withStreaming().build(); OllamaChatRequest req = builder.withStreaming().build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).isStream(), true); assertEquals(deserialize(jsonRequest, OllamaChatRequest.class).isStream(), true);
} }
@Test @Test
public void testWithKeepAlive() { public void testWithKeepAlive() {
String expectedKeepAlive = "5m"; String expectedKeepAlive = "5m";
OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive) OllamaChatRequest req = builder.withKeepAlive(expectedKeepAlive)
.build(); .build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive); assertEquals(deserialize(jsonRequest, OllamaChatRequest.class).getKeepAlive(), expectedKeepAlive);
} }
} }

View File

@@ -1,11 +1,11 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson; package io.github.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; import io.github.ollama4j.utils.OptionsBuilder;
public class TestEmbeddingsRequestSerialization extends AbstractSerializationTest<OllamaEmbeddingsRequestModel> { public class TestEmbeddingsRequestSerialization extends AbstractSerializationTest<OllamaEmbeddingsRequestModel> {

View File

@@ -1,17 +1,17 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson; package io.github.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import org.json.JSONObject; import org.json.JSONObject;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestBuilder; import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; import io.github.ollama4j.utils.OptionsBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
public class TestGenerateRequestSerialization extends AbstractSerializationTest<OllamaGenerateRequestModel> { public class TestGenerateRequestSerialization extends AbstractSerializationTest<OllamaGenerateRequest> {
private OllamaGenerateRequestBuilder builder; private OllamaGenerateRequestBuilder builder;
@@ -22,27 +22,27 @@ public class TestGenerateRequestSerialization extends AbstractSerializationTest<
@Test @Test
public void testRequestOnlyMandatoryFields() { public void testRequestOnlyMandatoryFields() {
OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build(); OllamaGenerateRequest req = builder.withPrompt("Some prompt").build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequestModel.class), req); assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequest.class), req);
} }
@Test @Test
public void testRequestWithOptions() { public void testRequestWithOptions() {
OptionsBuilder b = new OptionsBuilder(); OptionsBuilder b = new OptionsBuilder();
OllamaGenerateRequestModel req = OllamaGenerateRequest req =
builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build(); builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);
OllamaGenerateRequestModel deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequestModel.class); OllamaGenerateRequest deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequest.class);
assertEqualsAfterUnmarshalling(deserializeRequest, req); assertEqualsAfterUnmarshalling(deserializeRequest, req);
assertEquals(1, deserializeRequest.getOptions().get("mirostat")); assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
} }
@Test @Test
public void testWithJsonFormat() { public void testWithJsonFormat() {
OllamaGenerateRequestModel req = OllamaGenerateRequest req =
builder.withPrompt("Some prompt").withGetJsonResponse().build(); builder.withPrompt("Some prompt").withGetJsonResponse().build();
String jsonRequest = serialize(req); String jsonRequest = serialize(req);

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson; package io.github.ollama4j.unittests.jackson;
import io.github.amithkoujalgi.ollama4j.core.models.Model; import io.github.ollama4j.models.response.Model;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
public class TestModelRequestSerialization extends AbstractSerializationTest<Model> { public class TestModelRequestSerialization extends AbstractSerializationTest<Model> {