Compare commits

..

378 Commits

Author SHA1 Message Date
Amith Koujalgi
740bd3750b Merge pull request #92 from ollama4j/90
Addresses issue where creation of model was failing
2025-02-17 22:37:20 +05:30
Amith Koujalgi
c9aa6c9e08 Merge branch 'main' into 90 2025-02-17 22:35:38 +05:30
Amith Koujalgi
71bba6ee0d Added GH action to run tests 2025-02-17 22:33:02 +05:30
Amith Koujalgi
27b2201ff9 Added GH action to run tests 2025-02-17 22:31:56 +05:30
Amith Koujalgi
23d23c4ad7 Added new createModel API to make it conform to Ollama's new API - https://github.com/ollama/ollama/blob/main/docs/api.md#create-a-model 2025-02-17 22:25:25 +05:30
amithkoujalgi
e409ff1cf9 Update OllamaAPI.java 2025-02-03 08:56:49 +05:30
amithkoujalgi
9a12cebb68 Update README.md 2025-02-01 23:13:35 +05:30
amithkoujalgi
24f5bc4fec Delete close-issue.yml 2025-02-01 09:10:46 +05:30
Amith Koujalgi
d7c313417b Update label-issue-stale.yml 2025-02-01 09:03:57 +05:30
Amith Koujalgi
b67b4c7eb5 Update publish-docs.yml 2025-02-01 00:10:37 +05:30
Amith Koujalgi
ab70201844 Merge pull request #89 from kwongiho/main
Add support for deepseek-r1 model
2025-02-01 00:00:26 +05:30
kwongiho
ac8a40a017 Add support for deepseek-r1 model 2025-01-30 21:32:29 +09:00
Amith Koujalgi
1ac65f821b Update label-issue-stale.yml 2025-01-29 00:34:17 +05:30
Amith Koujalgi
d603c4b94b Update label-issue-stale.yml 2025-01-29 00:17:34 +05:30
Amith Koujalgi
a418cbc1dc Create label-issue-stale.yml 2025-01-29 00:17:11 +05:30
Amith Koujalgi
785dd12730 Update close-issue.yml 2025-01-28 23:52:22 +05:30
Amith Koujalgi
dda807d818 Merge pull request #88 from seeseemelk/feature/token-streamer
Add ability to stream tokens in chat
2025-01-26 17:37:22 +05:30
Amith Koujalgi
a06a4025fa Merge pull request #87 from seeseemelk/feature/annotated-objects
Add support for registering object instances
2025-01-26 17:35:58 +05:30
761fbc3398 Add support for streaming tokens 2025-01-24 15:05:33 +01:00
a96dc11679 Fix random test failure 2025-01-24 15:05:32 +01:00
b2b3febdaa Add support for registering object instances instead of only through the @OllamaToolService annotation 2025-01-24 13:38:47 +01:00
amithkoujalgi
f27bea11d5 Merge branch 'main' of https://github.com/ollama4j/ollama4j 2025-01-14 10:44:13 +05:30
amithkoujalgi
9503451d5a Create close-issue.yml 2025-01-14 10:42:58 +05:30
Amith Koujalgi
04bae4ca6a Update README.md 2025-01-14 10:06:30 +05:30
Amith Koujalgi
3e33b8df62 Update README.md 2025-01-13 20:08:42 +05:30
Amith Koujalgi
a494053263 Merge pull request #85 from AgentSchmecker/feature/annotationBasedTools
Feature/annotation based tools
2025-01-04 23:52:51 +05:30
Markus Klenke
260c57ca84 Removes system.err lines 2024-12-27 23:10:08 +01:00
Markus Klenke
db008de0ca Adds documentation for annotation based Tool registration 2024-12-27 23:07:35 +01:00
Markus Klenke
1b38466f44 Adds BigDecimal type for ToolProperty typeCast 2024-12-27 23:05:08 +01:00
Markus Klenke
26ec00dab8 Adds Javadoc for new classes and annotations 2024-12-27 22:33:44 +01:00
Markus Klenke
5e6971cc4a Adds first approach to annotation based tool callings using basic java reflection 2024-12-27 22:20:34 +01:00
Amith Koujalgi
8b3417ecda Merge pull request #82 from AgentSchmecker/feature/toolextension_for_chat_model
Enable chat API to use Tools
2024-12-17 12:03:56 +05:30
Markus Klenke
35f5f34196 Adds doc for tool-based chat API calls 2024-12-09 23:30:05 +01:00
Markus Klenke
d8c3edd55f Parametrizes the max chat tool call retries for a single chat request 2024-12-09 23:29:43 +01:00
Markus Klenke
7ffbc5d3f2 Adds implicit tool calling for streamed chat requests (requires Ollama v0.4.6) 2024-12-09 23:07:25 +01:00
Markus Klenke
c4b7830614 Fixes merge conflicts 2024-12-07 01:18:12 +01:00
Markus Klenke
69f6fd81cf Enables in chat tool calling 2024-12-07 01:17:08 +01:00
Markus Klenke
b6a293add7 Makes changes to OllamaChatResult backwards compatible 2024-12-07 01:17:08 +01:00
Markus Klenke
25694a8bc9 extends ollamaChatResult to have full access to OllamaChatResult 2024-12-07 01:17:01 +01:00
Markus Klenke
12bb10392e Extends ChatModels to use Tools and ToolCalls 2024-12-07 01:16:25 +01:00
Markus Klenke
e9c33ab0b2 Extends chat API to automatically load registered Tools 2024-12-07 01:16:25 +01:00
Markus Klenke
903a8176cd Extends ToolSpec to have PromptDef for ChatRequests 2024-12-07 01:16:25 +01:00
Amith Koujalgi
4a91918e84 Merge pull request #81 from AgentSchmecker/bugfix/79
Fixes for #78 and #79
2024-12-05 16:42:30 +05:30
Markus Klenke
ff3344616c Fixes NPE in #78 2024-12-04 22:57:48 +01:00
Markus Klenke
726fea5b74 Fixes #79 2024-12-04 22:28:00 +01:00
Markus Klenke
a09f1362e9 Adds Builder for EmbedRequests and deprecates old Embedding Models 2024-12-02 22:48:33 +01:00
amithkoujalgi
4ef0821932 updated README.md
Update README.md
2024-11-09 01:01:38 +05:30
amithkoujalgi
2d3cf228cb added findModelTagFromLibrary API 2024-11-08 12:37:58 +05:30
amithkoujalgi
5b3713c69e added getLibraryModelDetails API and pullModel API with LibraryModelTag 2024-11-08 11:23:47 +05:30
Amith Koujalgi
e9486cbb8e Merge pull request #76 from ollama4j/model-listing
updated `listModelsFromLibrary` API
2024-11-08 10:05:19 +05:30
amithkoujalgi
057f0babeb updated listModelsFromLibrary API
updated `listModelsFromLibrary` API
2024-11-08 10:02:27 +05:30
Amith Koujalgi
da146640ca Update README.md 2024-11-08 00:08:13 +05:30
Amith Koujalgi
82be761b86 Merge pull request #75 from ollama4j/model-listing
`listModelsFromLibrary` API
2024-11-07 23:54:56 +05:30
amithkoujalgi
9c3fc49df1 added listModelsFromLibrary API 2024-11-07 23:53:11 +05:30
Amith Koujalgi
5f19eb17ac Update OllamaAPI.java 2024-11-07 21:53:41 +05:30
Amith Koujalgi
ecb04d6d82 Cleanup 2024-10-31 21:22:17 +05:30
Amith Koujalgi
3fc7e9423c Updated docs 2024-10-31 17:57:02 +05:30
Amith Koujalgi
405a08b330 Updated docs 2024-10-31 16:25:05 +05:30
Amith Koujalgi
921f745435 Custom roles support
Adds support for custom roles using `OllamaChatMessageRole`
2024-10-31 16:15:21 +05:30
Amith Koujalgi
bedfec6bf9 Update generate-embeddings.md 2024-10-30 11:07:40 +05:30
Amith Koujalgi
afa09e87a5 Update OllamaAPI.java 2024-10-30 11:02:37 +05:30
Amith Koujalgi
baf2320ea6 Updated javadoc 2024-10-30 11:01:23 +05:30
Amith Koujalgi
948a7444fb Update README.md 2024-10-30 00:54:33 +05:30
Amith Koujalgi
ec0eb8b469 Update README.md 2024-10-30 00:44:08 +05:30
Amith Koujalgi
8f33de7e59 Update README.md 2024-10-30 00:43:09 +05:30
Amith Koujalgi
8c59e6511b Update README.md 2024-10-30 00:41:55 +05:30
Amith Koujalgi
b93fc7623a Updated javadoc 2024-10-30 00:28:53 +05:30
Amith Koujalgi
bd1a57c7e0 Added support for new embed API /api/embed 2024-10-30 00:03:49 +05:30
Amith Koujalgi
7fabead249 Merge pull request #73 from daguava/tool-role
Add 'tool' role
2024-10-29 22:05:43 +05:30
Mitchell Lutzke
268a973d5e Add tool role 2024-10-27 17:06:25 -07:00
Amith Koujalgi
d949a3cb69 Merge pull request #72 from daguava/minp-custom-options
Add minp option and ability to set custom options
2024-10-27 20:05:24 +05:30
Mitchell Lutzke
e2443ed68a Add throws to the docs 2024-10-26 22:08:03 -07:00
Mitchell Lutzke
37193b1f5b slight cleanup 2024-10-26 21:36:43 -07:00
Mitchell Lutzke
e33071ae38 Add minp option and ability to set custom options 2024-10-26 21:22:46 -07:00
Amith Koujalgi
fffc8dc526 Update README.md 2024-10-16 00:55:01 +05:30
Amith Koujalgi
def950cc9c Update README.md 2024-10-15 14:27:07 +05:30
Amith Koujalgi
f4db7ca326 Update README.md 2024-10-15 11:51:20 +05:30
Amith Koujalgi
18760250ea Update README.md 2024-10-11 21:21:20 +05:30
Amith Koujalgi
233597efd1 Update README.md 2024-10-01 23:38:49 +05:30
Amith Koujalgi
cec9f29eb7 Update README.md 2024-09-15 09:43:37 +05:30
Amith Koujalgi
20cb92a418 Update README.md 2024-09-15 08:48:31 +05:30
Amith Koujalgi
b0dc38954b Update README.md 2024-09-15 08:47:22 +05:30
Amith Koujalgi
1479d0a494 Update README.md 2024-09-08 16:28:46 +05:30
Amith Koujalgi
b328daee43 Update README.md 2024-09-08 16:28:23 +05:30
Amith Koujalgi
b90c8bc622 Docs build fixes
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-09-05 01:41:24 +05:30
Amith Koujalgi
660a1b937a Added llama3.1
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-09-05 01:24:13 +05:30
Amith Koujalgi
fd961d7037 Added BMC
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-09-05 00:48:50 +05:30
Amith Koujalgi
b48f9550c3 Update README.md 2024-09-04 00:10:09 +05:30
Amith Koujalgi
363969a275 Update README.md 2024-09-04 00:05:30 +05:30
Amith Koujalgi
992625cf86 Merge pull request #63 from peaster/gradle-dependency-fix
Change Gradle dependency example to use correct declaration
2024-08-10 10:20:09 +05:30
peaster
bbebd26d07 correct Gradle dependency example to use correct declaration 2024-08-09 16:24:43 -04:00
Amith Koujalgi
3aa0fc77cb Updated withMessages method of OllamaChatRequestBuilder to reset the messages 2024-08-09 01:39:21 +05:30
Amith Koujalgi
11a98a72a1 Updated docusaurus.config.js
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-08-06 11:22:37 +05:30
Amith Koujalgi
422601c0fc Updated ModelsProcessResponse class to support ps() API
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-08-06 11:11:15 +05:30
Amith Koujalgi
75e6576a13 Updated docs
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-08-06 11:00:54 +05:30
Amith Koujalgi
51dd3f3e1e Added Json ignore properties for ModelsProcessResponse
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-08-06 10:46:35 +05:30
Amith Koujalgi
30250f79d9 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-28 00:06:55 +05:30
Amith Koujalgi
d4ee9ed051 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 23:55:21 +05:30
Amith Koujalgi
4412ac683a updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 23:53:54 +05:30
Amith Koujalgi
b5b1a26941 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 23:52:00 +05:30
Amith Koujalgi
a84230bbd1 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 23:30:53 +05:30
Amith Koujalgi
00c9b16556 Refactored classes into request and response packages
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 23:20:18 +05:30
Amith Koujalgi
9a2194334f Added ps() API
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 20:58:53 +05:30
Amith Koujalgi
f9cf11ecdf Added ps() API
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 20:54:04 +05:30
Amith Koujalgi
0af80865c3 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:59:15 +05:30
Amith Koujalgi
a304c01194 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:58:43 +05:30
Amith Koujalgi
887708864e updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:56:19 +05:30
Amith Koujalgi
2f0c4fdcc9 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:54:34 +05:30
Amith Koujalgi
73aabd7ca6 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:48:05 +05:30
Amith Koujalgi
17ca2bdee3 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:46:53 +05:30
Amith Koujalgi
e43bd3acb4 Refactored the package structure to use io.github.ollama4j instead of the old naming io.github.amithkoujalgi.ollama4j.core
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 16:37:47 +05:30
Amith Koujalgi
0b041f4340 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 15:59:23 +05:30
Amith Koujalgi
6c6062b757 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 15:58:24 +05:30
Amith Koujalgi
68fd8b7cc8 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 15:57:38 +05:30
Amith Koujalgi
bb6f8aa343 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 15:56:36 +05:30
Amith Koujalgi
12802be0bc updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 15:35:35 +05:30
Amith Koujalgi
bd56ccfef7 Update README.md 2024-07-27 02:45:36 +05:30
Amith Koujalgi
51563f276f updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 00:57:55 +05:30
Amith Koujalgi
6e282124bf updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 00:56:57 +05:30
Amith Koujalgi
3ab9e4c283 Update README.md 2024-07-27 00:45:20 +05:30
Amith Koujalgi
2db6a22cc7 updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 00:39:42 +05:30
Amith Koujalgi
cc69341620 updated links
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-27 00:22:16 +05:30
Amith Koujalgi
4589a9032c updated links 2024-07-27 00:21:34 +05:30
Amith Koujalgi
da273402b5 updated links 2024-07-27 00:16:44 +05:30
Amith Koujalgi
cfa8aa14d7 updated publishing config 2024-07-26 23:57:19 +05:30
Amith Koujalgi
bc4e8303aa Update README.md 2024-07-16 23:43:55 +05:30
Amith Koujalgi
f2f740a2a0 Update README.md 2024-07-16 23:42:47 +05:30
Amith Koujalgi
4cbb783a61 Update README.md 2024-07-16 23:41:29 +05:30
Amith Koujalgi
5c9e0b7d8a Update README.md 2024-07-16 23:40:12 +05:30
koujalgi.amith@gmail.com
2f8577a24d updated README.md
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-16 09:42:47 +05:30
Amith Koujalgi
02116b7025 Create CODE_OF_CONDUCT.md 2024-07-15 22:57:01 +05:30
Amith Koujalgi
f3778f8786 Update README.md 2024-07-15 22:52:11 +05:30
Amith Koujalgi
c6141634db Update README.md 2024-07-15 22:29:36 +05:30
Amith Koujalgi
d9f98ad901 Update README.md 2024-07-15 22:23:46 +05:30
Amith Koujalgi
79d97445b8 Update README.md 2024-07-14 23:13:21 +05:30
Amith Koujalgi
1c40697c96 Update README.md 2024-07-14 23:11:48 +05:30
koujalgi.amith@gmail.com
f03026abb3 updated README.md
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-14 23:04:30 +05:30
koujalgi.amith@gmail.com
63a6e81ac2 updated README.md
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-14 22:54:51 +05:30
koujalgi.amith@gmail.com
76cad0f584 updated README.md
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-14 22:50:54 +05:30
Amith Koujalgi
bee2908d1e Updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-14 13:34:12 +05:30
Amith Koujalgi
8a4c9fd969 Updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-14 13:27:23 +05:30
koujalgi.amith@gmail.com
d470f940b0 updated README.md
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-14 11:39:36 +05:30
koujalgi.amith@gmail.com
df402efaba updated README.md
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-14 11:37:47 +05:30
koujalgi.amith@gmail.com
677362abbf updated README.md
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-14 11:35:45 +05:30
koujalgi.amith@gmail.com
81689be194 Refactored tools API
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-14 11:23:36 +05:30
koujalgi.amith@gmail.com
fd93036d08 Refactor
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-14 00:07:23 +05:30
koujalgi.amith@gmail.com
c9b05a725b Refactor
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-14 00:05:43 +05:30
koujalgi.amith@gmail.com
a4e1b4afe9 Removed old maven-publish.yml
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-14 00:02:20 +05:30
koujalgi.amith@gmail.com
3d21813abb updated README.md
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-14 00:00:52 +05:30
koujalgi.amith@gmail.com
383d0f56ca Updated generateAsync() API
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-13 23:54:49 +05:30
koujalgi.amith@gmail.com
af1b213a76 updated README.md
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-13 21:50:45 +05:30
koujalgi.amith@gmail.com
fed89a9643 updated README.md
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-13 21:49:26 +05:30
Amith Koujalgi
fd32aa33ff Updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-13 14:30:13 +05:30
Amith Koujalgi
b8a13e89b1 Updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-13 14:22:08 +05:30
Amith Koujalgi
c8f27edd6e Updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-13 14:19:44 +05:30
Amith Koujalgi
5a936d8174 Updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-13 14:18:01 +05:30
Amith Koujalgi
9b5ddbf4c4 Updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-13 14:10:52 +05:30
Amith Koujalgi
7c233d5734 Updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-13 14:09:01 +05:30
Amith Koujalgi
e85aeae6e0 Updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-13 14:02:50 +05:30
Amith Koujalgi
a05052e095 Updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-13 13:47:08 +05:30
Amith Koujalgi
10eb803e26 Updated README.md
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-13 13:46:43 +05:30
koujalgi.amith@gmail.com
bd2da8fdda updated README.md
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-13 19:06:40 +05:30
koujalgi.amith@gmail.com
b0bb082bec updated README.md
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-13 19:06:11 +05:30
koujalgi.amith@gmail.com
81f564ef7f updated README.md
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-13 19:05:01 +05:30
koujalgi.amith@gmail.com
006b52f3db updated README.md
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-13 19:03:12 +05:30
Amith Koujalgi
16634e60e4 clean up
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-13 18:34:51 +05:30
koujalgi.amith@gmail.com
db8b73075b clean up
Signed-off-by: koujalgi.amith@gmail.com <koujalgi.amith@gmail.com>
2024-07-13 18:33:51 +05:30
Amith Koujalgi
dc9f79959a Updated logback-classic version
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-13 18:32:04 +05:30
Amith Koujalgi
88f6d00763 Docs fixes
Signed-off-by: Amith Koujalgi <amith.koujalgi@razorthink.com>
2024-07-13 18:24:48 +05:30
Amith Koujalgi
fd3a989a49 Updated GH action to publish docs on release creation
Signed-off-by: Amith Koujalgi <amith.koujalgi@razorthink.com>
2024-07-13 18:08:38 +05:30
Amith Koujalgi
7580c6a549 Merge pull request #56 from amithkoujalgi/test
Updated GitHub workflows to publish to GitHub packages instead of Maven Central
2024-07-13 18:06:44 +05:30
Amith Koujalgi
9e6503d84b clean up
Signed-off-by: Amith Koujalgi <amith.koujalgi@razorthink.com>
2024-07-13 18:01:53 +05:30
Amith Koujalgi
ee21f7fdd8 Updated GH action to publish maven pkg
Signed-off-by: Amith Koujalgi <amith.koujalgi@razorthink.com>
2024-07-13 17:55:25 +05:30
Amith Koujalgi
ecc295f484 Updated GH action to publish maven pkg
Signed-off-by: Amith Koujalgi <amith.koujalgi@razorthink.com>
2024-07-13 17:46:09 +05:30
Amith Koujalgi
c528fef5fc Updated GH action to publish maven pkg
Signed-off-by: Amith Koujalgi <amith.koujalgi@razorthink.com>
2024-07-13 17:38:03 +05:30
Amith Koujalgi
38f1bda105 Merge branch 'main' of https://github.com/amithkoujalgi/ollama4j into test 2024-07-13 17:27:43 +05:30
Amith Koujalgi
d8a703503a Merge remote-tracking branch 'origin/main' 2024-07-13 11:51:09 +05:30
Amith Koujalgi
dd9ba7c937 discarded outdated GH workflows
Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
2024-07-13 11:50:52 +05:30
amithkoujalgi
cf52c9610c [maven-release-plugin] prepare for next development iteration 2024-07-12 12:07:01 +00:00
amithkoujalgi
e8d709e99a [maven-release-plugin] prepare release v1.0.77 2024-07-12 12:07:00 +00:00
Amith Koujalgi
51fbedad69 Updated README.md 2024-07-12 17:35:41 +05:30
amithkoujalgi
953605fa73 [maven-release-plugin] prepare for next development iteration 2024-07-12 11:38:03 +00:00
amithkoujalgi
30bfdd9c6d [maven-release-plugin] prepare release v1.0.76 2024-07-12 11:38:02 +00:00
Amith Koujalgi
91ee6cb4c1 Added support for tools/function calling - specifically for Mistral's latest model. 2024-07-12 17:06:41 +05:30
amithkoujalgi
8ef6fac28e [maven-release-plugin] prepare for next development iteration 2024-07-11 19:37:04 +00:00
amithkoujalgi
d9e3860123 [maven-release-plugin] prepare release v1.0.75 2024-07-11 19:37:03 +00:00
Amith Koujalgi
515d1f0399 Update README.md 2024-07-12 01:05:56 +05:30
amithkoujalgi
be549430c5 [maven-release-plugin] prepare for next development iteration 2024-07-11 18:07:00 +00:00
amithkoujalgi
4744315d45 [maven-release-plugin] prepare release v1.0.74 2024-07-11 18:06:59 +00:00
Amith Koujalgi
8eea19a539 Added model types - gemma2 and qwen2 2024-07-11 23:35:39 +05:30
amithkoujalgi
b5801d84e0 [maven-release-plugin] prepare for next development iteration 2024-06-12 03:22:50 +00:00
amithkoujalgi
165d04b1bb [maven-release-plugin] prepare release v1.0.73 2024-06-12 03:22:49 +00:00
Amith Koujalgi
16d2160b52 Merge pull request #51 from ajt001/main 2024-06-12 08:51:43 +05:30
andrewtodd
e39c47b8e1 Add codestral as a model. 2024-06-09 16:43:46 +10:00
amithkoujalgi
bb0785140b [maven-release-plugin] prepare for next development iteration 2024-05-20 14:55:02 +00:00
amithkoujalgi
e33ad1a1e3 [maven-release-plugin] prepare release v1.0.72 2024-05-20 14:55:01 +00:00
Amith Koujalgi
cd60c506cb Merge pull request #47 from kelvinwatson/kelvinwatson/gradleDependencyInReadMe
Update README to include gradle project set up options
2024-05-20 20:24:03 +05:30
amithkoujalgi
b55925df28 [maven-release-plugin] prepare for next development iteration 2024-05-20 14:53:55 +00:00
amithkoujalgi
3a9b8c309d [maven-release-plugin] prepare release v1.0.71 2024-05-20 14:53:54 +00:00
Amith Koujalgi
bf07159522 Merge pull request #49 from AgentSchmecker/bugfix/48
Changes Datatype of ModelResponse fields to OffsetTime
2024-05-20 20:22:56 +05:30
AgentSchmecker
f8ca4d041d Changes DateTime types of Model.java to OffsetDatetime
Fixes #48
2024-05-20 11:10:03 +00:00
AgentSchmecker
9c6a55f7b0 Generalizes Abstract Serialization Test Class
Removes the "Request" naming context as this base class technically serves for general serialization purposes.
2024-05-20 11:08:49 +00:00
Kelvin Watson
2866d83a2f Update README.md 2024-05-19 11:58:08 -07:00
Kelvin Watson
45e5d07581 update README to include gradle options 2024-05-19 11:57:09 -07:00
amithkoujalgi
3a264cb6bb [maven-release-plugin] prepare for next development iteration 2024-05-19 13:57:34 +00:00
amithkoujalgi
e1b9d42771 [maven-release-plugin] prepare release v1.0.70 2024-05-19 13:57:32 +00:00
Amith Koujalgi
1a086c37c0 Merge pull request #46 from AgentSchmecker/model_update
Updates Model.java to be up to date with current OllamaAPI
2024-05-19 19:26:28 +05:30
Markus Klenke
54edba144c Merge pull request #2 from AgentSchmecker/model_update
Updates Model.java to be up to date with current OllamaAPI
2024-05-17 00:09:15 +02:00
AgentSchmecker
3ed3187ba9 Updates Model.java to be up to date with current OllamaAPI
Also adds Jackson-JSR310 for java.time JSON Mapping
2024-05-16 22:00:11 +00:00
amithkoujalgi
b7cd81a7f5 [maven-release-plugin] prepare for next development iteration 2024-05-14 05:42:56 +00:00
amithkoujalgi
e750c2d7f9 [maven-release-plugin] prepare release v1.0.69 2024-05-14 05:42:55 +00:00
Amith Koujalgi
62f16131f3 Merge remote-tracking branch 'origin/main' 2024-05-14 11:11:51 +05:30
Amith Koujalgi
2cbaf12d7c Updated library usages in README.md 2024-05-14 11:11:38 +05:30
amithkoujalgi
e2d555d404 [maven-release-plugin] prepare for next development iteration 2024-05-14 05:29:36 +00:00
amithkoujalgi
c296b34174 [maven-release-plugin] prepare release v1.0.68 2024-05-14 05:29:35 +00:00
Amith Koujalgi
e8f99f28ec Updated library usages in README.md 2024-05-14 10:58:29 +05:30
amithkoujalgi
250b1abc79 [maven-release-plugin] prepare for next development iteration 2024-05-14 05:07:20 +00:00
amithkoujalgi
42b15ad93f [maven-release-plugin] prepare release v1.0.67 2024-05-14 05:07:18 +00:00
Amith Koujalgi
6f7a714bae Merge remote-tracking branch 'origin/main' 2024-05-14 10:36:04 +05:30
Amith Koujalgi
92618e5084 Updated OllamaChatResponseModel to include done_reason field. Refer to the Ollama version: https://github.com/ollama/ollama/releases/tag/v0.1.37 2024-05-14 10:35:55 +05:30
amithkoujalgi
391a9242c3 [maven-release-plugin] prepare for next development iteration 2024-05-14 04:59:08 +00:00
amithkoujalgi
e1b6dc3b54 [maven-release-plugin] prepare release v1.0.66 2024-05-14 04:59:07 +00:00
Amith Koujalgi
04124cf978 Updated default request timeout to 10 seconds 2024-05-14 10:27:56 +05:30
amithkoujalgi
e4e717b747 [maven-release-plugin] prepare for next development iteration 2024-05-13 15:36:38 +00:00
amithkoujalgi
10d2a8f5ff [maven-release-plugin] prepare release v1.0.65 2024-05-13 15:36:37 +00:00
Amith Koujalgi
899fa38805 - Updated newly supported Ollama models
- Added `ConsoleOutputStreamHandler`
2024-05-13 21:05:20 +05:30
amithkoujalgi
2df878c953 [maven-release-plugin] prepare for next development iteration 2024-04-22 04:39:41 +00:00
amithkoujalgi
78a5eedc8f [maven-release-plugin] prepare release v1.0.64 2024-04-22 04:39:40 +00:00
Amith Koujalgi
364f961ee2 Merge pull request #37 from anjeongkyun/eddy/add-test1
Adds test case of OllamaChatRequestBuilder
2024-04-22 10:08:32 +05:30
anjeongkyun
b21aa6add2 Adds test of testWithKeepAlive 2024-04-21 23:52:42 +09:00
anjeongkyun
ec4abd1c2d Adds test of testWithStreaming 2024-04-21 23:49:42 +09:00
anjeongkyun
9900ae92fb Adds test of testWithTemplate 2024-04-21 23:43:49 +09:00
anjeongkyun
fa20daf6e5 Adds test case of testRequestWithOptions 2024-04-21 23:37:18 +09:00
amithkoujalgi
44949c0559 [maven-release-plugin] prepare for next development iteration 2024-03-20 10:44:54 +00:00
amithkoujalgi
e88711a017 [maven-release-plugin] prepare release v1.0.63 2024-03-20 10:44:52 +00:00
Amith Koujalgi
32169ded18 Merge pull request #33 from anjeongkyun/fix/prompt-builder-docs 2024-03-20 16:13:42 +05:30
anjeongkyun
4b2d566fd9 Fixes generate method of prompt builder 2024-03-19 21:04:26 +09:00
amithkoujalgi
fb4b7a7ce5 [maven-release-plugin] prepare for next development iteration 2024-03-02 17:49:32 +00:00
amithkoujalgi
18f27775b0 [maven-release-plugin] prepare release v1.0.62 2024-03-02 17:49:31 +00:00
Amith Koujalgi
cb462ad05a Merge remote-tracking branch 'origin/main' 2024-03-02 23:18:23 +05:30
Amith Koujalgi
1eec22ca1a Added release-assets GH action 2024-03-02 23:18:15 +05:30
amithkoujalgi
c1f3c51f88 [maven-release-plugin] prepare for next development iteration 2024-03-02 15:08:58 +00:00
amithkoujalgi
7dd556293f [maven-release-plugin] prepare release v1.0.61 2024-03-02 15:08:57 +00:00
Amith Koujalgi
ee50131ce4 Merge remote-tracking branch 'origin/main' 2024-03-02 20:37:45 +05:30
Amith Koujalgi
2cd47dbfaa [testing] - Added release-assets upload GH action 2024-03-02 20:37:38 +05:30
amithkoujalgi
e5296c1067 [maven-release-plugin] prepare for next development iteration 2024-03-02 14:58:49 +00:00
amithkoujalgi
0f00f05e3d [maven-release-plugin] prepare release v1.0.60 2024-03-02 14:58:48 +00:00
Amith Koujalgi
976a3b82e5 [testing] - Added release-assets upload GH action 2024-03-02 20:27:37 +05:30
amithkoujalgi
ba26d620c4 [maven-release-plugin] prepare for next development iteration 2024-03-02 14:46:51 +00:00
amithkoujalgi
e45246a767 [maven-release-plugin] prepare release v1.0.59 2024-03-02 14:46:50 +00:00
Amith Koujalgi
7336668f0c [testing] - Added release-assets upload GH action 2024-03-02 20:10:04 +05:30
amithkoujalgi
11701fb222 [maven-release-plugin] prepare for next development iteration 2024-02-29 03:25:18 +00:00
amithkoujalgi
b1ec12c4e9 [maven-release-plugin] prepare release v1.0.58 2024-02-29 03:25:17 +00:00
Amith Koujalgi
d0b0a0fc97 Updated model types 2024-02-29 08:54:03 +05:30
amithkoujalgi
20774fca6b [maven-release-plugin] prepare for next development iteration 2024-02-28 12:55:28 +00:00
amithkoujalgi
9c46b510d8 [maven-release-plugin] prepare release v1.0.57 2024-02-28 12:55:26 +00:00
Amith Koujalgi
9d887b60a8 Merge pull request #30 from AgentSchmecker/feature/options_for_embedding_request
Adds options to EmbeddingsRequest
2024-02-28 18:24:10 +05:30
Markus Klenke
63d4de4e24 Adds options to EmbeddingsRequest
Additionally refactors the Embedding Models and Tests
2024-02-25 20:53:45 +00:00
Amith Koujalgi
9224d2da06 Updated base route of docs 2024-02-22 11:50:41 +05:30
amithkoujalgi
a10692e2f1 [maven-release-plugin] prepare for next development iteration 2024-02-20 03:23:40 +00:00
amithkoujalgi
b0c152a42e [maven-release-plugin] prepare release v1.0.56 2024-02-20 03:23:39 +00:00
Amith Koujalgi
f44767e023 updated docs 2024-02-20 08:52:30 +05:30
amithkoujalgi
aadef0a57c [maven-release-plugin] prepare for next development iteration 2024-02-19 05:38:03 +00:00
amithkoujalgi
777ee7ffe0 [maven-release-plugin] prepare release v1.0.55 2024-02-19 05:38:02 +00:00
Amith Koujalgi
dcf1d0bdbc Merge pull request #29 from AgentSchmecker/feature/add-streaming-to-generate-api
Adds streaming feature to generate API
2024-02-19 11:06:57 +05:30
Markus Klenke
13b7111a42 Adds toString implementation for Model and ModelMeta to be json represented 2024-02-18 22:53:34 +00:00
Markus Klenke
09442d37a3 Fixes unmarshalling exception on ModelDetail 2024-02-18 22:53:01 +00:00
Markus Klenke
1e66bdb07f Adds documentation for streamed generate API call 2024-02-18 21:41:35 +00:00
Markus Klenke
b423090db9 Merge branch 'amithkoujalgi:main' into feature/add-streaming-to-generate-api 2024-02-18 22:30:20 +01:00
amithkoujalgi
a32d94efbf [maven-release-plugin] prepare for next development iteration 2024-02-18 05:22:10 +00:00
amithkoujalgi
31f8302849 [maven-release-plugin] prepare release v1.0.54 2024-02-18 05:22:08 +00:00
Amith Koujalgi
6487756764 Merge pull request #28 from AgentSchmecker/feature/advanced_params_for_generate
Adds advanced request parameters to chat/generate requests
2024-02-18 10:51:04 +05:30
Markus Klenke
abb76ad867 Adds streaming feature to Generate APIs 2024-02-16 17:03:15 +00:00
Markus Klenke
cf4e7a96e8 Optimizes ChatStreamObserver to use only the last message instead of parsing all messages again 2024-02-16 16:31:39 +00:00
Markus Klenke
0f414f71a3 Changes isEmpty method for BooleanToJsonFormatFlagSerializer to override non deprecated supermethod 2024-02-16 16:01:18 +00:00
Markus Klenke
2b700fdad8 Adds missing pom dependency for JSON comparison tests 2024-02-16 15:58:48 +00:00
Markus Klenke
06c5daa253 Adds additional properties to chat and generate requests 2024-02-16 15:57:48 +00:00
Markus Klenke
91aab6cbd1 Fixes recursive call in non streamed chat API 2024-02-16 15:57:14 +00:00
Markus Klenke
f38a00ebdc Fixes BooleanToJsonFormatFlagSerializer 2024-02-16 15:56:32 +00:00
Markus Klenke
0f73ea75ab Removes unnecessary serialize method of Serializer 2024-02-16 15:56:02 +00:00
Markus Klenke
8fe869afdb Adds additional request properties and refactors common request fields to OllamaCommonRequestModel 2024-02-16 13:15:24 +00:00
amithkoujalgi
2d274c4f5b [maven-release-plugin] prepare for next development iteration 2024-02-16 04:42:42 +00:00
amithkoujalgi
713a3239a4 [maven-release-plugin] prepare release v1.0.53 2024-02-16 04:42:40 +00:00
Amith Koujalgi
a9e7958d44 Merge pull request #26 from AgentSchmecker/main
Adds streaming functionality for chat
2024-02-16 10:11:32 +05:30
Markus Klenke
f38e84053f Adds documentation for streamed chat API call 2024-02-14 16:45:46 +00:00
Markus Klenke
7eb16b7ba0 Merge pull request #1 from AgentSchmecker/feature/streaming-chat
Adds streaming functionality for chat
2024-02-14 16:30:51 +01:00
amithkoujalgi
5a3889d8ee [maven-release-plugin] prepare for next development iteration 2024-02-14 13:44:30 +00:00
amithkoujalgi
2c52f4d0bb [maven-release-plugin] prepare release v1.0.52 2024-02-14 13:44:29 +00:00
Amith Koujalgi
32c4231eb5 Merge pull request #25 from AgentSchmecker/feature/chat_messages_with_images
Adds image capability to chat API
2024-02-14 19:13:22 +05:30
Markus Klenke
e9621f054d Adds integration test for chat streaming API 2024-02-13 18:11:59 +00:00
Markus Klenke
b41b62220c Adds chat with stream functionality in OllamaAPI 2024-02-13 17:59:27 +00:00
Markus Klenke
c89440cbca Adds OllamaStream handling 2024-02-13 17:56:07 +00:00
Markus Klenke
1aeb555a53 Adds documentation for chat with images use case 2024-02-13 10:22:13 +00:00
Markus Klenke
9aff3ec5d9 Extends chat with image integration test to use history 2024-02-13 10:20:37 +00:00
Markus Klenke
b4eaf0cfb5 Fixes ChatRequestBuilder 2024-02-13 10:20:06 +00:00
Markus Klenke
199cb6082d Cleans up unused imports 2024-02-12 22:09:55 +00:00
Markus Klenke
37bfe26a6d Adds integrationtest for chatImagesByURL and formats test cases 2024-02-12 22:08:53 +00:00
Markus Klenke
3769386539 Changes images property of ChatMessage to type byte[] 2024-02-12 22:08:10 +00:00
Markus Klenke
84a6e57f42 Refactors loadImageBytesFromUrl to Utils class 2024-02-12 22:07:15 +00:00
Markus Klenke
14d2474ee9 Adds integrationtestcase for chat message with image content 2024-02-12 21:35:43 +00:00
Markus Klenke
ca613ed80a Adds base64 encoding for chat-API request messages 2024-02-12 21:32:07 +00:00
Markus Klenke
bbcd458849 Adds sync-response interpreter for responseCode 400 2024-02-12 21:30:55 +00:00
Markus Klenke
bc885894f8 Extends OllamaChatResponseModel with error property 2024-02-12 21:29:27 +00:00
amithkoujalgi
bc83df6971 [maven-release-plugin] prepare for next development iteration 2024-02-12 16:34:11 +00:00
amithkoujalgi
43f43c9f81 [maven-release-plugin] prepare release v1.0.51 2024-02-12 16:34:10 +00:00
Amith Koujalgi
65f00defcf Updates to javadoc 2024-02-12 22:03:01 +05:30
Amith Koujalgi
d716b81342 Merge pull request #23 from AgentSchmecker/feature/chat-request-model
Adds feature for /api/chat access via OllamaAPI
2024-02-12 21:36:33 +05:30
Markus Klenke
272ba445f6 Moves documentation to end of sidebar group 2024-02-10 23:23:01 +00:00
Markus Klenke
d9816d8869 Removes unused import 2024-02-10 23:13:11 +00:00
Markus Klenke
874736eb16 adds documentation for chat API 2024-02-10 22:58:50 +00:00
Markus Klenke
9c16ccbf81 Changes test texts for chat integration tests 2024-02-10 22:58:29 +00:00
Markus Klenke
40a3aa31dc Adds helper reset method to OllamaChatRequestBuilder 2024-02-10 22:51:47 +00:00
Markus Klenke
90669b611b Changes implementation of withMessages in ChatRequestBuilder 2024-02-10 22:28:50 +00:00
Markus Klenke
f10c7ac725 Merge remote-tracking branch 'upstream/main' into feature/chat-request-model 2024-02-10 22:09:59 +00:00
amithkoujalgi
38dca3cd0d [maven-release-plugin] prepare for next development iteration 2024-02-10 05:13:11 +00:00
amithkoujalgi
44bb35b168 [maven-release-plugin] prepare release v1.0.50 2024-02-10 05:13:09 +00:00
Amith Koujalgi
9832caf503 Update intg test class 2024-02-10 10:41:53 +05:30
Markus Klenke
0c4e8e306e Organizes imports 2024-02-10 00:26:25 +00:00
Markus Klenke
075416eb9c Adds Integration tests for chat API 2024-02-10 00:25:09 +00:00
Markus Klenke
4260fbbc32 Fixes OllamaAPI after Renaming 2024-02-10 00:24:12 +00:00
Markus Klenke
0bec697a86 Specifies OllamaChatResponseModel 2024-02-10 00:23:26 +00:00
Markus Klenke
4ca6eef8fd Fixes JSON Binding for OllamaChatMessageRole 2024-02-10 00:22:55 +00:00
Markus Klenke
a635dd9be2 Renames request caller to endpoint caller 2024-02-10 00:22:21 +00:00
Markus Klenke
14982011d9 Adds chat-API with OllamaChatResult 2024-02-09 22:49:28 +00:00
Markus Klenke
65d852fdc9 Creates OllamaChatResult extension class 2024-02-09 22:48:48 +00:00
Markus Klenke
d483c23c81 Adds documentation to OllamaServerCaller 2024-02-09 22:48:19 +00:00
Markus Klenke
273b1e47ca Removes unneccessary override of OllamaRequestBody method 2024-02-09 21:58:17 +00:00
Markus Klenke
5c5cdba4cd Merge remote-tracking branch 'origin/main' into feature/chat-request-model 2024-02-09 21:56:10 +00:00
amithkoujalgi
24674ea483 [maven-release-plugin] prepare for next development iteration 2024-02-09 18:19:37 +00:00
amithkoujalgi
5d3a975e4c [maven-release-plugin] prepare release v1.0.49 2024-02-09 18:19:36 +00:00
Amith Koujalgi
ad670c3c62 Merge pull request #22 from AgentSchmecker/bugfix/add_model_property_to_model_api
Adds model property to Model Type
2024-02-09 23:48:36 +05:30
Markus Klenke
f9063484f3 Fixes recursive JacksonMarshalling on BodyPublisher 2024-02-09 17:07:18 +00:00
Markus Klenke
5e2a07ad41 Adds model property to Model Type 2024-02-09 16:54:09 +00:00
Markus Klenke
00a3e51a93 Extends OllamaAPI by Chat methods and refactors synchronous Generate API Methods 2024-02-09 16:32:52 +00:00
Markus Klenke
bc20468f28 Applies OllamaRequestBody to OllamaRequestModel 2024-02-09 16:30:12 +00:00
Markus Klenke
c7ac50a805 Defines technical Request Caller Object 2024-02-09 16:29:35 +00:00
Markus Klenke
f8cd7bc013 Adds model for Chat Requests to Ollama-API 2024-02-09 16:28:27 +00:00
Markus Klenke
3469bf314b Removes unused Imports 2024-02-09 13:36:49 +00:00
amithkoujalgi
9636807819 [maven-release-plugin] prepare for next development iteration 2024-02-09 06:39:34 +00:00
amithkoujalgi
455251d1d4 [maven-release-plugin] prepare release v1.0.48 2024-02-09 06:39:33 +00:00
Amith Koujalgi
ec00ffae7f Merge pull request #21 from AgentSchmecker/main
Correction of Documentation for ask-async
2024-02-09 12:08:33 +05:30
Markus Klenke
d969c7ad46 Fixes currently not working code snippet for ask-async doc 2024-02-08 22:52:41 +00:00
amithkoujalgi
02bf769188 [maven-release-plugin] prepare for next development iteration 2024-02-01 14:39:21 +00:00
amithkoujalgi
1c8a6b4f2a [maven-release-plugin] prepare release v1.0.47 2024-02-01 14:39:20 +00:00
Amith Koujalgi
60fe5d6ffb Update APIs from ask to generate 2024-02-01 20:08:01 +05:30
amithkoujalgi
327ae7437f [maven-release-plugin] prepare for next development iteration 2024-01-23 19:43:40 +00:00
amithkoujalgi
795b9f2b9b [maven-release-plugin] prepare release v1.0.46 2024-01-23 19:43:39 +00:00
Amith Koujalgi
54da069e68 Merge remote-tracking branch 'origin/main' 2024-01-24 01:12:32 +05:30
Amith Koujalgi
bfc5cebac1 clean up 2024-01-24 01:12:21 +05:30
amithkoujalgi
d46b1d48d8 [maven-release-plugin] prepare for next development iteration 2024-01-23 19:39:05 +00:00
amithkoujalgi
96320e7761 [maven-release-plugin] prepare release v1.0.45 2024-01-23 19:39:04 +00:00
Amith Koujalgi
e6472f0a81 clean up 2024-01-24 01:07:51 +05:30
amithkoujalgi
816bbd9bbf [maven-release-plugin] prepare for next development iteration 2024-01-04 04:30:00 +00:00
amithkoujalgi
da1123271d [maven-release-plugin] prepare release v1.0.44 2024-01-04 04:29:59 +00:00
Amith Koujalgi
12f099260f clean up 2024-01-04 09:58:53 +05:30
amithkoujalgi
35728ae208 [maven-release-plugin] prepare for next development iteration 2024-01-03 10:50:04 +00:00
amithkoujalgi
7dba9cc798 [maven-release-plugin] prepare release v1.0.43 2024-01-03 10:50:02 +00:00
Amith Koujalgi
bb1c920e22 - updated askWithImageFiles and askWithImageURLs APIs to use Options
- updated jacoco version
2024-01-03 16:18:49 +05:30
amithkoujalgi
770cbd7639 [maven-release-plugin] prepare for next development iteration 2024-01-02 18:10:40 +00:00
amithkoujalgi
b43c9b8d93 [maven-release-plugin] prepare release v1.0.42 2024-01-02 18:10:38 +00:00
Amith Koujalgi
935964c9b0 Merge remote-tracking branch 'origin/main' 2024-01-02 23:39:29 +05:30
Amith Koujalgi
9aed9a5237 added OptionsBuilder and support for specifying extra params for ask API 2024-01-02 23:39:18 +05:30
amithkoujalgi
6c082c94c4 [maven-release-plugin] prepare for next development iteration 2024-01-02 17:49:26 +00:00
amithkoujalgi
6c93b8304a [maven-release-plugin] prepare release v1.0.41 2024-01-02 17:49:24 +00:00
Amith Koujalgi
85acf0fe78 added OptionsBuilder and support for specifying extra params for ask API 2024-01-02 23:18:17 +05:30
amithkoujalgi
fe64c6dd10 [maven-release-plugin] prepare for next development iteration 2023-12-30 20:05:10 +00:00
amithkoujalgi
b15066a204 [maven-release-plugin] prepare release v1.0.40 2023-12-30 20:05:08 +00:00
Amith Koujalgi
e2b29b6a07 added Prompt Builder 2023-12-31 01:33:59 +05:30
amithkoujalgi
7470ebe846 [maven-release-plugin] prepare for next development iteration 2023-12-30 17:42:18 +00:00
amithkoujalgi
422efa68aa [maven-release-plugin] prepare release v1.0.39 2023-12-30 17:42:17 +00:00
Amith Koujalgi
f4d8671922 updated docs 2023-12-30 23:10:50 +05:30
amithkoujalgi
70b136c9fc [maven-release-plugin] prepare for next development iteration 2023-12-30 15:53:15 +00:00
amithkoujalgi
7adb5e93c7 [maven-release-plugin] prepare release v1.0.38 2023-12-30 15:53:14 +00:00
Amith Koujalgi
a8b7117878 updated docs 2023-12-30 21:22:04 +05:30
amithkoujalgi
3bd99cd1e8 [maven-release-plugin] prepare for next development iteration 2023-12-30 08:48:46 +00:00
amithkoujalgi
1d6af26857 [maven-release-plugin] prepare release v1.0.37 2023-12-30 08:48:45 +00:00
Amith Koujalgi
14d18d731f Fixed javadoc 2023-12-30 14:17:36 +05:30
amithkoujalgi
c8d7cbbc2c [maven-release-plugin] prepare for next development iteration 2023-12-30 08:34:44 +00:00
136 changed files with 10483 additions and 2980 deletions

58
.github/workflows/gh-mvn-publish.yml vendored Normal file
View File

@@ -0,0 +1,58 @@
name: Release Artifacts to GitHub Maven Packages
on:
release:
types: [ created ]
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- uses: actions/checkout@v3
- name: Set up JDK 17
uses: actions/setup-java@v3
with:
java-version: '17'
distribution: 'temurin'
server-id: github
settings-path: ${{ github.workspace }}
- name: maven-settings-xml-action
uses: whelk-io/maven-settings-xml-action@v22
with:
servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]'
- name: Find and Replace
uses: jacobtomlinson/gha-find-replace@v3
with:
find: "ollama4j-revision"
replace: ${{ github.ref_name }}
regex: false
- name: Find and Replace
uses: jacobtomlinson/gha-find-replace@v3
with:
find: "mvn-repo-id"
replace: github
regex: false
- name: Import GPG key
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: List keys
run: gpg -K
- name: Build with Maven
run: mvn --file pom.xml -U clean package -Punit-tests
- name: Publish to GitHub Packages Apache Maven
run: mvn deploy -Punit-tests -s $GITHUB_WORKSPACE/settings.xml -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=github -Drepo.user=${{ secrets.GH_MVN_USER }} -Drepo.pass=${{ secrets.GH_MVN_PASS }} -DaltDeploymentRepository=github::default::https://maven.pkg.github.com/ollama4j/ollama4j
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

24
.github/workflows/label-issue-stale.yml vendored Normal file
View File

@@ -0,0 +1,24 @@
name: Mark stale issues
on:
workflow_dispatch: # for manual run
schedule:
- cron: '0 0 * * *' # Runs every day at midnight
permissions:
contents: write # only for delete-branch option
issues: write
jobs:
stale:
runs-on: ubuntu-latest
steps:
- name: Mark stale issues
uses: actions/stale@v8
with:
repo-token: ${{ github.token }}
days-before-stale: 15
stale-issue-message: 'This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs.'
days-before-close: 7
stale-issue-label: 'stale'
exempt-issue-labels: 'pinned,security'

View File

@@ -1,68 +1,95 @@
# This workflow will build a package using Maven and then publish it to GitHub packages when a release is created
# For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path
name: Test and Publish Package
#on:
# release:
# types: [ "created" ]
name: Release Artifacts to Maven Central
on:
push:
branches: [ "main" ]
workflow_dispatch:
release:
types: [ created ]
#on:
# pull_request:
# types: [ opened, reopened ]
# branches: [ "main" ]
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: write
packages: write
steps:
- uses: actions/checkout@v3
- name: Set up JDK 11
- name: Set up JDK 17
uses: actions/setup-java@v3
with:
java-version: '11'
distribution: 'adopt-hotspot'
java-version: '17'
distribution: 'temurin'
server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
settings-path: ${{ github.workspace }} # location for the settings.xml file
- name: Build with Maven
run: mvn --file pom.xml -U clean package -Punit-tests
- name: Set up Apache Maven Central (Overwrite settings.xml)
uses: actions/setup-java@v3
with: # running setup-java again overwrites the settings.xml
java-version: '11'
distribution: 'adopt-hotspot'
cache: 'maven'
server-id: ossrh
server-username: MAVEN_USERNAME
server-password: MAVEN_PASSWORD
gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
gpg-passphrase: MAVEN_GPG_PASSPHRASE
- name: Set up Maven cache
uses: actions/cache@v3
- name: maven-settings-xml-action
uses: whelk-io/maven-settings-xml-action@v22
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-
- name: Build
run: mvn -B -ntp clean install
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]'
- name: Import GPG key
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: List keys
run: gpg -K
- name: Find and Replace
uses: jacobtomlinson/gha-find-replace@v3
with:
find: "ollama4j-revision"
replace: ${{ github.ref_name }}
regex: false
- name: Find and Replace
uses: jacobtomlinson/gha-find-replace@v3
with:
find: "mvn-repo-id"
replace: central
regex: false
- name: Publish to Maven Central
run: mvn deploy -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=central -Drepo.user=${{ secrets.MVN_USER }} -Drepo.pass=${{ secrets.MVN_PASS }}
- name: Upload Release Asset - JAR
uses: actions/upload-release-asset@v1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
- name: Publish to GitHub Packages Apache Maven
# if: >
# github.event_name != 'pull_request' &&
# github.ref_name == 'main' &&
# contains(github.event.head_commit.message, 'release')
run: |
git config --global user.email "koujalgi.amith@gmail.com"
git config --global user.name "amithkoujalgi"
mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ github.event.release.upload_url }}
asset_path: target/ollama4j-${{ github.ref_name }}.jar
asset_name: ollama4j-${{ github.ref_name }}.jar
asset_content_type: application/x-jar
- name: Upload Release Asset - Javadoc JAR
uses: actions/upload-release-asset@v1
env:
MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ github.event.release.upload_url }}
asset_path: target/ollama4j-${{ github.ref_name }}-javadoc.jar
asset_name: ollama4j-${{ github.ref_name }}-javadoc.jar
asset_content_type: application/x-jar
- name: Upload Release Asset - Sources JAR
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ github.event.release.upload_url }}
asset_path: target/ollama4j-${{ github.ref_name }}-sources.jar
asset_name: ollama4j-${{ github.ref_name }}-sources.jar
asset_content_type: application/x-jar

View File

@@ -2,9 +2,8 @@
name: Deploy Docs to GH Pages
on:
# Runs on pushes targeting the default branch
push:
branches: [ "main" ]
release:
types: [ created ]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
@@ -47,16 +46,29 @@ jobs:
- run: cd docs && npm ci
- run: cd docs && npm run build
- name: Find and Replace
uses: jacobtomlinson/gha-find-replace@v3
with:
find: "ollama4j-revision"
replace: ${{ github.ref_name }}
regex: false
- name: Build with Maven
run: mvn --file pom.xml -U clean package && cp -r ./target/apidocs/. ./docs/build/apidocs
- name: Doxygen Action
uses: mattnotmitt/doxygen-action@v1.1.0
with:
doxyfile-path: "./Doxyfile"
working-directory: "."
- name: Setup Pages
uses: actions/configure-pages@v3
uses: actions/configure-pages@v5
- name: Upload artifact
uses: actions/upload-pages-artifact@v2
uses: actions/upload-pages-artifact@v3
with:
# Upload entire repository
path: './docs/build/.'
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v2
uses: actions/deploy-pages@v4

View File

@@ -1,52 +0,0 @@
# Simple workflow for deploying static content to GitHub Pages
name: Deploy Javadoc content to Pages
on:
# Runs on pushes targeting the default branch
push:
branches: [ "none" ]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
contents: read
pages: write
id-token: write
packages: write
# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
group: "pages"
cancel-in-progress: false
jobs:
# Single deploy job since we're just deploying
deploy:
runs-on: ubuntu-latest
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
steps:
- uses: actions/checkout@v3
- name: Set up JDK 11
uses: actions/setup-java@v3
with:
java-version: '11'
distribution: 'adopt-hotspot'
server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
settings-path: ${{ github.workspace }} # location for the settings.xml file
- name: Build with Maven
run: mvn --file pom.xml -U clean package
- name: Setup Pages
uses: actions/configure-pages@v3
- name: Upload artifact
uses: actions/upload-pages-artifact@v2
with:
# Upload entire repository
path: './target/apidocs/.'
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v2

29
.github/workflows/run-tests.yml vendored Normal file
View File

@@ -0,0 +1,29 @@
name: Run Tests
on:
workflow_dispatch:
inputs:
branch:
description: 'Branch name to run tests on'
required: true
default: 'main'
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.inputs.branch }}
- name: Set up JDK 17
uses: actions/setup-java@v3
with:
java-version: '17'
distribution: 'temurin'
server-id: github
settings-path: ${{ github.workspace }}
- name: Run unit tests
run: mvn clean test -Punit-tests

6
.gitignore vendored
View File

@@ -37,6 +37,8 @@ build/
### Mac OS ###
.DS_Store
/.idea/
/src/main/java/io/github/amithkoujalgi/ollama4j/core/localtests/
pom.xml.*
release.properties
release.properties
!.idea/icon.svg
src/main/java/io/github/ollama4j/localtests

18
.idea/icon.svg generated Normal file
View File

@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg version="1.1" viewBox="0 0 1478 2048" width="1280" height="1280" xmlns="http://www.w3.org/2000/svg">
<path transform="translate(0)" d="m0 0h1478v2048h-1478z" fill="#FEFEFE"/>
<path transform="translate(411,47)" d="m0 0h24l21 5 17 8 14 10 12 11 10 10 12 16 14 24 11 24 9 24 8 27 6 25 4 21 3 19 3 25 6-2 16-9 29-13 28-10 30-8 26-4 27-2h16l30 2 32 5 19 5 30 10 26 11 20 10 13 8 2-15 6-39 8-36 6-20 9-27 11-24 10-19 12-18 9-11 9-10 12-11 17-11 15-7 19-4h24l18 4 16 7 12 8 10 8 17 17 13 18 12 22 9 20 7 19 9 30 7 33 5 33 3 29 1 15v79l-3 30-4 29-4 20 16 15 17 17 8 7 18 18 9 11 10 12 14 21 9 16 8 16 5 17 7 19 10 34 5 27 3 24 1 14v42l-4 35-6 29-8 27-9 22-12 25-13 22-5 7 2 6 14 29 12 31 8 26 7 29 6 36 2 21 1 19v37l-3 34-4 25-5 24-8 27-8 21-7 16-11 21-15 24 2 5 7 10 8 15 11 29 8 29 6 31 3 22 2 24v57l-4 33-6 27-3 9-3 1h-89l-2-1v-11l2-13 6-21 3-19 1-9v-48l-3-31-4-22-7-27-6-16-8-16-12-21-4-11-3-17v-31l4-13 6-10 11-16 9-15 11-23 10-31 6-26 3-22 1-16v-33l-2-27-4-27-10-39-9-25-8-18-13-25-12-19-4-10-1-5v-13l3-11 4-8 9-10 13-17 8-13 8-14 11-27 7-25 4-21 2-20v-27l-2-22-5-27-6-21-8-22-12-25-8-14-11-16-8-10-11-13-13-13-8-7-17-13-18-11-17-9-15-6-23-7-14-3-17-2h-28l-18 2h-18l-10-3-6-5-16-32-8-14-11-15-8-10-9-10-7-7-14-11-12-9-16-10-19-10-13-6-20-8-17-5-24-5-15-2h-33l-25 4-24 6-22 8-20 9-20 11-19 13-10 8-11 9-13 13-13 17-10 15-10 18-8 18-9 10-6 3h-21l-19-2h-29l-20 3-14 3-27 9-21 10-18 11-16 12-15 13-15 15-11 14-12 17-10 17-8 16-10 25-7 24-5 24-3 25v31l4 30 5 21 9 27 12 25 10 16 7 9 16 15 6 12 3 9v15l-6 16-13 21-14 27-8 20-8 25-7 27-4 23-3 31v35l3 32 5 26 9 30 6 15 10 21 11 17 12 16 8 13 4 13v19l-4 13-12 22-9 15-8 16-7 19-7 26-5 30-2 23v42l3 26 5 22 3 12 1 9v10l-3 1h-81l-11-1-5-21-5-30-2-22v-52l2-25 5-34 5-23 7-25 8-21 11-23 9-12-1-5-14-22-10-19-11-25-10-30-6-24-5-29-3-27-1-17v-35l2-30 4-29 5-26 10-36 9-25 10-23 10-21-1-7-10-14-14-26-7-15-8-20-8-26-6-29-3-25v-66l3-27 7-33 9-29 10-25 8-16 9-17 11-17 11-15 11-13 7-8 56-56-1-6-2-5-4-26-3-32-1-17v-69l3-39 5-35 6-29 8-30 8-23 12-27 12-21 12-16 11-12 7-7 13-10 16-9 11-4z" fill="#010000"/>
<path transform="translate(856,1181)" d="m0 0h13l10 4 6 7 4 9 6 29 5 22 8 16 4-13 7-23 5-12 6-9 9-8 7-3 5-1h10l8 4 5 8v11l-6 17-6 15-4 16v22l8 38 1 9v11l-3 16-8 16-9 9-10 8-6 7-4 8-2 7-1 12v51l-2 17-4 13-11 20-5 15-3 17v21l3 17 6 16 11 28 13 38 10 37 7 33 5 33 3 28 1 18v49l-2 24-4 22-6 18-6 10-7 8-10 6-13 4h-17l-7-4-10-9-11-15-11-16-12-17-9-11-9-10-10-9-13-8-14-5-5-1h-26l-16 4-18 8-18 11-16 12-16 13-17 14-20 15-16 9-13 4h-11l-10-3-7-6-4-8-2-9v-39l2-25-6 8-2 1h-8l-13-4-8-7-4-7v-9l6-12 8-10 9-11 9-14 5-12 2-11v-17l-4-20-6-21-2-13v-16l2-12 8-16 9-13 12-16 13-21 8-17 9-27 4-20 4-39 3-39 3-63v-98l-3-35-3-13 5 2 16 11 13 10 11 9 14 12 17 16 33 33 7 8 12 13 9 11 12 14 8 10 10 13 12 16 13 18 18 27 12 19 6 8 6 4 9 1 12-3 10-6 8-11 4-11v-33l-3-17-4-11-5-7-6-3-15-4-16-9-16-8-4-1h-12l-23 5-8-1-7-6-4-10v-10l4-8 9-8 13-6 13-4 10-1-9-11-8-10-10-15-8-16-7-15-9-27-1-5v-13l3-8 8-8 9-4 6-1 8 3 7 9 15 31 8 12 8 9 2 1-6-21-4-20-1-8v-33l3-10 4-5z" fill="#020101"/>
<path transform="translate(735,724)" d="m0 0h30l24 2 27 4 20 5 27 9 29 14 18 11 16 12 11 9 15 14 12 14 10 14 9 15 7 14 7 19 5 20 2 14v34l-3 20-6 19-6 15-11 19-9 12-11 13-15 15-11 9-16 11-22 12-26 10-13 4-21 5-19 2h-117l-24-3-27-6-28-10-16-8-14-8-14-10-10-8-10-9-10-10-11-14-10-15-10-21-6-18-4-19-1-9v-31l2-15 5-20 8-21 10-19 8-12 10-13 12-13 13-13 11-9 15-11 15-9 14-8 21-9 16-6 22-6 29-5z" fill="#FEFEFE"/>
<path transform="translate(816,1496)" d="m0 0 5 1 13 21 10 18 14 27 15 31 17 40 10 27 12 36 8 28 7 30 5 28 3 28v60l-2 31-3 23-5 17-4 6-5 4-4 1h-14l-6-4-11-14-10-15-12-17-9-11-12-14-8-7-14-10-16-8-12-4-12-2h-20l-16 3-15 5-16 8-18 12-14 11-15 13-14 13-22 18-14 7-4 1h-7l-5-6-3-13v-29l3-32 6-45 11-66 20-100 13-61 2-6 11-7 4-2 7 11 10 10 13 8 18 6 6 1h25l17-4 16-7 13-9 7-6 9-11 8-14 5-15 2-10v-20l-3-11z" fill="#FEFEFE"/>
<path transform="translate(735,724)" d="m0 0h30l24 2 27 4 20 5 27 9 29 14 18 11 16 12 11 9 15 14 12 14 10 14 9 15 7 14 7 19 5 20 2 14v34l-3 20-6 19-6 15-11 19-9 12-11 13-15 15-11 9-16 11-22 12-26 10-13 4-21 5-19 2h-117l-24-3-27-6-28-10-16-8-14-8-14-10-10-8-10-9-10-10-11-14-10-15-10-21-6-18-4-19-1-9v-31l2-15 5-20 8-21 10-19 8-12 10-13 12-13 13-13 11-9 15-11 15-9 14-8 21-9 16-6 22-6 29-5zm0 63-20 2-20 4-29 10-17 8-17 10-17 13-15 14-9 11-9 14-9 19-6 20-2 14v11l3 16 6 18 7 14 8 11 11 12 10 9 18 12 16 8 15 6 25 6 15 2 14 1h89l21-3 25-6 26-11 15-9 10-8 10-9 8-8 12-18 6-13 5-16 2-12v-15l-2-14-5-16-5-12-7-13-12-16-12-13-8-7-16-12-14-8-15-8-28-10-21-5-14-2-13-1z" fill="#010101"/>
<path transform="translate(1081,140)" d="m0 0h5l5 4 9 11 11 19 11 28 6 21 7 32 4 27 3 42v49l-3 47-1 4-6-1-10-4-22-4-44-6-27-2-9-15-2-5v-40l2-34 5-38 8-38 5-20 11-29 11-23 7-10 11-13z" fill="#FEFEFE"/>
<path transform="translate(423,139)" d="m0 0 4 2 10 10 10 14 11 22 9 24 7 25 6 29 5 30 3 31 1 16v45l-6 14-5 6-29 2-31 4-35 6-11 4h-3l-3-28-1-27v-41l2-36 5-35 8-37 6-19 8-21 8-16 8-12 8-9z" fill="#FEFEFE"/>
<path transform="translate(745,1472)" d="m0 0h9l16 3 14 7 10 9 6 10 3 9 1 6v15l-4 14-8 16-9 10-9 8-15 8-12 4-10 2h-15l-13-3-16-8-11-10-6-10-5-12-2-11v-8l2-10h2l1-5 4-8 8-10 11-9 17-9 12-5 8-2z" fill="red"/>
<path transform="translate(436,735)" d="m0 0h16l15 4 12 7 10 9 7 9 5 11 2 8v21l-4 14-6 12-7 9-14 14-11 7-12 4h-15l-14-3-11-4-11-7-9-10-8-14-2-9v-21l4-14 8-16 6-9 10-10 14-8 9-3z" fill="#010101"/>
<path transform="translate(1055,735)" d="m0 0h15l16 4 11 6 10 8 7 9 8 15 5 14 1 6v20l-4 13-7 11-7 8-14 9-16 5-5 1h-16l-13-4-11-7-17-17-8-14-5-14-1-5v-20l4-13 6-10 9-10 11-8 11-5z" fill="#010101"/>
<path transform="translate(717,869)" d="m0 0h9l12 4 13 8 5-1 8-6 9-4 12-1 10 3 6 4 6 9 1 2v15l-5 10-8 7-11 8-6 4-1 6 3 17v19l-5 8-9 6-8 2h-10l-11-2-8-6-4-6-1-3v-15l3-19v-7l-16-10-11-11-3-5-1-4v-13l5-10 6-5z" fill="#020101"/>
<path transform="translate(717,1479)" d="m0 0 2 1-2 3h2v4 2l6 1 2 1 3 13-1 10-5 10h-2v2h-2v2h-2v2l-5 2-3 2-9 2v-2l-5 1-9-5-5-4v-2h-2l-2-2-6 3 1-7 5-10 8-10 11-9 17-9z" fill="pink"/>
<path transform="translate(599,1667)" d="m0 0 4 1v14l-9 48-3 19-2 1-8-20-3-11v-15l5-15 8-14 6-7z" fill="white"/>
<path transform="translate(937,1063)" d="m0 0 2 1-11 9-15 10-19 10-26 10-13 4-21 5-19 2h-117l-9-1v-1h82l37-1 18-2 32-7 14-5 16-6 10-4 17-9 11-7z" fill="#553D3C"/>
</svg>

After

Width:  |  Height:  |  Size: 6.1 KiB

128
CODE_OF_CONDUCT.md Normal file
View File

@@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
koujalgi.amith@gmail.com.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

413
Doxyfile Normal file
View File

@@ -0,0 +1,413 @@
# Doxyfile 1.10.0
#---------------------------------------------------------------------------
# Project related configuration options
#---------------------------------------------------------------------------
DOXYFILE_ENCODING = UTF-8
PROJECT_NAME = "Ollama4j"
PROJECT_NUMBER =
PROJECT_BRIEF = "A Java library (wrapper/binding) for Ollama server."
PROJECT_LOGO = ./logo-small.png
PROJECT_ICON = ./logo-small.png
OUTPUT_DIRECTORY = ./docs/build/doxygen
CREATE_SUBDIRS = NO
CREATE_SUBDIRS_LEVEL = 8
ALLOW_UNICODE_NAMES = NO
OUTPUT_LANGUAGE = English
BRIEF_MEMBER_DESC = YES
REPEAT_BRIEF = YES
ABBREVIATE_BRIEF = "The $name class" \
"The $name widget" \
"The $name file" \
is \
provides \
specifies \
contains \
represents \
a \
an \
the
ALWAYS_DETAILED_SEC = NO
INLINE_INHERITED_MEMB = NO
FULL_PATH_NAMES = YES
STRIP_FROM_PATH =
STRIP_FROM_INC_PATH =
SHORT_NAMES = NO
JAVADOC_AUTOBRIEF = NO
JAVADOC_BANNER = NO
QT_AUTOBRIEF = NO
MULTILINE_CPP_IS_BRIEF = NO
PYTHON_DOCSTRING = YES
INHERIT_DOCS = YES
SEPARATE_MEMBER_PAGES = NO
TAB_SIZE = 4
ALIASES =
OPTIMIZE_OUTPUT_FOR_C = NO
OPTIMIZE_OUTPUT_JAVA = YES
OPTIMIZE_FOR_FORTRAN = NO
OPTIMIZE_OUTPUT_VHDL = NO
OPTIMIZE_OUTPUT_SLICE = NO
EXTENSION_MAPPING =
MARKDOWN_SUPPORT = YES
TOC_INCLUDE_HEADINGS = 5
MARKDOWN_ID_STYLE = DOXYGEN
AUTOLINK_SUPPORT = YES
BUILTIN_STL_SUPPORT = NO
CPP_CLI_SUPPORT = NO
SIP_SUPPORT = NO
IDL_PROPERTY_SUPPORT = YES
DISTRIBUTE_GROUP_DOC = NO
GROUP_NESTED_COMPOUNDS = NO
SUBGROUPING = YES
INLINE_GROUPED_CLASSES = NO
INLINE_SIMPLE_STRUCTS = NO
TYPEDEF_HIDES_STRUCT = NO
LOOKUP_CACHE_SIZE = 0
NUM_PROC_THREADS = 1
TIMESTAMP = NO
#---------------------------------------------------------------------------
# Build related configuration options
#---------------------------------------------------------------------------
EXTRACT_ALL = YES
EXTRACT_PRIVATE = NO
EXTRACT_PRIV_VIRTUAL = NO
EXTRACT_PACKAGE = NO
EXTRACT_STATIC = NO
EXTRACT_LOCAL_CLASSES = YES
EXTRACT_LOCAL_METHODS = NO
EXTRACT_ANON_NSPACES = NO
RESOLVE_UNNAMED_PARAMS = YES
HIDE_UNDOC_MEMBERS = NO
HIDE_UNDOC_CLASSES = NO
HIDE_FRIEND_COMPOUNDS = NO
HIDE_IN_BODY_DOCS = NO
INTERNAL_DOCS = NO
CASE_SENSE_NAMES = SYSTEM
HIDE_SCOPE_NAMES = NO
HIDE_COMPOUND_REFERENCE= NO
SHOW_HEADERFILE = YES
SHOW_INCLUDE_FILES = YES
SHOW_GROUPED_MEMB_INC = NO
FORCE_LOCAL_INCLUDES = NO
INLINE_INFO = YES
SORT_MEMBER_DOCS = YES
SORT_BRIEF_DOCS = NO
SORT_MEMBERS_CTORS_1ST = NO
SORT_GROUP_NAMES = NO
SORT_BY_SCOPE_NAME = NO
STRICT_PROTO_MATCHING = NO
GENERATE_TODOLIST = YES
GENERATE_TESTLIST = YES
GENERATE_BUGLIST = YES
GENERATE_DEPRECATEDLIST= YES
ENABLED_SECTIONS =
MAX_INITIALIZER_LINES = 30
SHOW_USED_FILES = YES
SHOW_FILES = YES
SHOW_NAMESPACES = YES
FILE_VERSION_FILTER =
LAYOUT_FILE =
CITE_BIB_FILES =
#---------------------------------------------------------------------------
# Configuration options related to warning and progress messages
#---------------------------------------------------------------------------
QUIET = NO
WARNINGS = YES
WARN_IF_UNDOCUMENTED = YES
WARN_IF_DOC_ERROR = YES
WARN_IF_INCOMPLETE_DOC = YES
WARN_NO_PARAMDOC = NO
WARN_IF_UNDOC_ENUM_VAL = NO
WARN_AS_ERROR = NO
WARN_FORMAT = "$file:$line: $text"
WARN_LINE_FORMAT = "at line $line of file $file"
WARN_LOGFILE =
#---------------------------------------------------------------------------
# Configuration options related to the input files
#---------------------------------------------------------------------------
INPUT = ./src/main
INPUT_ENCODING = UTF-8
INPUT_FILE_ENCODING =
FILE_PATTERNS = *.c \
*.cc \
*.cxx \
*.cxxm \
*.cpp \
*.cppm \
*.ccm \
*.c++ \
*.c++m \
*.java \
*.ii \
*.ixx \
*.ipp \
*.i++ \
*.inl \
*.idl \
*.ddl \
*.odl \
*.h \
*.hh \
*.hxx \
*.hpp \
*.h++ \
*.ixx \
*.l \
*.cs \
*.d \
*.php \
*.php4 \
*.php5 \
*.phtml \
*.inc \
*.m \
*.markdown \
*.md \
*.mm \
*.dox \
*.py \
*.pyw \
*.f90 \
*.f95 \
*.f03 \
*.f08 \
*.f18 \
*.f \
*.for \
*.vhd \
*.vhdl \
*.ucf \
*.qsf \
*.ice
RECURSIVE = YES
EXCLUDE =
EXCLUDE_SYMLINKS = NO
EXCLUDE_PATTERNS =
EXCLUDE_SYMBOLS =
EXAMPLE_PATH =
EXAMPLE_PATTERNS = *
EXAMPLE_RECURSIVE = NO
IMAGE_PATH =
INPUT_FILTER =
FILTER_PATTERNS =
FILTER_SOURCE_FILES = NO
FILTER_SOURCE_PATTERNS =
USE_MDFILE_AS_MAINPAGE =
FORTRAN_COMMENT_AFTER = 72
#---------------------------------------------------------------------------
# Configuration options related to source browsing
#---------------------------------------------------------------------------
SOURCE_BROWSER = YES
INLINE_SOURCES = NO
STRIP_CODE_COMMENTS = YES
REFERENCED_BY_RELATION = NO
REFERENCES_RELATION = NO
REFERENCES_LINK_SOURCE = YES
SOURCE_TOOLTIPS = YES
USE_HTAGS = NO
VERBATIM_HEADERS = YES
CLANG_ASSISTED_PARSING = NO
CLANG_ADD_INC_PATHS = YES
CLANG_OPTIONS =
CLANG_DATABASE_PATH =
#---------------------------------------------------------------------------
# Configuration options related to the alphabetical class index
#---------------------------------------------------------------------------
ALPHABETICAL_INDEX = YES
IGNORE_PREFIX =
#---------------------------------------------------------------------------
# Configuration options related to the HTML output
#---------------------------------------------------------------------------
GENERATE_HTML = YES
HTML_OUTPUT = html
HTML_FILE_EXTENSION = .html
HTML_HEADER =
HTML_FOOTER =
HTML_STYLESHEET =
HTML_EXTRA_STYLESHEET =
HTML_EXTRA_FILES =
HTML_COLORSTYLE = LIGHT
HTML_COLORSTYLE_HUE = 220
HTML_COLORSTYLE_SAT = 100
HTML_COLORSTYLE_GAMMA = 80
HTML_DYNAMIC_MENUS = YES
HTML_DYNAMIC_SECTIONS = NO
HTML_CODE_FOLDING = YES
HTML_COPY_CLIPBOARD = YES
HTML_PROJECT_COOKIE =
HTML_INDEX_NUM_ENTRIES = 100
GENERATE_DOCSET = NO
DOCSET_FEEDNAME = "Doxygen generated docs"
DOCSET_FEEDURL =
DOCSET_BUNDLE_ID = org.doxygen.Project
DOCSET_PUBLISHER_ID = org.doxygen.Publisher
DOCSET_PUBLISHER_NAME = Publisher
GENERATE_HTMLHELP = NO
CHM_FILE =
HHC_LOCATION =
GENERATE_CHI = NO
CHM_INDEX_ENCODING =
BINARY_TOC = NO
TOC_EXPAND = NO
SITEMAP_URL =
GENERATE_QHP = NO
QCH_FILE =
QHP_NAMESPACE = org.doxygen.Project
QHP_VIRTUAL_FOLDER = doc
QHP_CUST_FILTER_NAME =
QHP_CUST_FILTER_ATTRS =
QHP_SECT_FILTER_ATTRS =
QHG_LOCATION =
GENERATE_ECLIPSEHELP = NO
ECLIPSE_DOC_ID = org.doxygen.Project
DISABLE_INDEX = NO
GENERATE_TREEVIEW = YES
FULL_SIDEBAR = NO
ENUM_VALUES_PER_LINE = 4
TREEVIEW_WIDTH = 250
EXT_LINKS_IN_WINDOW = NO
OBFUSCATE_EMAILS = YES
HTML_FORMULA_FORMAT = png
FORMULA_FONTSIZE = 10
FORMULA_MACROFILE =
USE_MATHJAX = NO
MATHJAX_VERSION = MathJax_2
MATHJAX_FORMAT = HTML-CSS
MATHJAX_RELPATH =
MATHJAX_EXTENSIONS =
MATHJAX_CODEFILE =
SEARCHENGINE = YES
SERVER_BASED_SEARCH = NO
EXTERNAL_SEARCH = NO
SEARCHENGINE_URL =
SEARCHDATA_FILE = searchdata.xml
EXTERNAL_SEARCH_ID =
EXTRA_SEARCH_MAPPINGS =
#---------------------------------------------------------------------------
# Configuration options related to the LaTeX output
#---------------------------------------------------------------------------
GENERATE_LATEX = YES
LATEX_OUTPUT = latex
LATEX_CMD_NAME =
MAKEINDEX_CMD_NAME = makeindex
LATEX_MAKEINDEX_CMD = makeindex
COMPACT_LATEX = NO
PAPER_TYPE = a4
EXTRA_PACKAGES =
LATEX_HEADER =
LATEX_FOOTER =
LATEX_EXTRA_STYLESHEET =
LATEX_EXTRA_FILES =
PDF_HYPERLINKS = YES
USE_PDFLATEX = YES
LATEX_BATCHMODE = NO
LATEX_HIDE_INDICES = NO
LATEX_BIB_STYLE = plain
LATEX_EMOJI_DIRECTORY =
#---------------------------------------------------------------------------
# Configuration options related to the RTF output
#---------------------------------------------------------------------------
GENERATE_RTF = NO
RTF_OUTPUT = rtf
COMPACT_RTF = NO
RTF_HYPERLINKS = NO
RTF_STYLESHEET_FILE =
RTF_EXTENSIONS_FILE =
#---------------------------------------------------------------------------
# Configuration options related to the man page output
#---------------------------------------------------------------------------
GENERATE_MAN = NO
MAN_OUTPUT = man
MAN_EXTENSION = .3
MAN_SUBDIR =
MAN_LINKS = NO
#---------------------------------------------------------------------------
# Configuration options related to the XML output
#---------------------------------------------------------------------------
GENERATE_XML = NO
XML_OUTPUT = xml
XML_PROGRAMLISTING = YES
XML_NS_MEMB_FILE_SCOPE = NO
#---------------------------------------------------------------------------
# Configuration options related to the DOCBOOK output
#---------------------------------------------------------------------------
GENERATE_DOCBOOK = NO
DOCBOOK_OUTPUT = docbook
#---------------------------------------------------------------------------
# Configuration options for the AutoGen Definitions output
#---------------------------------------------------------------------------
GENERATE_AUTOGEN_DEF = NO
#---------------------------------------------------------------------------
# Configuration options related to Sqlite3 output
#---------------------------------------------------------------------------
GENERATE_SQLITE3 = NO
SQLITE3_OUTPUT = sqlite3
SQLITE3_RECREATE_DB = YES
#---------------------------------------------------------------------------
# Configuration options related to the Perl module output
#---------------------------------------------------------------------------
GENERATE_PERLMOD = NO
PERLMOD_LATEX = NO
PERLMOD_PRETTY = YES
PERLMOD_MAKEVAR_PREFIX =
#---------------------------------------------------------------------------
# Configuration options related to the preprocessor
#---------------------------------------------------------------------------
ENABLE_PREPROCESSING = YES
MACRO_EXPANSION = NO
EXPAND_ONLY_PREDEF = NO
SEARCH_INCLUDES = YES
INCLUDE_PATH =
INCLUDE_FILE_PATTERNS =
PREDEFINED =
EXPAND_AS_DEFINED =
SKIP_FUNCTION_MACROS = YES
#---------------------------------------------------------------------------
# Configuration options related to external references
#---------------------------------------------------------------------------
TAGFILES =
GENERATE_TAGFILE =
ALLEXTERNALS = NO
EXTERNAL_GROUPS = YES
EXTERNAL_PAGES = YES
#---------------------------------------------------------------------------
# Configuration options related to diagram generator tools
#---------------------------------------------------------------------------
HIDE_UNDOC_RELATIONS = YES
HAVE_DOT = NO
DOT_NUM_THREADS = 0
DOT_COMMON_ATTR = "fontname=Helvetica,fontsize=10"
DOT_EDGE_ATTR = "labelfontname=Helvetica,labelfontsize=10"
DOT_NODE_ATTR = "shape=box,height=0.2,width=0.4"
DOT_FONTPATH =
CLASS_GRAPH = YES
COLLABORATION_GRAPH = YES
GROUP_GRAPHS = YES
UML_LOOK = NO
UML_LIMIT_NUM_FIELDS = 10
DOT_UML_DETAILS = NO
DOT_WRAP_THRESHOLD = 17
TEMPLATE_RELATIONS = NO
INCLUDE_GRAPH = YES
INCLUDED_BY_GRAPH = YES
CALL_GRAPH = NO
CALLER_GRAPH = NO
GRAPHICAL_HIERARCHY = YES
DIRECTORY_GRAPH = YES
DIR_GRAPH_MAX_DEPTH = 1
DOT_IMAGE_FORMAT = png
INTERACTIVE_SVG = NO
DOT_PATH =
DOTFILE_DIRS =
DIA_PATH =
DIAFILE_DIRS =
PLANTUML_JAR_PATH =
PLANTUML_CFG_FILE =
PLANTUML_INCLUDE_PATH =
DOT_GRAPH_MAX_NODES = 50
MAX_DOT_GRAPH_DEPTH = 0
DOT_MULTI_TARGETS = NO
GENERATE_LEGEND = YES
DOT_CLEANUP = YES
MSCGEN_TOOL =
MSCFILE_DIRS =

View File

@@ -1,13 +1,28 @@
build:
mvn -B clean install
ut:
unit-tests:
mvn clean test -Punit-tests
it:
integration-tests:
mvn clean verify -Pintegration-tests
doxygen:
doxygen Doxyfile
list-releases:
curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=asc&page=0&size=12&filter=namespace%3Aio.github.amithkoujalgi%2Cname%3Aollama4j' \
curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=desc&page=0&size=20&filter=namespace%3Aio.github.ollama4j%2Cname%3Aollama4j' \
--compressed \
--silent | jq '.components[].version'
--silent | jq -r '.components[].version'
build-docs:
npm i --prefix docs && npm run build --prefix docs
start-docs:
npm i --prefix docs && npm run start --prefix docs
start-cpu:
docker run -it -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama
start-gpu:
docker run -it --gpus=all -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama

322
README.md
View File

@@ -1,31 +1,51 @@
### Ollama4j
<img src='https://raw.githubusercontent.com/amithkoujalgi/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon">
<p align="center">
<img src='https://raw.githubusercontent.com/ollama4j/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon">
</p>
A Java library (wrapper/binding) for [Ollama](https://ollama.ai/) server.
<div align="center">
A Java library (wrapper/binding) for Ollama server.
Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/).
Find more details on the [website](https://ollama4j.github.io/ollama4j/).
![GitHub stars](https://img.shields.io/github/stars/amithkoujalgi/ollama4j)
![GitHub forks](https://img.shields.io/github/forks/amithkoujalgi/ollama4j)
![GitHub watchers](https://img.shields.io/github/watchers/amithkoujalgi/ollama4j)
![GitHub repo size](https://img.shields.io/github/repo-size/amithkoujalgi/ollama4j)
![GitHub language count](https://img.shields.io/github/languages/count/amithkoujalgi/ollama4j)
![GitHub top language](https://img.shields.io/github/languages/top/amithkoujalgi/ollama4j)
![GitHub last commit](https://img.shields.io/github/last-commit/amithkoujalgi/ollama4j?color=green)
![Hits](https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Famithkoujalgi%2Follama4j&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false)
![GitHub stars](https://img.shields.io/github/stars/ollama4j/ollama4j)
![GitHub forks](https://img.shields.io/github/forks/ollama4j/ollama4j)
![GitHub watchers](https://img.shields.io/github/watchers/ollama4j/ollama4j)
![Contributors](https://img.shields.io/github/contributors/ollama4j/ollama4j?style=social)
![GitHub License](https://img.shields.io/github/license/ollama4j/ollama4j)
[![codecov](https://codecov.io/gh/amithkoujalgi/ollama4j/graph/badge.svg?token=U0TE7BGP8L)](https://codecov.io/gh/amithkoujalgi/ollama4j)
![Build Status](https://github.com/amithkoujalgi/ollama4j/actions/workflows/maven-publish.yml/badge.svg)
[//]: # (![GitHub repo size]&#40;https://img.shields.io/github/repo-size/ollama4j/ollama4j&#41;)
[//]: # (![GitHub top language]&#40;https://img.shields.io/github/languages/top/ollama4j/ollama4j&#41;)
[//]: # (![JitPack Downloads This Month Badge]&#40;https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fjitpack.io%2Fapi%2Fdownloads%2Fio.github.ollama4j%2Follama4j&query=%24.month&label=JitPack%20Downloads%20-%20This%20Month&#41;)
[//]: # (![JitPack Downloads This Week Badge]&#40;https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fjitpack.io%2Fapi%2Fdownloads%2Fio.github.ollama4j%2Follama4j&query=%24.week&label=JitPack%20Downloads%20-%20This%20Week&#41;)
[//]: # (![JitPack Downloads Per Month Badge]&#40;https://jitpack.io/v/ollama4j/ollama4j/month.svg&#41;)
[//]: # (![GitHub Downloads &#40;all assets, all releases&#41;]&#40;https://img.shields.io/github/downloads/ollama4j/ollama4j/total?label=GitHub%20Package%20Downloads&#41;)
![GitHub last commit](https://img.shields.io/github/last-commit/ollama4j/ollama4j?color=green)
[![codecov](https://codecov.io/gh/ollama4j/ollama4j/graph/badge.svg?token=U0TE7BGP8L)](https://codecov.io/gh/ollama4j/ollama4j)
![Build Status](https://github.com/ollama4j/ollama4j/actions/workflows/maven-publish.yml/badge.svg)
</div>
[//]: # (![Hits]&#40;https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Follama4j%2Follama4j&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false&#41;)
[//]: # (![GitHub language count]&#40;https://img.shields.io/github/languages/count/ollama4j/ollama4j&#41;)
## Table of Contents
- [How does it work?](#how-does-it-work)
- [Requirements](#requirements)
- [Installation](#installation)
- [API Spec](#api-spec)
- [Demo APIs](#try-out-the-apis-with-ollama-server)
- [API Spec](https://ollama4j.github.io/ollama4j/category/apis---model-management)
- [Javadoc](https://ollama4j.github.io/ollama4j/apidocs/)
- [Development](#development)
- [Contributions](#get-involved)
- [References](#references)
@@ -46,44 +66,181 @@ Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/).
#### Requirements
![Java](https://img.shields.io/badge/Java-11_+-green.svg?style=just-the-message&labelColor=gray)
![Java](https://img.shields.io/badge/Java-11_+-green.svg?style=for-the-badge&labelColor=gray&label=Java&color=orange)
[![][ollama-shield]][ollama] **Or** [![][ollama-docker-shield]][ollama-docker]
[ollama]: https://ollama.ai/
<a href="https://ollama.com/" target="_blank">
<img src="https://img.shields.io/badge/v0.3.0-green.svg?style=for-the-badge&labelColor=gray&label=Ollama&color=blue" alt=""/>
</a>
[ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=just-the-message&labelColor=gray
<table>
<tr>
<td>
[ollama-docker]: https://hub.docker.com/r/ollama/ollama
<a href="https://ollama.ai/" target="_blank">Local Installation</a>
[ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=just-the-message&labelColor=gray
</td>
#### Installation
<td>
<a href="https://hub.docker.com/r/ollama/ollama" target="_blank">Docker Installation</a>
</td>
</tr>
<tr>
<td>
<a href="https://ollama.com/download/Ollama-darwin.zip" target="_blank">Download for macOS</a>
<a href="https://ollama.com/download/OllamaSetup.exe" target="_blank">Download for Windows</a>
Install on Linux
```shell
curl -fsSL https://ollama.com/install.sh | sh
```
</td>
<td>
CPU only
```shell
docker run -d -p 11434:11434 \
-v ollama:/root/.ollama \
--name ollama \
ollama/ollama
```
NVIDIA GPU
```shell
docker run -d -p 11434:11434 \
--gpus=all \
-v ollama:/root/.ollama \
--name ollama \
ollama/ollama
```
</td>
</tr>
</table>
## Installation
> [!NOTE]
> We are now publishing the artifacts to both Maven Central and GitHub package repositories.
>
> Track the releases [here](https://github.com/ollama4j/ollama4j/releases) and update the dependency version
> according to your requirements.
### For Maven
#### Using [Maven Central](https://central.sonatype.com/)
[![][ollama4j-mvn-releases-shield]][ollama4j-mvn-releases-link]
[ollama4j-mvn-releases-link]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j/overview
[ollama4j-mvn-releases-shield]: https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20Maven%20Central
In your Maven project, add this dependency:
```xml
<dependency>
<groupId>io.github.amithkoujalgi</groupId>
<groupId>io.github.ollama4j</groupId>
<artifactId>ollama4j</artifactId>
<version>1.0.29</version>
<version>1.0.93</version>
</dependency>
```
Latest release:
#### Using GitHub's Maven Package Repository
![Maven Central](https://img.shields.io/maven-central/v/io.github.amithkoujalgi/ollama4j)
[![][ollama4j-releases-shield]][ollama4j-releases-link]
[![][lib-shield]][lib]
[ollama4j-releases-link]: https://github.com/ollama4j/ollama4j/releases
[lib]: https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j
[ollama4j-releases-shield]: https://img.shields.io/github/v/release/ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20GitHub%20Packages
1. Add `GitHub Maven Packages` repository to your project's `pom.xml` or your `settings.xml`:
```xml
<repositories>
<repository>
<id>github</id>
<name>GitHub Apache Maven Packages</name>
<url>https://maven.pkg.github.com/ollama4j/ollama4j</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
```
2. Add `GitHub` server to settings.xml. (Usually available at ~/.m2/settings.xml)
```xml
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
http://maven.apache.org/xsd/settings-1.0.0.xsd">
<servers>
<server>
<id>github</id>
<username>YOUR-USERNAME</username>
<password>YOUR-TOKEN</password>
</server>
</servers>
</settings>
```
3. In your Maven project, add this dependency:
```xml
<dependency>
<groupId>io.github.ollama4j</groupId>
<artifactId>ollama4j</artifactId>
<version>1.0.93</version>
</dependency>
```
### For Gradle
1. Add the dependency
```groovy
dependencies {
implementation 'io.github.ollama4j:ollama4j:1.0.93'
}
```
[//]: # (Latest release:)
[//]: # ()
[//]: # (![Maven Central]&#40;https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j&#41;)
[//]: # ()
[//]: # ([![][lib-shield]][lib])
[lib]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j
[lib-shield]: https://img.shields.io/badge/ollama4j-get_latest_version-blue.svg?style=just-the-message&labelColor=gray
#### API Spec
Find the full API specifications on the [website](https://amithkoujalgi.github.io/ollama4j/).
> [!TIP]
> Find the full API specifications on the [website](https://ollama4j.github.io/ollama4j/).
#### Development
@@ -96,57 +253,106 @@ make build
Run unit tests:
```shell
make ut
make unit-tests
```
Run integration tests:
```shell
make it
make integration-tests
```
#### Releases
Releases (newer artifact versions) are done automatically on pushing the code to the `main` branch through GitHub
Actions CI workflow.
Newer artifacts are published via GitHub Actions CI workflow when a new release is created from `main` branch.
#### Traction
## ⭐ Give us a Star!
[![Star History Chart](https://api.star-history.com/svg?repos=amithkoujalgi/ollama4j&type=Date)](https://star-history.com/#amithkoujalgi/ollama4j&Date)
If you like or are using this project to build your own, please give us a star. It's a free way to show your support.
### Areas of improvement
## Who's using Ollama4j?
- [x] Use Java-naming conventions for attributes in the request/response models instead of the
snake-case conventions. (
possibly with Jackson-mapper's `@JsonProperty`)
- [x] Fix deprecated HTTP client code
- [x] Setup logging
- [x] Use lombok
- [x] Update request body creation with Java objects
- [ ] Async APIs for images
- [ ] Add custom headers to requests
- [ ] Add additional params for `ask` APIs such as:
- `options`: additional model parameters for the Modelfile such as `temperature`
- `system`: system prompt to (overrides what is defined in the Modelfile)
- `template`: the full prompt or prompt template (overrides what is defined in the Modelfile)
- `context`: the context parameter returned from a previous request, which can be used to keep a
short
conversational memory
- `stream`: Add support for streaming responses from the model
- [ ] Add test cases
- [ ] Handle exceptions better (maybe throw more appropriate exceptions)
| # | Project Name | Description | Link |
|---|-------------------|---------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------|
| 1 | Datafaker | A library to generate fake data | [GitHub](https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api) |
| 2 | Vaadin Web UI | UI-Tester for interactions with Ollama via ollama4j | [GitHub](https://github.com/TEAMPB/ollama4j-vaadin-ui) |
| 3 | ollama-translator | A Minecraft 1.20.6 Spigot plugin that translates all messages into a specific target language via Ollama | [GitHub](https://github.com/liebki/ollama-translator) |
| 4 | AI Player | A Minecraft mod that adds an intelligent "second player" to the game | [GitHub](https://github.com/shasankp000/AI-Player), <br/> [Reddit Thread](https://www.reddit.com/r/fabricmc/comments/1e65x5s/comment/ldr2vcf/) |
| 5 | Ollama4j Web UI | A web UI for Ollama written in Java using Spring Boot, Vaadin, and Ollama4j | [GitHub](https://github.com/ollama4j/ollama4j-web-ui) |
| 6 | JnsCLI | A command-line tool for Jenkins that manages jobs, builds, and configurations, with AI-powered error analysis | [GitHub](https://github.com/mirum8/jnscli) |
| 7 | Katie Backend | An open-source AI-based question-answering platform for accessing private domain knowledge | [GitHub](https://github.com/wyona/katie-backend) |
| 8 | TeleLlama3 Bot | A question-answering Telegram bot | [Repo](https://git.hiast.edu.sy/mohamadbashar.disoki/telellama3-bot) |
| 9 | moqui-wechat | A moqui-wechat component | [GitHub](https://github.com/heguangyong/moqui-wechat) |
| 10 | B4X | A set of simple and powerful RAD tools for desktop and server development | [Website](https://www.b4x.com/android/forum/threads/ollama4j-library-pnd_ollama4j-your-local-offline-llm-like-chatgpt.165003/) |
## Traction
[![Star History Chart](https://api.star-history.com/svg?repos=ollama4j/ollama4j&type=Date)](https://star-history.com/#ollama4j/ollama4j&Date)
## Get Involved
<div align="center">
<a href="">![Open Issues](https://img.shields.io/github/issues-raw/ollama4j/ollama4j)</a>
<a href="">![Closed Issues](https://img.shields.io/github/issues-closed-raw/ollama4j/ollama4j)</a>
<a href="">![Open PRs](https://img.shields.io/github/issues-pr-raw/ollama4j/ollama4j)</a>
<a href="">![Closed PRs](https://img.shields.io/github/issues-pr-closed-raw/ollama4j/ollama4j)</a>
<a href="">![Discussions](https://img.shields.io/github/discussions/ollama4j/ollama4j)</a>
</div>
[//]: # (![GitHub Issues or Pull Requests]&#40;https://img.shields.io/github/issues-raw/ollama4j/ollama4j&#41;)
[//]: # (![GitHub Issues or Pull Requests]&#40;https://img.shields.io/github/issues-closed-raw/ollama4j/ollama4j&#41;)
[//]: # (![GitHub Issues or Pull Requests]&#40;https://img.shields.io/github/issues-pr-raw/ollama4j/ollama4j&#41;)
[//]: # (![GitHub Issues or Pull Requests]&#40;https://img.shields.io/github/issues-pr-closed-raw/ollama4j/ollama4j&#41;)
[//]: # (![GitHub Discussions]&#40;https://img.shields.io/github/discussions/ollama4j/ollama4j&#41;)
### Get Involved
Contributions are most welcome! Whether it's reporting a bug, proposing an enhancement, or helping
with code - any sort
of contribution is much appreciated.
## 🏷️ License and Citation
The code is available under [MIT License](./LICENSE).
If you find this project helpful in your research, please cite this work at
```
@misc{ollama4j2024,
author = {Amith Koujalgi},
title = {Ollama4j: A Java Library (Wrapper/Binding) for Ollama Server},
year = {2024},
month = {January},
url = {https://github.com/ollama4j/ollama4j}
}
```
### References
- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)
### Credits
The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/)
project.
### References
**Thanks to the amazing contributors**
- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)
<p align="center">
<a href="https://github.com/ollama4j/ollama4j/graphs/contributors">
<img src="https://contrib.rocks/image?repo=ollama4j/ollama4j" alt=""/>
</a>
</p>
### Appreciate my work?
<p align="center">
<a href="https://www.buymeacoffee.com/amithkoujalgi" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a>
</p>

View File

@@ -11,7 +11,7 @@ Hey there, my fellow Java Developers! 🚀
I am glad to announce the release of Ollama4j, a library that unites Ollama (an LLM manager and runner) and your Java
applications! 🌐🚀
👉 GitHub Repository: Ollama4j on GitHub (https://github.com/amithkoujalgi/ollama4j)
👉 GitHub Repository: Ollama4j on GitHub (https://github.com/ollama4j/ollama4j)
🌟 Key Features:
@@ -58,9 +58,9 @@ elevate your projects.
I look forward to seeing the incredible applications/projects you'll build with Ollama4j! 🌟
Find the full API spec here: https://amithkoujalgi.github.io/ollama4j/
Find the full API spec here: https://ollama4j.github.io/ollama4j/
Find the Javadoc here: https://amithkoujalgi.github.io/ollama4j/apidocs/
Find the Javadoc here: https://ollama4j.github.io/ollama4j/apidocs/
Ollama4j Docs is powered by [Docusaurus](https://docusaurus.io).

View File

@@ -1,42 +0,0 @@
---
sidebar_position: 2
---
# Ask - Async
This API lets you ask questions to the LLMs in an asynchronous way.
These APIs correlate to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
```java
public class Main {
public static void main(String[] args) throws Exception {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
String prompt = "Who are you?";
OllamaAsyncResultCallback callback = ollamaAPI.askAsync(OllamaModelType.LLAMA2, prompt);
while (!callback.isComplete() || !callback.getStream().isEmpty()) {
// poll for data from the response stream
String result = callback.getStream().poll();
if (result != null) {
System.out.print(result.getResponse());
}
Thread.sleep(100);
}
}
}
```
You will get a response similar to:
> I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational
> manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide
> range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that
> require
> natural language understanding and generation capabilities.

View File

@@ -1,106 +0,0 @@
---
sidebar_position: 1
---
# Ask - Sync
This API lets you ask questions to the LLMs in a synchronous way.
These APIs correlate to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
## Try asking a question about the model.
```java
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaResult result = ollamaAPI.ask(OllamaModelType.LLAMA2, "Who are you?");
System.out.println(result.getResponse());
}
}
```
You will get a response similar to:
> I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational
> manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide
> range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that
> require
> natural language understanding and generation capabilities.
## Try asking a question from general topics.
```java
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
String prompt = "List all cricket world cup teams of 2019.";
OllamaResult result = ollamaAPI.ask(OllamaModelType.LLAMA2, prompt);
System.out.println(result.getResponse());
}
}
```
You'd then get a response from the model:
> The 2019 ICC Cricket World Cup was held in England and Wales from May 30 to July 14, 2019. The
> following teams
> participated in the tournament:
>
> 1. Afghanistan
> 2. Australia
> 3. Bangladesh
> 4. England
> 5. India
> 6. New Zealand
> 7. Pakistan
> 8. South Africa
> 9. Sri Lanka
> 10. West Indies
>
> These teams competed in a round-robin format, with the top four teams advancing to the
> semi-finals. The tournament was
> won by the England cricket team, who defeated New Zealand in the final.
## Try asking for a Database query for your data schema.
```java
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
String prompt = SamplePrompts.getSampleDatabasePromptWithQuestion(
"List all customer names who have bought one or more products");
OllamaResult result = ollamaAPI.ask(OllamaModelType.SQLCODER, prompt);
System.out.println(result.getResponse());
}
}
```
_Note: Here I've used
a [sample prompt](https://github.com/amithkoujalgi/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
containing a database schema from within this library for demonstration purposes._
You'd then get a response from the model:
```sql
SELECT customers.name
FROM sales
JOIN customers ON sales.customer_id = customers.customer_id
GROUP BY customers.name;
```

View File

@@ -1,46 +0,0 @@
---
sidebar_position: 5
---
# Generate Embeddings
Generate embeddings from a model.
Parameters:
- `model`: name of model to generate embeddings from
- `prompt`: text to generate embeddings for
```java
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
List<Double> embeddings = ollamaAPI.generateEmbeddings(OllamaModelType.LLAMA2,
"Here is an article about llamas...");
embeddings.forEach(System.out::println);
}
}
```
You will get a response similar to:
```json
[
0.5670403838157654,
0.009260174818336964,
0.23178744316101074,
-0.2916173040866852,
-0.8924556970596313,
0.8785552978515625,
-0.34576427936553955,
0.5742510557174683,
-0.04222835972905159,
-0.137906014919281
]
```

View File

@@ -1,6 +1,6 @@
{
"label": "APIs - Extras",
"position": 10,
"position": 4,
"link": {
"type": "generated-index",
"description": "Details of APIs to handle bunch of extra stuff."

View File

@@ -10,6 +10,8 @@ Ollama server would be setup behind a gateway/reverse proxy with basic auth.
After configuring basic authentication, all subsequent requests will include the Basic Auth header.
```java
import io.github.ollama4j.OllamaAPI;
public class Main {
public static void main(String[] args) {

View File

@@ -0,0 +1,79 @@
---
sidebar_position: 1
---
# Options Builder
This lets you build options for the `ask()` API.
Following are the parameters supported by Ollama:
| Parameter | Description | Value Type | Example Usage |
|----------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------|----------------------|
| mirostat | Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0) | int | mirostat 0 |
| mirostat_eta | Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. (Default: 0.1) | float | mirostat_eta 0.1 |
| mirostat_tau | Controls the balance between coherence and diversity of the output. A lower value will result in more focused and coherent text. (Default: 5.0) | float | mirostat_tau 5.0 |
| num_ctx | Sets the size of the context window used to generate the next token. (Default: 2048) | int | num_ctx 4096 |
| num_gqa | The number of GQA groups in the transformer layer. Required for some models, for example it is 8 for llama2:70b | int | num_gqa 1 |
| num_gpu | The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable. | int | num_gpu 50 |
| num_thread | Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). | int | num_thread 8 |
| repeat_last_n | Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx) | int | repeat_last_n 64 |
| repeat_penalty | Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1) | float | repeat_penalty 1.1 |
| temperature | The temperature of the model. Increasing the temperature will make the model answer more creatively. (Default: 0.8) | float | temperature 0.7 |
| seed | Sets the random number seed to use for generation. Setting this to a specific number will make the model generate the same text for the same prompt. (Default: 0) | int | seed 42 |
| stop | Sets the stop sequences to use. When this pattern is encountered the LLM will stop generating text and return. Multiple stop patterns may be set by specifying multiple separate `stop` parameters in a modelfile. | string | stop "AI assistant:" |
| tfs_z | Tail free sampling is used to reduce the impact of less probable tokens from the output. A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. (default: 1) | float | tfs_z 1 |
| num_predict | Maximum number of tokens to predict when generating text. (Default: 128, -1 = infinite generation, -2 = fill context) | int | num_predict 42 |
| top_k | Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40) | int | top_k 40 |
| top_p | Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9) | float | top_p 0.9 |
Link to [source](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
Also, see how to set those Ollama parameters using
the `OptionsBuilder`
from [javadoc](https://ollama4j.github.io/ollama4j/apidocs/io/github/ollama4j/ollama4j/core/utils/OptionsBuilder.html).
## Build an empty `Options` object
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
Options options = new OptionsBuilder().build();
}
}
```
## Build the `Options` object with values
```java
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
Options options =
new OptionsBuilder()
.setMirostat(10)
.setMirostatEta(0.5f)
.setNumGpu(2)
.setTemperature(1.5f)
.build();
}
}
```

View File

@@ -7,6 +7,8 @@ sidebar_position: 3
This API lets you check the reachability of Ollama server.
```java
import io.github.ollama4j.OllamaAPI;
public class Main {
public static void main(String[] args) {

View File

@@ -0,0 +1,30 @@
---
sidebar_position: 4
---
# PS
This API provides a list of running models and details about each model currently loaded into memory.
This API corresponds to the [PS](https://github.com/ollama/ollama/blob/main/docs/api.md#list-running-models) API.
```java
package io.github.ollama4j.localtests;
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.ps.ModelsProcessResponse;
import java.io.IOException;
public class Main {
public static void main(String[] args) {
OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
ModelsProcessResponse response = ollamaAPI.ps();
System.out.println(response);
}
}
```

View File

@@ -7,6 +7,8 @@ sidebar_position: 2
This API lets you set the request timeout for the Ollama client.
```java
import io.github.ollama4j.OllamaAPI;
public class Main {
public static void main(String[] args) {

View File

@@ -9,6 +9,8 @@ This API lets you set the verbosity of the Ollama client.
## Try asking a question about the model.
```java
import io.github.ollama4j.OllamaAPI;
public class Main {
public static void main(String[] args) {

View File

@@ -1,6 +1,6 @@
{
"label": "APIs - Ask",
"position": 10,
"label": "APIs - Generate",
"position": 3,
"link": {
"type": "generated-index",
"description": "Details of APIs to interact with LLMs."

View File

@@ -0,0 +1,272 @@
---
sidebar_position: 7
---
# Chat
This API lets you create a conversation with LLMs. Using this API enables you to ask questions to the model including
information using the history of already asked questions and the respective answers.
## Create a new conversation and use chat history to augment follow up questions
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.types.OllamaModelType;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
// create first user question
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
.build();
// start conversation with model
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
System.out.println("First answer: " + chatResult.getResponseModel().getMessage().getContent());
// create next userQuestion
requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?").build();
// "continue" conversation with model
chatResult = ollamaAPI.chat(requestModel);
System.out.println("Second answer: " + chatResult.getResponseModel().getMessage().getContent());
System.out.println("Chat History: " + chatResult.getChatHistory());
}
}
```
You will get a response similar to:
> First answer: Should be Paris!
>
> Second answer: Marseille.
>
> Chat History:
```json
[
{
"role": "user",
"content": "What is the capital of France?",
"images": []
},
{
"role": "assistant",
"content": "Should be Paris!",
"images": []
},
{
"role": "user",
"content": "And what is the second largest city?",
"images": []
},
{
"role": "assistant",
"content": "Marseille.",
"images": []
}
]
```
## Conversational loop
```java
public class Main {
public static void main(String[] args) {
OllamaAPI ollamaAPI = new OllamaAPI();
ollamaAPI.setRequestTimeoutSeconds(60);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("<your-model>");
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "<your-first-message>").build();
OllamaChatResult initialChatResult = ollamaAPI.chat(requestModel);
System.out.println(initialChatResult.getResponse());
List<OllamaChatMessage> history = initialChatResult.getChatHistory();
while (true) {
            OllamaChatResult chatResult = ollamaAPI.chat(builder.withMessages(history).withMessage(OllamaChatMessageRole.USER, "<your-new-message>").build());
System.out.println(chatResult.getResponse());
history = chatResult.getChatHistory();
}
}
}
```
## Create a conversation where the answer is streamed
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("llama2");
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?")
.build();
// define a handler (Consumer<String>)
OllamaStreamHandler streamHandler = (s) -> {
System.out.println(s);
};
OllamaChatResult chatResult = ollamaAPI.chat(requestModel, streamHandler);
}
}
```
You will get a response similar to:
> The
> The capital
> The capital of
> The capital of France
> The capital of France is
> The capital of France is Paris
> The capital of France is Paris.
## Use a simple Console Output Stream Handler
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.impl.ConsoleOutputStreamHandler;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.types.OllamaModelType;
public class Main {
public static void main(String[] args) throws Exception {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
.build();
OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
ollamaAPI.chat(requestModel, streamHandler);
}
}
```
## Create a new conversation with individual system prompt
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.types.OllamaModelType;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
// create request with system-prompt (overriding the model defaults) and user question
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
.withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
.build();
// start conversation with model
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
System.out.println(chatResult.getResponseModel());
}
}
```
You will get a response similar to:
> NI.
## Create a conversation about an image (requires model with image recognition skills)
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.types.OllamaModelType;
import java.io.File;
import java.util.List;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA);
// Load Image from File and attach to user message (alternatively images could also be added via URL)
OllamaChatRequest requestModel =
builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
List.of(
new File("/path/to/image"))).build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
System.out.println("First answer: " + chatResult.getResponseModel());
builder.reset();
// Use history to ask further questions about the image or assistant answer
requestModel =
builder.withMessages(chatResult.getChatHistory())
.withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();
chatResult = ollamaAPI.chat(requestModel);
System.out.println("Second answer: " + chatResult.getResponseModel());
}
}
```
You will get a response similar to:
> First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two
> levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and
> comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early
> evening, given the warm lighting and the low position of the sun in the sky.
>
> Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. However, the dog
> appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever
> or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed
> confidently.

View File

@@ -0,0 +1,65 @@
---
sidebar_position: 8
---
# Custom Roles
Allows to manage custom roles (apart from the base roles) for chat interactions with the models.
_Particularly helpful when you need to use roles other than the base ones — something the newer models
support._
_Base roles are `SYSTEM`, `USER`, `ASSISTANT`, `TOOL`._
### Usage
#### Add new role
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatMessageRole customRole = ollamaAPI.addCustomRole("custom-role");
}
}
```
#### List roles
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
List<OllamaChatMessageRole> roles = ollamaAPI.listRoles();
}
}
```
#### Get role
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
        OllamaChatMessageRole role = ollamaAPI.getRole("custom-role");
}
}
```

View File

@@ -0,0 +1,48 @@
---
sidebar_position: 2
---
# Generate - Async
This API lets you ask questions to the LLMs in an asynchronous way.
This is particularly helpful when you want to issue a generate request to the LLM and collect the response in the
background (such as threads) without blocking your code until the response arrives from the model.
This API corresponds to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
import io.github.ollama4j.types.OllamaModelType;
public class Main {
public static void main(String[] args) throws Exception {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
ollamaAPI.setRequestTimeoutSeconds(60);
String prompt = "List all cricket world cup teams of 2019.";
OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync(OllamaModelType.LLAMA3, prompt, false);
// Set the poll interval according to your needs.
// Smaller the poll interval, more frequently you receive the tokens.
int pollIntervalMilliseconds = 1000;
while (true) {
String tokens = streamer.getStream().poll();
System.out.print(tokens);
if (!streamer.isAlive()) {
break;
}
Thread.sleep(pollIntervalMilliseconds);
}
System.out.println("\n------------------------");
System.out.println("Complete Response:");
System.out.println("------------------------");
System.out.println(streamer.getCompleteResponse());
}
}
```

View File

@@ -0,0 +1,118 @@
---
sidebar_position: 6
---
# Generate Embeddings
Generate embeddings from a model.
Parameters:
- `model`: name of model to generate embeddings from
- `input`: text/s to generate embeddings for
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel;
import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaEmbedResponseModel embeddings = ollamaAPI.embed("all-minilm", Arrays.asList("Why is the sky blue?", "Why is the grass green?"));
System.out.println(embeddings);
}
}
```
Or, using the `OllamaEmbedRequestModel`:
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel;
import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaEmbedResponseModel embeddings = ollamaAPI.embed(new OllamaEmbedRequestModel("all-minilm", Arrays.asList("Why is the sky blue?", "Why is the grass green?")));
System.out.println(embeddings);
}
}
```
You will get a response similar to:
```json
{
"model": "all-minilm",
"embeddings": [[-0.034674067, 0.030984823, 0.0067988685]],
"total_duration": 14173700,
"load_duration": 1198800,
"prompt_eval_count": 2
}
```
:::note
This is a deprecated API
:::
Parameters:
- `model`: name of model to generate embeddings from
- `prompt`: text to generate embeddings for
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;
import java.util.List;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
List<Double> embeddings = ollamaAPI.generateEmbeddings(OllamaModelType.LLAMA2,
"Here is an article about llamas...");
embeddings.forEach(System.out::println);
}
}
```
You will get a response similar to:
```json
[
0.5670403838157654,
0.009260174818336964,
0.23178744316101074,
-0.2916173040866852,
-0.8924556970596313
]
```

View File

@@ -1,27 +1,35 @@
---
sidebar_position: 3
sidebar_position: 4
---
# Ask - With Image Files
# Generate - With Image Files
This API lets you ask questions along with the image files to the LLMs.
These APIs correlate to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
This API corresponds to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
:::caution
:::note
Executing this on Ollama server running in CPU-mode will take longer to generate response. Hence, GPU-mode is
recommended.
:::
## Ask (Sync)
## Synchronous mode
If you have this image downloaded and you pass the path to the downloaded image to the following code:
![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg)
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import java.io.File;
import java.util.List;
public class Main {
public static void main(String[] args) {
@@ -29,10 +37,12 @@ public class Main {
OllamaAPI ollamaAPI = new OllamaAPI(host);
ollamaAPI.setRequestTimeoutSeconds(10);
OllamaResult result = ollamaAPI.askWithImageFiles(OllamaModelType.LLAVA,
OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA,
"What's in this image?",
List.of(
new File("/path/to/image")));
new File("/path/to/image")),
new OptionsBuilder().build()
);
System.out.println(result.getResponse());
}
}

View File

@@ -1,14 +1,14 @@
---
sidebar_position: 4
sidebar_position: 5
---
# Ask - With Image URLs
# Generate - With Image URLs
This API lets you ask questions along with the image files to the LLMs.
These APIs correlate to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
This API corresponds to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
:::caution
:::note
Executing this on Ollama server running in CPU-mode will take longer to generate response. Hence, GPU-mode is
recommended.
@@ -22,6 +22,13 @@ Passing the link of this image the following code:
![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg)
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import java.util.List;
public class Main {
public static void main(String[] args) {
@@ -29,10 +36,12 @@ public class Main {
OllamaAPI ollamaAPI = new OllamaAPI(host);
ollamaAPI.setRequestTimeoutSeconds(10);
OllamaResult result = ollamaAPI.askWithImageURLs(OllamaModelType.LLAVA,
OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA,
"What's in this image?",
List.of(
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"));
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
new OptionsBuilder().build()
);
System.out.println(result.getResponse());
}
}

View File

@@ -0,0 +1,642 @@
---
sidebar_position: 3
---
# Generate - With Tools
This API lets you perform [function calling](https://docs.mistral.ai/capabilities/function_calling/) using LLMs in a
synchronous way.
This API corresponds to
the [generate](https://github.com/ollama/ollama/blob/main/docs/api.md#request-raw-mode) API with `raw` mode.
:::note
This is only an experimental implementation and has a very basic design.
Currently, built and tested for [Mistral's latest model](https://ollama.com/library/mistral) only. We could redesign
this
in the future if tooling is supported for more models with a generic interaction standard from Ollama.
:::
### Function Calling/Tools
Assume you want to call a method in your code based on the response generated from the model.
For instance, let's say that based on a user's question, you'd want to identify a transaction and get the details of the
transaction from your database and respond to the user with the transaction details.
You could do that with ease with the `function calling` capabilities of the models by registering your `tools`.
### Create Functions
We can create static functions as our tools.
This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns
fuel price value.
```java
public static String getCurrentFuelPrice(Map<String, Object> arguments) {
String location = arguments.get("location").toString();
String fuelType = arguments.get("fuelType").toString();
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
}
```
This function takes the argument `city` and performs an operation with the argument and returns the weather for a
location.
```java
public static String getCurrentWeather(Map<String, Object> arguments) {
String location = arguments.get("city").toString();
return "Currently " + location + "'s weather is nice.";
}
```
Another way to create our tools is by creating classes by extending `ToolFunction`.
This function takes the argument `employee-name` and performs an operation with the argument and returns employee
details.
```java
class DBQueryFunction implements ToolFunction {
@Override
public Object apply(Map<String, Object> arguments) {
// perform DB operations here
return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
}
}
```
### Define Tool Specifications
Lets define a sample tool specification called **Fuel Price Tool** for getting the current fuel price.
- Specify the function `name`, `description`, and `required` properties (`location` and `fuelType`).
- Associate the `getCurrentFuelPrice` function you defined earlier with `SampleTools::getCurrentFuelPrice`.
```java
Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
.functionName("current-fuel-price")
.functionDescription("Get current fuel price")
.properties(
new Tools.PropsBuilder()
.withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
.withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
.build()
)
.toolDefinition(SampleTools::getCurrentFuelPrice)
.build();
```
Lets also define a sample tool specification called **Weather Tool** for getting the current weather.
- Specify the function `name`, `description`, and `required` property (`city`).
- Associate the `getCurrentWeather` function you defined earlier with `SampleTools::getCurrentWeather`.
```java
Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
.functionName("current-weather")
.functionDescription("Get current weather")
.properties(
new Tools.PropsBuilder()
.withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
.build()
)
.toolDefinition(SampleTools::getCurrentWeather)
.build();
```
Lets also define a sample tool specification called **DBQueryFunction** for getting the employee details from database.
- Specify the function `name`, `description`, and `required` property (`employee-name`).
- Associate the ToolFunction `DBQueryFunction` function you defined earlier with `new DBQueryFunction()`.
```java
Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
.functionName("get-employee-details")
.functionDescription("Get employee details from the database")
.properties(
new Tools.PropsBuilder()
.withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
.withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
.withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
.build()
)
.toolDefinition(new DBQueryFunction())
.build();
```
### Register the Tools
Register the defined tools (`fuel price` and `weather`) with the OllamaAPI.
```java
ollamaAPI.registerTool(fuelPriceToolSpecification);
ollamaAPI.registerTool(weatherToolSpecification);
ollamaAPI.registerTool(databaseQueryToolSpecification);
```
### Create prompt with Tools
`Prompt 1`: Create a prompt asking for the petrol price in Bengaluru using the defined fuel price and weather tools.
```java
String prompt1 = new Tools.PromptBuilder()
.withToolSpecification(fuelPriceToolSpecification)
.withToolSpecification(weatherToolSpecification)
.withPrompt("What is the petrol price in Bengaluru?")
.build();
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, new OptionsBuilder().build());
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
}
```
Now, fire away your question to the model.
You will get a response similar to:
::::tip[LLM Response]
[Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L
::::
`Prompt 2`: Create a prompt asking for the current weather in Bengaluru using the same tools.
```java
String prompt2 = new Tools.PromptBuilder()
.withToolSpecification(fuelPriceToolSpecification)
.withToolSpecification(weatherToolSpecification)
.withPrompt("What is the current weather in Bengaluru?")
.build();
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, new OptionsBuilder().build());
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
}
```
Again, fire away your question to the model.
You will get a response similar to:
::::tip[LLM Response]
[Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice.
::::
`Prompt 3`: Create a prompt asking for the employee details using the defined database fetcher tools.
```java
String prompt3 = new Tools.PromptBuilder()
.withToolSpecification(fuelPriceToolSpecification)
.withToolSpecification(weatherToolSpecification)
.withToolSpecification(databaseQueryToolSpecification)
.withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
.build();
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt3, new OptionsBuilder().build());
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
}
```
Again, fire away your question to the model.
You will get a response similar to:
::::tip[LLM Response]
[Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name:
Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
::::
### Full Example
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.exceptions.ToolInvocationException;
import io.github.ollama4j.tools.OllamaToolsResult;
import io.github.ollama4j.tools.ToolFunction;
import io.github.ollama4j.tools.Tools;
import io.github.ollama4j.utils.OptionsBuilder;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import java.util.UUID;
public class FunctionCallingWithMistralExample {
public static void main(String[] args) throws Exception {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
ollamaAPI.setRequestTimeoutSeconds(60);
String model = "mistral";
Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
.functionName("current-fuel-price")
.functionDescription("Get current fuel price")
.properties(
new Tools.PropsBuilder()
.withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
.withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
.build()
)
.toolDefinition(SampleTools::getCurrentFuelPrice)
.build();
Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
.functionName("current-weather")
.functionDescription("Get current weather")
.properties(
new Tools.PropsBuilder()
.withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
.build()
)
.toolDefinition(SampleTools::getCurrentWeather)
.build();
Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
.functionName("get-employee-details")
.functionDescription("Get employee details from the database")
.properties(
new Tools.PropsBuilder()
.withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
.withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
.withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
.build()
)
.toolDefinition(new DBQueryFunction())
.build();
ollamaAPI.registerTool(fuelPriceToolSpecification);
ollamaAPI.registerTool(weatherToolSpecification);
ollamaAPI.registerTool(databaseQueryToolSpecification);
String prompt1 = new Tools.PromptBuilder()
.withToolSpecification(fuelPriceToolSpecification)
.withToolSpecification(weatherToolSpecification)
.withPrompt("What is the petrol price in Bengaluru?")
.build();
ask(ollamaAPI, model, prompt1);
String prompt2 = new Tools.PromptBuilder()
.withToolSpecification(fuelPriceToolSpecification)
.withToolSpecification(weatherToolSpecification)
.withPrompt("What is the current weather in Bengaluru?")
.build();
ask(ollamaAPI, model, prompt2);
String prompt3 = new Tools.PromptBuilder()
.withToolSpecification(fuelPriceToolSpecification)
.withToolSpecification(weatherToolSpecification)
.withToolSpecification(databaseQueryToolSpecification)
.withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
.build();
ask(ollamaAPI, model, prompt3);
}
public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException {
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, new OptionsBuilder().build());
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
}
}
}
class SampleTools {
public static String getCurrentFuelPrice(Map<String, Object> arguments) {
// Get details from fuel price API
String location = arguments.get("location").toString();
String fuelType = arguments.get("fuelType").toString();
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
}
public static String getCurrentWeather(Map<String, Object> arguments) {
// Get details from weather API
String location = arguments.get("city").toString();
return "Currently " + location + "'s weather is nice.";
}
}
class DBQueryFunction implements ToolFunction {
@Override
public Object apply(Map<String, Object> arguments) {
// perform DB operations here
return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
}
}
```
Run this full example and you will get a response similar to:
::::tip[LLM Response]
[Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L
[Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice.
[Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name:
Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
::::
### Using tools in Chat-API
Instead of using the specific `ollamaAPI.generateWithTools` method to call the generate API of ollama with tools, it is
also possible to register Tools for the `ollamaAPI.chat` methods. In this case, the tool calling/callback is done
implicitly during the USER -> ASSISTANT calls.
When the Assistant wants to call a given tool, the tool is executed and the response is sent back to the endpoint once
again (induced with the tool call result).
#### Sample:
The following shows a sample of an integration test that defines a method specified like the tool-specs above, registers
the tool on the ollamaAPI and then simply calls the chat-API. All intermediate tool calling is wrapped inside the api
call.
```java
public static void main(String[] args) {
OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
ollamaAPI.setVerbose(true);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("llama3.2:1b");
final Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
.functionName("get-employee-details")
.functionDescription("Get employee details from the database")
.toolPrompt(
Tools.PromptFuncDefinition.builder().type("function").function(
Tools.PromptFuncDefinition.PromptFuncSpec.builder()
.name("get-employee-details")
.description("Get employee details from the database")
.parameters(
Tools.PromptFuncDefinition.Parameters.builder()
.type("object")
.properties(
new Tools.PropsBuilder()
.withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
.withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
.withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
.build()
)
.required(List.of("employee-name"))
.build()
).build()
).build()
)
.toolFunction(new DBQueryFunction())
.build();
ollamaAPI.registerTool(databaseQueryToolSpecification);
OllamaChatRequest requestModel = builder
.withMessage(OllamaChatMessageRole.USER,
"Give me the ID of the employee named 'Rahul Kumar'?")
.build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
}
```
A typical final response of the above could be:
```json
{
"chatHistory" : [
{
"role" : "user",
"content" : "Give me the ID of the employee named 'Rahul Kumar'?",
"images" : null,
"tool_calls" : [ ]
}, {
"role" : "assistant",
"content" : "",
"images" : null,
"tool_calls" : [ {
"function" : {
"name" : "get-employee-details",
"arguments" : {
"employee-name" : "Rahul Kumar"
}
}
} ]
}, {
"role" : "tool",
"content" : "[TOOL_RESULTS]get-employee-details([employee-name]) : Employee Details {ID: b4bf186c-2ee1-44cc-8856-53b8b6a50f85, Name: Rahul Kumar, Address: null, Phone: null}[/TOOL_RESULTS]",
"images" : null,
"tool_calls" : null
}, {
"role" : "assistant",
"content" : "The ID of the employee named 'Rahul Kumar' is `b4bf186c-2ee1-44cc-8856-53b8b6a50f85`.",
"images" : null,
"tool_calls" : null
} ],
"responseModel" : {
"model" : "llama3.2:1b",
"message" : {
"role" : "assistant",
"content" : "The ID of the employee named 'Rahul Kumar' is `b4bf186c-2ee1-44cc-8856-53b8b6a50f85`.",
"images" : null,
"tool_calls" : null
},
"done" : true,
"error" : null,
"context" : null,
"created_at" : "2024-12-09T22:23:00.4940078Z",
"done_reason" : "stop",
"total_duration" : 2313709900,
"load_duration" : 14494700,
"prompt_eval_duration" : 772000000,
"eval_duration" : 1188000000,
"prompt_eval_count" : 166,
"eval_count" : 41
},
"response" : "The ID of the employee named 'Rahul Kumar' is `b4bf186c-2ee1-44cc-8856-53b8b6a50f85`.",
"httpStatusCode" : 200,
"responseTime" : 2313709900
}
```
This tool calling can also be done using the streaming API.
### Using Annotation based Tool Registration
Instead of explicitly registering each tool, ollama4j supports declarative tool specification and registration via Java
annotations and reflection.
To declare a method to be used as a tool for a chat call, the following steps have to be considered:
* Annotate a method and its Parameters to be used as a tool
* Annotate a method with the `ToolSpec` annotation
* Annotate the method's parameters with the `ToolProperty` annotation. Only the following datatypes are supported for now:
* `java.lang.String`
* `java.lang.Integer`
* `java.lang.Boolean`
* `java.math.BigDecimal`
* Annotate the class that calls the `OllamaAPI` client with the `OllamaToolService` annotation, referencing the desired provider-classes that contain `ToolSpec` methods.
* Before calling the `OllamaAPI` chat request, call the method `OllamaAPI.registerAnnotatedTools()` method to add tools to the chat.
#### Example
Let's say we have an ollama4j service class that should ask an LLM a specific tool-based question.
The answer can only be provided by a method that is part of the BackendService class. To provide a tool to the LLM, the following annotations can be used:
```java
public class BackendService{
public BackendService(){}
@ToolSpec(desc = "Computes the most important constant all around the globe!")
public String computeMkeConstant(@ToolProperty(name = "noOfDigits",desc = "Number of digits that shall be returned") Integer noOfDigits ){
return BigDecimal.valueOf((long)(Math.random()*1000000L),noOfDigits).toString();
}
}
```
The caller API can then be written as:
```java
import io.github.ollama4j.tools.annotations.OllamaToolService;
@OllamaToolService(providers = BackendService.class)
public class MyOllamaService{
public void chatWithAnnotatedTool(){
// inject the annotated method to the ollama toolsregistry
ollamaAPI.registerAnnotatedTools();
OllamaChatRequest requestModel = builder
.withMessage(OllamaChatMessageRole.USER,
"Compute the most important constant in the world using 5 digits")
.build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
}
}
```
Or, if one needs to provide an object instance directly:
```java
public class MyOllamaService{
public void chatWithAnnotatedTool(){
ollamaAPI.registerAnnotatedTools(new BackendService());
OllamaChatRequest requestModel = builder
.withMessage(OllamaChatMessageRole.USER,
"Compute the most important constant in the world using 5 digits")
.build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
}
}
```
The request should be the following:
```json
{
"model" : "llama3.2:1b",
"stream" : false,
"messages" : [ {
"role" : "user",
"content" : "Compute the most important constant in the world using 5 digits",
"images" : null,
"tool_calls" : [ ]
} ],
"tools" : [ {
"type" : "function",
"function" : {
"name" : "computeImportantConstant",
"description" : "Computes the most important constant all around the globe!",
"parameters" : {
"type" : "object",
"properties" : {
"noOfDigits" : {
"type" : "java.lang.Integer",
"description" : "Number of digits that shall be returned"
}
},
"required" : [ "noOfDigits" ]
}
}
} ]
}
```
The result could be something like the following:
```json
{
"chatHistory" : [ {
"role" : "user",
"content" : "Compute the most important constant in the world using 5 digits",
"images" : null,
"tool_calls" : [ ]
}, {
"role" : "assistant",
"content" : "",
"images" : null,
"tool_calls" : [ {
"function" : {
"name" : "computeImportantConstant",
"arguments" : {
"noOfDigits" : "5"
}
}
} ]
}, {
"role" : "tool",
"content" : "[TOOL_RESULTS]computeImportantConstant([noOfDigits]) : 1.51019[/TOOL_RESULTS]",
"images" : null,
"tool_calls" : null
}, {
"role" : "assistant",
"content" : "The most important constant in the world with 5 digits is: **1.51019**",
"images" : null,
"tool_calls" : null
} ],
"responseModel" : {
"model" : "llama3.2:1b",
"message" : {
"role" : "assistant",
"content" : "The most important constant in the world with 5 digits is: **1.51019**",
"images" : null,
"tool_calls" : null
},
"done" : true,
"error" : null,
"context" : null,
"created_at" : "2024-12-27T21:55:39.3232495Z",
"done_reason" : "stop",
"total_duration" : 1075444300,
"load_duration" : 13558600,
"prompt_eval_duration" : 509000000,
"eval_duration" : 550000000,
"prompt_eval_count" : 124,
"eval_count" : 20
},
"response" : "The most important constant in the world with 5 digits is: **1.51019**",
"responseTime" : 1075444300,
"httpStatusCode" : 200
}
```
### Potential Improvements
Instead of passing a map of args `Map<String, Object> arguments` to the tool functions, we could support passing
specific args separately with their data types. For example:
```java
public String getCurrentFuelPrice(String location, String fuelType) {
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
}
```
Updating async/chat APIs with support for tool-based generation.

View File

@@ -0,0 +1,175 @@
---
sidebar_position: 1
---
# Generate - Sync
This API lets you ask questions to the LLMs in a synchronous way.
This API corresponds to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
Use the `OptionsBuilder` to build the `Options` object
with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
Refer
to [this](/apis-extras/options-builder).
## Try asking a question about the model.
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaResult result =
ollamaAPI.generate(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());
System.out.println(result.getResponse());
}
}
```
You will get a response similar to:
> I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational
> manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide
> range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that
> require
> natural language understanding and generation capabilities.
## Try asking a question, receiving the answer streamed
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
// define a stream handler (Consumer<String>)
OllamaStreamHandler streamHandler = (s) -> {
System.out.println(s);
};
// Should be called from a separate thread to gain a non-blocking streaming effect.
OllamaResult result = ollamaAPI.generate(config.getModel(),
"What is the capital of France? And what's France's connection with Mona Lisa?",
new OptionsBuilder().build(), streamHandler);
System.out.println("Full response: " + result.getResponse());
}
}
```
You will get a response similar to:
> The
> The capital
> The capital of
> The capital of France
> The capital of France is
> The capital of France is Paris
> The capital of France is Paris.
> Full response: The capital of France is Paris.
## Try asking a question from general topics.
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
String prompt = "List all cricket world cup teams of 2019.";
OllamaResult result =
ollamaAPI.generate(OllamaModelType.LLAMA2, prompt, new OptionsBuilder().build());
System.out.println(result.getResponse());
}
}
```
You'd then get a response from the model:
> The 2019 ICC Cricket World Cup was held in England and Wales from May 30 to July 14, 2019. The
> following teams
> participated in the tournament:
>
> 1. Afghanistan
> 2. Australia
> 3. Bangladesh
> 4. England
> 5. India
> 6. New Zealand
> 7. Pakistan
> 8. South Africa
> 9. Sri Lanka
> 10. West Indies
>
> These teams competed in a round-robin format, with the top four teams advancing to the
> semi-finals. The tournament was
> won by the England cricket team, who defeated New Zealand in the final.
## Try asking for a Database query for your data schema.
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import io.github.ollama4j.utils.SamplePrompts;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
String prompt =
SamplePrompts.getSampleDatabasePromptWithQuestion(
"List all customer names who have bought one or more products");
OllamaResult result =
ollamaAPI.generate(OllamaModelType.SQLCODER, prompt, new OptionsBuilder().build());
System.out.println(result.getResponse());
}
}
```
_Note: Here I've used
a [sample prompt](https://github.com/ollama4j/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
containing a database schema from within this library for demonstration purposes._
You'd then get a response from the model:
```sql
SELECT customers.name
FROM sales
JOIN customers ON sales.customer_id = customers.customer_id
GROUP BY customers.name;
```

View File

@@ -0,0 +1,74 @@
---
sidebar_position: 6
---
# Prompt Builder
This is designed for prompt engineering. It allows you to easily build the prompt text for zero-shot, one-shot, few-shot
inferences.
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import io.github.ollama4j.utils.PromptBuilder;
public class Main {
public static void main(String[] args) throws Exception {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
ollamaAPI.setRequestTimeoutSeconds(10);
String model = OllamaModelType.PHI;
PromptBuilder promptBuilder =
new PromptBuilder()
.addLine("You are an expert coder and understand different programming languages.")
.addLine("Given a question, answer ONLY with code.")
.addLine("Produce clean, formatted and indented code in markdown format.")
.addLine(
"DO NOT include ANY extra text apart from code. Follow this instruction very strictly!")
.addLine("If there's any additional information you want to add, use comments within code.")
.addLine("Answer only in the programming language that has been asked for.")
.addSeparator()
.addLine("Example: Sum 2 numbers in Python")
.addLine("Answer:")
.addLine("```python")
.addLine("def sum(num1: int, num2: int) -> int:")
.addLine(" return num1 + num2")
.addLine("```")
.addSeparator()
.add("How do I read a file in Go and print its contents to stdout?");
boolean raw = false;
OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), raw, new OptionsBuilder().build());
System.out.println(response.getResponse());
}
}
```
You will get a response similar to:
```go
package main
import (
"fmt"
"io/ioutil"
)
func readFile(fileName string) {
file, err := ioutil.ReadFile(fileName)
if err != nil {
fmt.Fprintln(os.Stderr, "Error reading file:", err.Error())
return
}
f, _ := ioutil.ReadFile("file.txt")
if f != nil {
fmt.Println(f.String())
}
}
```

View File

@@ -1,6 +1,6 @@
{
"label": "APIs - Model Management",
"position": 4,
"position": 2,
"link": {
"type": "generated-index",
"description": "Details of APIs to manage LLMs."

View File

@@ -1,12 +1,16 @@
---
sidebar_position: 4
sidebar_position: 5
---
# Create Model
This API lets you create a custom model on the Ollama server.
### Create a custom model from an existing model in the Ollama server
```java title="CreateModel.java"
import io.github.ollama4j.OllamaAPI;
public class CreateModel {
public static void main(String[] args) {
@@ -15,9 +19,220 @@ public class CreateModel {
OllamaAPI ollamaAPI = new OllamaAPI(host);
ollamaAPI.createModel("mycustommodel", "/path/to/modelfile/on/ollama-server");
ollamaAPI.createModel(CustomModelRequest.builder().model("mario").from("llama3.2:latest").system("You are Mario from Super Mario Bros.").build());
}
}
```
Once created, you can see it when you use [list models](./list-models) API.
Once created, you can see it when you use [list models](./list-models) API.
[Read more](https://github.com/ollama/ollama/blob/main/docs/api.md#create-a-model) about custom model creation and the parameters available for model creation.
[//]: # ()
[//]: # (### Example of a `Modelfile`)
[//]: # ()
[//]: # (```)
[//]: # (FROM llama2)
[//]: # (# sets the temperature to 1 [higher is more creative, lower is more coherent])
[//]: # (PARAMETER temperature 1)
[//]: # (# sets the context window size to 4096, this controls how many tokens the LLM can use as context to generate the next token)
[//]: # (PARAMETER num_ctx 4096)
[//]: # ()
[//]: # (# sets a custom system message to specify the behavior of the chat assistant)
[//]: # (SYSTEM You are Mario from super mario bros, acting as an assistant.)
[//]: # (```)
[//]: # ()
[//]: # (### Format of the `Modelfile`)
[//]: # ()
[//]: # (```modelfile)
[//]: # (# comment)
[//]: # (INSTRUCTION arguments)
[//]: # (```)
[//]: # ()
[//]: # (| Instruction | Description |)
[//]: # (|-------------------------------------|----------------------------------------------------------------|)
[//]: # (| [`FROM`]&#40;#from-required&#41; &#40;required&#41; | Defines the base model to use. |)
[//]: # (| [`PARAMETER`]&#40;#parameter&#41; | Sets the parameters for how Ollama will run the model. |)
[//]: # (| [`TEMPLATE`]&#40;#template&#41; | The full prompt template to be sent to the model. |)
[//]: # (| [`SYSTEM`]&#40;#system&#41; | Specifies the system message that will be set in the template. |)
[//]: # (| [`ADAPTER`]&#40;#adapter&#41; | Defines the &#40;Q&#41;LoRA adapters to apply to the model. |)
[//]: # (| [`LICENSE`]&#40;#license&#41; | Specifies the legal license. |)
[//]: # ()
[//]: # (#### PARAMETER)
[//]: # ()
[//]: # (The `PARAMETER` instruction defines a parameter that can be set when the model is run.)
[//]: # ()
[//]: # (| Parameter | Description | Value Type | Example Usage |)
[//]: # (|----------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------|----------------------|)
[//]: # (| mirostat | Enable Mirostat sampling for controlling perplexity. &#40;default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0&#41; | int | mirostat 0 |)
[//]: # (| mirostat_eta | Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. &#40;Default: 0.1&#41; | float | mirostat_eta 0.1 |)
[//]: # (| mirostat_tau | Controls the balance between coherence and diversity of the output. A lower value will result in more focused and coherent text. &#40;Default: 5.0&#41; | float | mirostat_tau 5.0 |)
[//]: # (| num_ctx | Sets the size of the context window used to generate the next token. &#40;Default: 2048&#41; | int | num_ctx 4096 |)
[//]: # (| num_gqa | The number of GQA groups in the transformer layer. Required for some models, for example it is 8 for llama2:70b | int | num_gqa 1 |)
[//]: # (| num_gpu | The number of layers to send to the GPU&#40;s&#41;. On macOS it defaults to 1 to enable metal support, 0 to disable. | int | num_gpu 50 |)
[//]: # (| num_thread | Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has &#40;as opposed to the logical number of cores&#41;. | int | num_thread 8 |)
[//]: # (| repeat_last_n | Sets how far back for the model to look back to prevent repetition. &#40;Default: 64, 0 = disabled, -1 = num_ctx&#41; | int | repeat_last_n 64 |)
[//]: # (| repeat_penalty | Sets how strongly to penalize repetitions. A higher value &#40;e.g., 1.5&#41; will penalize repetitions more strongly, while a lower value &#40;e.g., 0.9&#41; will be more lenient. &#40;Default: 1.1&#41; | float | repeat_penalty 1.1 |)
[//]: # (| temperature | The temperature of the model. Increasing the temperature will make the model answer more creatively. &#40;Default: 0.8&#41; | float | temperature 0.7 |)
[//]: # (| seed | Sets the random number seed to use for generation. Setting this to a specific number will make the model generate the same text for the same prompt. &#40;Default: 0&#41; | int | seed 42 |)
[//]: # (| stop | Sets the stop sequences to use. When this pattern is encountered the LLM will stop generating text and return. Multiple stop patterns may be set by specifying multiple separate `stop` parameters in a modelfile. | string | stop "AI assistant:" |)
[//]: # (| tfs_z | Tail free sampling is used to reduce the impact of less probable tokens from the output. A higher value &#40;e.g., 2.0&#41; will reduce the impact more, while a value of 1.0 disables this setting. &#40;default: 1&#41; | float | tfs_z 1 |)
[//]: # (| num_predict | Maximum number of tokens to predict when generating text. &#40;Default: 128, -1 = infinite generation, -2 = fill context&#41; | int | num_predict 42 |)
[//]: # (| top_k | Reduces the probability of generating nonsense. A higher value &#40;e.g. 100&#41; will give more diverse answers, while a lower value &#40;e.g. 10&#41; will be more conservative. &#40;Default: 40&#41; | int | top_k 40 |)
[//]: # (| top_p | Works together with top-k. A higher value &#40;e.g., 0.95&#41; will lead to more diverse text, while a lower value &#40;e.g., 0.5&#41; will generate more focused and conservative text. &#40;Default: 0.9&#41; | float | top_p 0.9 |)
[//]: # ()
[//]: # (#### TEMPLATE)
[//]: # ()
[//]: # (`TEMPLATE` of the full prompt template to be passed into the model. It may include &#40;optionally&#41; a system message and a)
[//]: # (user's prompt. This is used to create a full custom prompt, and syntax may be model specific. You can usually find the)
[//]: # (template for a given model in the readme for that model.)
[//]: # ()
[//]: # (#### Template Variables)
[//]: # ()
[//]: # (| Variable | Description |)
[//]: # (|-----------------|---------------------------------------------------------------------------------------------------------------|)
[//]: # (| `{{ .System }}` | The system message used to specify custom behavior, this must also be set in the Modelfile as an instruction. |)
[//]: # (| `{{ .Prompt }}` | The incoming prompt, this is not specified in the model file and will be set based on input. |)
[//]: # (| `{{ .First }}` | A boolean value used to render specific template information for the first generation of a session. |)
[//]: # ()
[//]: # (```modelfile)
[//]: # (TEMPLATE """)
[//]: # ({{- if .First }})
[//]: # (### System:)
[//]: # ({{ .System }})
[//]: # ({{- end }})
[//]: # ()
[//]: # (### User:)
[//]: # ({{ .Prompt }})
[//]: # ()
[//]: # (### Response:)
[//]: # (""")
[//]: # ()
[//]: # (SYSTEM """<system message>""")
[//]: # (```)
[//]: # ()
[//]: # (### SYSTEM)
[//]: # ()
[//]: # (The `SYSTEM` instruction specifies the system message to be used in the template, if applicable.)
[//]: # ()
[//]: # (```modelfile)
[//]: # (SYSTEM """<system message>""")
[//]: # (```)
[//]: # ()
[//]: # (### ADAPTER)
[//]: # ()
[//]: # (The `ADAPTER` instruction specifies the LoRA adapter to apply to the base model. The value of this instruction should be)
[//]: # (an absolute path or a path relative to the Modelfile and the file must be in a GGML file format. The adapter should be)
[//]: # (tuned from the base model otherwise the behaviour is undefined.)
[//]: # ()
[//]: # (```modelfile)
[//]: # (ADAPTER ./ollama-lora.bin)
[//]: # (```)
[//]: # ()
[//]: # (### LICENSE)
[//]: # ()
[//]: # (The `LICENSE` instruction allows you to specify the legal license under which the model used with this Modelfile is)
[//]: # (shared or distributed.)
[//]: # ()
[//]: # (```modelfile)
[//]: # (LICENSE """)
[//]: # (<license text>)
[//]: # (""")
[//]: # (```)
[//]: # ()
[//]: # (## Notes)
[//]: # ()
[//]: # (- the **`Modelfile` is not case sensitive**. In the examples, uppercase instructions are used to make it easier to)
[//]: # ( distinguish it from arguments.)
[//]: # (- Instructions can be in any order. In the examples, the `FROM` instruction is first to keep it easily readable.)
[//]: # ()
[//]: # (Read more about Modelfile: https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md)

View File

@@ -1,5 +1,5 @@
---
sidebar_position: 5
sidebar_position: 6
---
# Delete Model
@@ -7,6 +7,8 @@ sidebar_position: 5
This API lets you delete a model from the Ollama server.
```java title="DeleteModel.java"
import io.github.ollama4j.OllamaAPI;
public class Main {
public static void main(String[] args) {

View File

@@ -1,5 +1,5 @@
---
sidebar_position: 3
sidebar_position: 4
---
# Get Model Details
@@ -7,6 +7,10 @@ sidebar_position: 3
This API lets you get the details of a model on the Ollama server.
```java title="GetModelDetails.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.ModelDetail;
import io.github.ollama4j.types.OllamaModelType;
public class Main {
public static void main(String[] args) {

View File

@@ -0,0 +1,133 @@
---
sidebar_position: 1
---
# Models from Ollama Library
This API retrieves a list of models directly from the Ollama library.
### List Models from Ollama Library
This API fetches available models from the Ollama library page, including details such as the model's name, pull count,
popular tags, tag count, and the last update time.
```java title="ListLibraryModels.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.LibraryModel;
import java.util.List;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
List<LibraryModel> libraryModels = ollamaAPI.listModelsFromLibrary();
System.out.println(libraryModels);
}
}
```
The following is the sample output:
```
[
LibraryModel(name=llama3.2-vision, description=Llama 3.2 Vision is a collection of instruction-tuned image reasoning generative models in 11B and 90B sizes., pullCount=21.1K, totalTags=9, popularTags=[vision, 11b, 90b], lastUpdated=yesterday),
LibraryModel(name=llama3.2, description=Meta's Llama 3.2 goes small with 1B and 3B models., pullCount=2.4M, totalTags=63, popularTags=[tools, 1b, 3b], lastUpdated=6 weeks ago)
]
```
### Get Tags of a Library Model
This API Fetches the tags associated with a specific model from Ollama library.
```java title="GetLibraryModelTags.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.LibraryModel;
import io.github.ollama4j.models.response.LibraryModelDetail;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
List<LibraryModel> libraryModels = ollamaAPI.listModelsFromLibrary();
LibraryModelDetail libraryModelDetail = ollamaAPI.getLibraryModelDetails(libraryModels.get(0));
System.out.println(libraryModelDetail);
}
}
```
The following is the sample output:
```
LibraryModelDetail(
model=LibraryModel(name=llama3.2-vision, description=Llama 3.2 Vision is a collection of instruction-tuned image reasoning generative models in 11B and 90B sizes., pullCount=21.1K, totalTags=9, popularTags=[vision, 11b, 90b], lastUpdated=yesterday),
tags=[
LibraryModelTag(name=llama3.2-vision, tag=latest, size=7.9GB, lastUpdated=yesterday),
LibraryModelTag(name=llama3.2-vision, tag=11b, size=7.9GB, lastUpdated=yesterday),
LibraryModelTag(name=llama3.2-vision, tag=90b, size=55GB, lastUpdated=yesterday)
]
)
```
### Find a model from Ollama library
This API finds a specific model using model `name` and `tag` from Ollama library.
```java title="FindLibraryModel.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.LibraryModelTag;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
LibraryModelTag libraryModelTag = ollamaAPI.findModelTagFromLibrary("qwen2.5", "7b");
System.out.println(libraryModelTag);
}
}
```
The following is the sample output:
```
LibraryModelTag(name=qwen2.5, tag=7b, size=4.7GB, lastUpdated=7 weeks ago)
```
### Pull model using `LibraryModelTag`
You can use `LibraryModelTag` to pull models into Ollama server.
```java title="PullLibraryModelTags.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.LibraryModelTag;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
LibraryModelTag libraryModelTag = ollamaAPI.findModelTagFromLibrary("qwen2.5", "7b");
ollamaAPI.pullModel(libraryModelTag);
}
}
```

View File

@@ -1,12 +1,17 @@
---
sidebar_position: 1
sidebar_position: 2
---
# List Models
# List Local Models
This API lets you list available models on the Ollama server.
This API lets you list downloaded/available models on the Ollama server.
```java title="ListModels.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.Model;
import java.util.List;
public class ListModels {
public static void main(String[] args) {

View File

@@ -1,5 +1,5 @@
---
sidebar_position: 2
sidebar_position: 3
---
# Pull Model
@@ -7,10 +7,13 @@ sidebar_position: 2
This API lets you pull a model on the Ollama server.
```java title="PullModel.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;
public class Main {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
@@ -20,4 +23,12 @@ public class Main {
}
```
Once downloaded, you can see them when you use [list models](./list-models) API.
Once downloaded, you can see them when you use [list models](./list-models) API.
:::info
You can even pull models using Ollama model library APIs. This looks up the models directly on the Ollama model library page. Refer
to [this](./list-library-models#pull-model-using-librarymodeltag).
:::

View File

@@ -2,10 +2,38 @@
sidebar_position: 1
---
# Intro
# Introduction
Let's get started with **Ollama4j**.
## 🦙 What is Ollama?
[Ollama](https://ollama.ai/) is an advanced AI tool that allows users to easily set up and run large language models
locally (in CPU and GPU
modes). With Ollama, users can leverage powerful language models such as Llama 2 and even customize and create their own
models.
## 👨‍💻 Why Ollama4j?
Ollama4j was built for the simple purpose of integrating Ollama with Java applications.
```mermaid
flowchart LR
o4j[Ollama4j]
o[Ollama Server]
o4j -->|Communicates with| o;
m[Models]
p[Your Java Project]
subgraph Your Java Environment
direction TB
p -->|Uses| o4j
end
subgraph Ollama Setup
direction TB
o -->|Manages| m
end
```
## Getting Started
### What you'll need
@@ -50,13 +78,13 @@ Add the dependency to your project's `pom.xml`.
```xml
<dependency>
<groupId>io.github.amithkoujalgi</groupId>
<groupId>io.github.ollama4j</groupId>
<artifactId>ollama4j</artifactId>
<version>1.0.27</version>
<version>1.0.78</version>
</dependency>
```
Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j).
Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j).
You might want to include an implementation of [SL4J](https://www.slf4j.org/) logger in your `pom.xml` file. For
example,
@@ -88,6 +116,26 @@ or use other suitable implementations.
Create a new Java class in your project and add this code.
```java
import io.github.ollama4j.OllamaAPI;
public class OllamaAPITest {
public static void main(String[] args) {
OllamaAPI ollamaAPI = new OllamaAPI();
boolean isOllamaServerReachable = ollamaAPI.ping();
System.out.println("Is Ollama server running: " + isOllamaServerReachable);
}
}
```
This uses the default Ollama host as `http://localhost:11434`.
Specify a different Ollama host that you want to connect to.
```java
import io.github.ollama4j.OllamaAPI;
public class OllamaAPITest {
public static void main(String[] args) {
@@ -99,7 +147,7 @@ public class OllamaAPITest {
boolean isOllamaServerReachable = ollamaAPI.ping();
System.out.println("Is Ollama server alive: " + isOllamaServerReachable);
System.out.println("Is Ollama server running: " + isOllamaServerReachable);
}
}
```

View File

@@ -20,7 +20,7 @@ const config = {
// GitHub pages deployment config.
// If you aren't using GitHub pages, you don't need these.
organizationName: 'amithkoujalgi', // Usually your GitHub org/user name.
organizationName: 'ollama4j', // Usually your GitHub org/user name.
projectName: 'ollama4j', // Usually your repo name.
onBrokenLinks: 'throw',
@@ -40,22 +40,28 @@ const config = {
/** @type {import('@docusaurus/preset-classic').Options} */
({
docs: {
path: 'docs',
routeBasePath: '', // change this to any URL route you'd want. For example: `home` - if you want /home/intro.
sidebarPath: './sidebars.js',
// Please change this to your repo.
// Remove this to remove the "edit this page" links.
editUrl:
'https://github.com/amithkoujalgi/ollama4j/blob/main/docs',
'https://github.com/ollama4j/ollama4j/blob/main/docs',
},
blog: {
showReadingTime: true,
// Please change this to your repo.
// Remove this to remove the "edit this page" links.
editUrl:
'https://github.com/amithkoujalgi/ollama4j/blob/main/docs',
'https://github.com/ollama4j/ollama4j/blob/main/docs',
},
theme: {
customCss: './src/css/custom.css',
},
gtag: {
trackingID: 'G-G7FLH6FNDC',
anonymizeIP: false,
},
}),
],
],
@@ -76,12 +82,13 @@ const config = {
type: 'docSidebar',
sidebarId: 'tutorialSidebar',
position: 'left',
label: 'Usage',
label: 'Docs',
},
{to: 'https://amithkoujalgi.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
{to: 'https://ollama4j.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
{to: 'https://ollama4j.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'},
{to: '/blog', label: 'Blog', position: 'left'},
{
href: 'https://github.com/amithkoujalgi/ollama4j',
href: 'https://github.com/ollama4j/ollama4j',
label: 'GitHub',
position: 'right',
},
@@ -95,7 +102,7 @@ const config = {
items: [
{
label: 'Tutorial',
to: '/docs/intro',
to: '/intro',
},
],
},
@@ -121,7 +128,7 @@ const config = {
},
{
label: 'GitHub',
href: 'https://github.com/amithkoujalgi/ollama4j',
href: 'https://github.com/ollama4j/ollama4j',
},
],
},
@@ -131,8 +138,13 @@ const config = {
prism: {
theme: prismThemes.github,
darkTheme: prismThemes.dracula,
additionalLanguages: ['java'],
},
}),
markdown: {
mermaid: true,
},
themes: ['@docusaurus/theme-mermaid']
};
export default config;

3481
docs/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -14,8 +14,10 @@
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docusaurus/core": "3.0.1",
"@docusaurus/preset-classic": "3.0.1",
"@docusaurus/core": "^3.4.0",
"@docusaurus/plugin-google-gtag": "^3.4.0",
"@docusaurus/preset-classic": "^3.4.0",
"@docusaurus/theme-mermaid": "^3.4.0",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0",
@@ -23,8 +25,8 @@
"react-dom": "^18.0.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.0.1",
"@docusaurus/types": "3.0.1"
"@docusaurus/module-type-aliases": "^3.4.0",
"@docusaurus/types": "^3.4.0"
},
"browserslist": {
"production": [

View File

@@ -0,0 +1,41 @@
import React from "react";
class BuyMeACoffee extends React.Component {
constructor(props) {
super(props)
let script = document.createElement("script");
script.src = 'https://cdnjs.buymeacoffee.com/1.0.0/widget.prod.min.js';
script.dataset.name = 'BMC-Widget';
script.dataset.cfasync = 'false';
script.dataset.id = 'amithkoujalgi';
script.dataset.description = 'Support me on Buy me a coffee!';
script.dataset.message = 'If you like my work and want to say thanks, or encourage me to do more, you can buy me a coffee! 😊';
script.dataset.color = '#2e8555';
script.dataset.position = 'Right';
script.dataset.x_margin = '18';
script.dataset.y_margin = '18';
script.async = true
script.onload = function () {
let evt = document.createEvent('Event');
evt.initEvent('DOMContentLoaded', false, false);
window.dispatchEvent(evt);
}
this.script = script
}
componentDidMount() {
document.head.appendChild(this.script)
}
// componentWillUnmount() {
// document.head.removeChild(this.script);
// document.body.removeChild(document.getElementById("bmc-wbtn"))
// }
render() {
return null
}
}
export default BuyMeACoffee;

View File

@@ -6,25 +6,43 @@
/* You can override the default Infima variables here. */
:root {
--ifm-color-primary: #2e8555;
--ifm-color-primary-dark: #29784c;
--ifm-color-primary-darker: #277148;
--ifm-color-primary-darkest: #205d3b;
--ifm-color-primary-light: #33925d;
--ifm-color-primary-lighter: #359962;
--ifm-color-primary-lightest: #3cad6e;
--ifm-code-font-size: 95%;
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1);
--ifm-color-primary: #2e8555;
--ifm-color-primary-dark: #29784c;
--ifm-color-primary-darker: #277148;
--ifm-color-primary-darkest: #205d3b;
--ifm-color-primary-light: #33925d;
--ifm-color-primary-lighter: #359962;
--ifm-color-primary-lightest: #3cad6e;
--ifm-code-font-size: 95%;
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1);
}
/* For readability concerns, you should choose a lighter palette in dark mode. */
[data-theme='dark'] {
--ifm-color-primary: #25c2a0;
--ifm-color-primary-dark: #21af90;
--ifm-color-primary-darker: #1fa588;
--ifm-color-primary-darkest: #1a8870;
--ifm-color-primary-light: #29d5b0;
--ifm-color-primary-lighter: #32d8b4;
--ifm-color-primary-lightest: #4fddbf;
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3);
--ifm-color-primary: #25c2a0;
--ifm-color-primary-dark: #21af90;
--ifm-color-primary-darker: #1fa588;
--ifm-color-primary-darkest: #1a8870;
--ifm-color-primary-light: #29d5b0;
--ifm-color-primary-lighter: #32d8b4;
--ifm-color-primary-lightest: #4fddbf;
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3);
}
article > header > h1 {
font-size: 2rem !important;
}
div > h1,
header > h1,
h2 > a {
font-size: 2rem !important;
}
#bmc-wbtn{
bottom: 15px;
}
#bmc-wbtn + div{
bottom:15px;
}

View File

@@ -3,38 +3,43 @@ import Link from '@docusaurus/Link';
import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
import Layout from '@theme/Layout';
import HomepageFeatures from '@site/src/components/HomepageFeatures';
import BuyMeACoffee from '@site/src/components/BuyMeACoffee';
import Heading from '@theme/Heading';
import styles from './index.module.css';
import BrowserOnly from '@docusaurus/BrowserOnly';
function HomepageHeader() {
const {siteConfig} = useDocusaurusContext();
return (<header className={clsx('hero hero--primary', styles.heroBanner)}>
<div className="container">
<Heading as="h1" className="hero__title">
{siteConfig.title}
</Heading>
<img src="img/logo.svg" alt="Ollama4j Logo" className={styles.logo} style={{maxWidth: '20vh'}}/>
<p className="hero__subtitle">{siteConfig.tagline}</p>
<div className={styles.buttons}>
<Link
className="button button--secondary button--lg"
to="/docs/intro">
Getting Started
</Link>
</div>
</div>
</header>);
const {siteConfig} = useDocusaurusContext();
return (<header className={clsx('hero hero--primary', styles.heroBanner)}>
<div className="container">
<Heading as="h1" className="hero__title">
{siteConfig.title}
</Heading>
<img src="img/logo.svg" alt="Ollama4j Logo" className={styles.logo}
style={{maxWidth: '20vh'}}/>
<p className="hero__subtitle">{siteConfig.tagline}</p>
<div className={styles.buttons}>
<Link
className="button button--secondary button--lg"
to="/intro">
Getting Started
</Link>
</div>
</div>
</header>);
}
export default function Home() {
const {siteConfig} = useDocusaurusContext();
return (<Layout
title={`Hello from ${siteConfig.title}`}
description="Description will go into a meta tag in <head />">
<HomepageHeader/>
<main>
<HomepageFeatures/>
</main>
</Layout>);
}
const {siteConfig} = useDocusaurusContext();
return (<Layout
title={`Hello from ${siteConfig.title}`}
description="Description will go into a meta tag in <head />">
<HomepageHeader/>
<main>
<HomepageFeatures/>
<BrowserOnly>
{() => <BuyMeACoffee />}
</BrowserOnly>
</main>
</Layout>);
}

BIN
logo-small.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.0 KiB

581
pom.xml
View File

@@ -1,23 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>io.github.amithkoujalgi</groupId>
<groupId>io.github.ollama4j</groupId>
<artifactId>ollama4j</artifactId>
<version>1.0.36</version>
<version>ollama4j-revision</version>
<name>Ollama4j</name>
<description>Java library for interacting with Ollama API.</description>
<url>https://github.com/amithkoujalgi/ollama4j</url>
<name>Ollama4j</name>
<description>Java library for interacting with Ollama API.</description>
<url>https://github.com/ollama4j/ollama4j</url>
<packaging>jar</packaging>
<properties>
<maven.compiler.source>11</maven.compiler.source>
<maven.compiler.target>11</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven-surefire-plugin.version>3.0.0-M5</maven-surefire-plugin.version>
<maven-failsafe-plugin.version>3.0.0-M5</maven-failsafe-plugin.version>
<lombok.version>1.18.30</lombok.version>
</properties>
<properties>
<maven.compiler.source>11</maven.compiler.source>
<maven.compiler.target>11</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven-surefire-plugin.version>3.0.0-M5</maven-surefire-plugin.version>
<maven-failsafe-plugin.version>3.0.0-M5</maven-failsafe-plugin.version>
<lombok.version>1.18.30</lombok.version>
</properties>
<developers>
<developer>
@@ -28,273 +30,302 @@
</developer>
</developers>
<licenses>
<license>
<name>MIT License</name>
<url>https://raw.githubusercontent.com/amithkoujalgi/ollama4j/main/LICENSE</url>
</license>
</licenses>
<licenses>
<license>
<name>MIT License</name>
<url>https://raw.githubusercontent.com/ollama4j/ollama4j/main/LICENSE</url>
</license>
</licenses>
<scm>
<connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
<developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
<url>https://github.com/amithkoujalgi/ollama4j</url>
<tag>v1.0.36</tag>
</scm>
<scm>
<connection>scm:git:git@github.com:ollama4j/ollama4j.git</connection>
<developerConnection>scm:git:https://github.com/ollama4j/ollama4j.git</developerConnection>
<url>https://github.com/ollama4j/ollama4j</url>
<tag>ollama4j-revision</tag>
</scm>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.3.0</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>3.5.0</version>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- <plugin>-->
<!-- <groupId>org.apache.maven.plugins</groupId>-->
<!-- <artifactId>maven-gpg-plugin</artifactId>-->
<!-- <version>1.5</version>-->
<!-- <executions>-->
<!-- <execution>-->
<!-- <id>sign-artifacts</id>-->
<!-- <phase>verify</phase>-->
<!-- <goals>-->
<!-- <goal>sign</goal>-->
<!-- </goals>-->
<!-- <configuration>-->
<!-- &lt;!&ndash; This is necessary for gpg to not try to use the pinentry programs &ndash;&gt;-->
<!-- <gpgArguments>-->
<!-- <arg>&#45;&#45;pinentry-mode</arg>-->
<!-- <arg>loopback</arg>-->
<!-- </gpgArguments>-->
<!-- </configuration>-->
<!-- </execution>-->
<!-- </executions>-->
<!-- </plugin>-->
<!-- Surefire Plugin for Unit Tests -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
<skipTests>${skipUnitTests}</skipTests>
<includes>
<include>**/unittests/*.java</include>
</includes>
</configuration>
</plugin>
<!-- Failsafe Plugin for Integration Tests -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>${maven-failsafe-plugin.version}</version>
<configuration>
<includes>
<include>**/integrationtests/*.java</include>
</includes>
<excludes>
<exclude>**/unittests/*.java</exclude>
</excludes>
<skipTests>${skipIntegrationTests}</skipTests>
</configuration>
<executions>
<execution>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>3.0.1</version>
<configuration>
<!-- <goals>install</goals>-->
<tagNameFormat>v@{project.version}</tagNameFormat>
</configuration>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>${lombok.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.15.3</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.4.12</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>2.0.9</version>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>5.10.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>4.1.0</version>
<scope>test</scope>
</dependency>
</dependencies>
<distributionManagement>
<snapshotRepository>
<id>ossrh</id>
<url>https://s01.oss.sonatype.org/content/repositories/snapshots</url>
</snapshotRepository>
<repository>
<id>ossrh</id>
<url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2</url>
</repository>
</distributionManagement>
<profiles>
<profile>
<id>unit-tests</id>
<properties>
<test.env>unit</test.env>
<skipUnitTests>false</skipUnitTests>
<skipIntegrationTests>true</skipIntegrationTests>
</properties>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<build>
<build>
<plugins>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.8.7</version>
<executions>
<execution>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>report</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>integration-tests</id>
<properties>
<test.env>integration</test.env>
<skipUnitTests>true</skipUnitTests>
<skipIntegrationTests>false</skipIntegrationTests>
</properties>
</profile>
<profile>
<id>ci-cd</id>
<properties>
<test.env>unit</test.env>
<skipUnitTests>true</skipUnitTests>
<skipIntegrationTests>true</skipIntegrationTests>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<version>3.1.0</version>
<executions>
<execution>
<id>sign-artifacts</id>
<phase>verify</phase>
<goals>
<goal>sign</goal>
</goals>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.3.0</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>3.5.0</version>
<configuration>
<!-- Prevent gpg from using pinentry programs. Fixes:
gpg: signing failed: Inappropriate ioctl for device -->
<gpgArguments>
<arg>--pinentry-mode</arg>
<arg>loopback</arg>
</gpgArguments>
<!-- to disable the "missing" warnings. Remove the doclint to enable warnings-->
<doclint>all,-missing</doclint>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.sonatype.plugins</groupId>
<artifactId>nexus-staging-maven-plugin</artifactId>
<version>1.6.13</version>
<extensions>true</extensions>
<configuration>
<serverId>ossrh</serverId>
<nexusUrl>https://s01.oss.sonatype.org/</nexusUrl>
<autoReleaseAfterClose>true</autoReleaseAfterClose>
</configuration>
</plugin>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- Surefire Plugin for Unit Tests -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
<skipTests>${skipUnitTests}</skipTests>
<includes>
<include>**/unittests/**/*.java</include>
</includes>
</configuration>
</plugin>
<!-- Failsafe Plugin for Integration Tests -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>${maven-failsafe-plugin.version}</version>
<configuration>
<includes>
<include>**/integrationtests/*.java</include>
</includes>
<excludes>
<exclude>**/unittests/*.java</exclude>
</excludes>
<skipTests>${skipIntegrationTests}</skipTests>
</configuration>
<executions>
<execution>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<version>1.5</version>
<executions>
<execution>
<id>sign-artifacts</id>
<phase>verify</phase>
<goals>
<goal>sign</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.8.7</version>
<executions>
<execution>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>report</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</build>
<dependencies>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>${lombok.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.jsoup</groupId>
<artifactId>jsoup</artifactId>
<version>1.18.1</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.17.1</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
<version>2.17.1</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.5.6</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>2.0.9</version>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>5.10.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>4.1.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20240205</version>
<scope>test</scope>
</dependency>
</dependencies>
<distributionManagement>
<repository>
<id>mvn-repo-id</id>
</repository>
</distributionManagement>
<profiles>
<profile>
<id>ossrh</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<properties>
<gpg.executable>gpg2</gpg.executable>
<test.env>unit</test.env>
<skipUnitTests>false</skipUnitTests>
<skipIntegrationTests>true</skipIntegrationTests>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.sonatype.central</groupId>
<artifactId>central-publishing-maven-plugin</artifactId>
<version>0.5.0</version>
<extensions>true</extensions>
<configuration>
<publishingServerId>mvn-repo-id</publishingServerId>
<autoPublish>true</autoPublish>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>unit-tests</id>
<properties>
<test.env>unit</test.env>
<skipUnitTests>false</skipUnitTests>
<skipIntegrationTests>true</skipIntegrationTests>
</properties>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.8.11</version>
<executions>
<execution>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>report</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>integration-tests</id>
<properties>
<test.env>integration</test.env>
<skipUnitTests>true</skipUnitTests>
<skipIntegrationTests>false</skipIntegrationTests>
</properties>
</profile>
<profile>
<id>ci-cd</id>
<properties>
<test.env>unit</test.env>
<skipUnitTests>true</skipUnitTests>
<skipIntegrationTests>true</skipIntegrationTests>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<version>3.1.0</version>
<executions>
<execution>
<id>sign-artifacts</id>
<phase>verify</phase>
<goals>
<goal>sign</goal>
</goals>
<configuration>
<!-- Prevent gpg from using pinentry programs. Fixes:
gpg: signing failed: Inappropriate ioctl for device -->
<gpgArguments>
<arg>--pinentry-mode</arg>
<arg>loopback</arg>
</gpgArguments>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.sonatype.plugins</groupId>
<artifactId>nexus-staging-maven-plugin</artifactId>
<version>1.6.13</version>
<extensions>true</extensions>
<configuration>
<serverId>ossrh</serverId>
<nexusUrl>https://s01.oss.sonatype.org/</nexusUrl>
<autoReleaseAfterClose>true</autoReleaseAfterClose>
</configuration>
</plugin>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.8.7</version>
<executions>
<execution>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>report</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@@ -1,508 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.*;
import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest;
import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest;
import io.github.amithkoujalgi.ollama4j.core.models.request.ModelEmbeddingsRequest;
import io.github.amithkoujalgi.ollama4j.core.models.request.ModelRequest;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.http.HttpClient;
import java.net.http.HttpConnectTimeoutException;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * The base Ollama API class. Entry point for talking to an Ollama server:
 * model management (list/pull/create/delete), embeddings, and synchronous or
 * asynchronous prompt generation, with optional Basic-Auth support for
 * servers behind a reverse proxy/gateway.
 */
@SuppressWarnings("DuplicatedCode")
public class OllamaAPI {

  private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class);

  /** Base URL of the Ollama server, normalized to carry no trailing slash. */
  private final String host;

  /** Per-request timeout. 3 seconds is a tight default; raise it for slow models. */
  private long requestTimeoutSeconds = 3;

  /** When true, streamed status/response lines are logged at INFO level. */
  private boolean verbose = true;

  /** Optional Basic-Auth credentials; null when the server requires none. */
  private BasicAuth basicAuth;

  /**
   * Instantiates the Ollama API.
   *
   * @param host the host address of Ollama server, e.g. {@code http://localhost:11434}
   */
  public OllamaAPI(String host) {
    // Normalize so endpoint paths can always be appended with a leading slash.
    if (host.endsWith("/")) {
      this.host = host.substring(0, host.length() - 1);
    } else {
      this.host = host;
    }
  }

  /**
   * Set request timeout in seconds. Default is 3 seconds.
   *
   * @param requestTimeoutSeconds the request timeout in seconds
   */
  public void setRequestTimeoutSeconds(long requestTimeoutSeconds) {
    this.requestTimeoutSeconds = requestTimeoutSeconds;
  }

  /**
   * Set/unset logging of responses
   *
   * @param verbose true/false
   */
  public void setVerbose(boolean verbose) {
    this.verbose = verbose;
  }

  /**
   * Set basic authentication for accessing Ollama server that's behind a reverse-proxy/gateway.
   *
   * @param username the username
   * @param password the password
   */
  public void setBasicAuth(String username, String password) {
    this.basicAuth = new BasicAuth(username, password);
  }

  /**
   * API to check the reachability of Ollama server.
   *
   * <p>Only a connect timeout is treated as "unreachable"; other I/O failures
   * (e.g. connection refused) surface as a {@link RuntimeException}.
   *
   * @return true if the server answered with HTTP 200, false on connect timeout.
   */
  public boolean ping() {
    String url = this.host + "/api/tags";
    HttpClient httpClient = HttpClient.newHttpClient();
    HttpRequest httpRequest;
    try {
      httpRequest =
          getRequestBuilderDefault(new URI(url))
              .header("Accept", "application/json")
              .header("Content-type", "application/json")
              .GET()
              .build();
    } catch (URISyntaxException e) {
      throw new RuntimeException(e);
    }
    HttpResponse<String> response;
    try {
      response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
    } catch (HttpConnectTimeoutException e) {
      // A timeout while connecting is the expected "server not reachable" signal.
      return false;
    } catch (IOException e) {
      throw new RuntimeException(e);
    } catch (InterruptedException e) {
      // Preserve the interrupt status before surfacing the failure.
      Thread.currentThread().interrupt();
      throw new RuntimeException(e);
    }
    return response.statusCode() == 200;
  }

  /**
   * List available models from Ollama server.
   *
   * @return the list of locally available models
   * @throws OllamaBaseException if the server answers with a non-200 status
   */
  public List<Model> listModels()
      throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
    String url = this.host + "/api/tags";
    HttpClient httpClient = HttpClient.newHttpClient();
    HttpRequest httpRequest =
        getRequestBuilderDefault(new URI(url))
            .header("Accept", "application/json")
            .header("Content-type", "application/json")
            .GET()
            .build();
    HttpResponse<String> response =
        httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
    int statusCode = response.statusCode();
    String responseString = response.body();
    if (statusCode == 200) {
      return Utils.getObjectMapper()
          .readValue(responseString, ListModelsResponse.class)
          .getModels();
    } else {
      throw new OllamaBaseException(statusCode + " - " + responseString);
    }
  }

  /**
   * Pull a model on the Ollama server from the list of <a
   * href="https://ollama.ai/library">available models</a>.
   *
   * <p>The server streams one JSON status object per line; each status is
   * logged when verbose mode is on.
   *
   * @param modelName the name of the model
   * @throws OllamaBaseException if the server answers with a non-200 status
   */
  public void pullModel(String modelName)
      throws OllamaBaseException, IOException, URISyntaxException, InterruptedException {
    String url = this.host + "/api/pull";
    String jsonData = new ModelRequest(modelName).toString();
    HttpRequest request =
        getRequestBuilderDefault(new URI(url))
            .POST(HttpRequest.BodyPublishers.ofString(jsonData))
            .header("Accept", "application/json")
            .header("Content-type", "application/json")
            .build();
    HttpClient client = HttpClient.newHttpClient();
    HttpResponse<InputStream> response =
        client.send(request, HttpResponse.BodyHandlers.ofInputStream());
    int statusCode = response.statusCode();
    InputStream responseBodyStream = response.body();
    // Collect the raw body on failure so the thrown exception carries the
    // server's error message (previously an empty string was reported).
    StringBuilder errorBody = new StringBuilder();
    try (BufferedReader reader =
        new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
      String line;
      while ((line = reader.readLine()) != null) {
        if (statusCode == 200) {
          ModelPullResponse modelPullResponse =
              Utils.getObjectMapper().readValue(line, ModelPullResponse.class);
          if (verbose) {
            logger.info(modelPullResponse.getStatus());
          }
        } else {
          // Error bodies are not guaranteed to be ModelPullResponse JSON; keep them raw.
          errorBody.append(line);
        }
      }
    }
    if (statusCode != 200) {
      throw new OllamaBaseException(statusCode + " - " + errorBody);
    }
  }

  /**
   * Gets model details from the Ollama server.
   *
   * @param modelName the model
   * @return the model details
   * @throws OllamaBaseException if the server answers with a non-200 status
   */
  public ModelDetail getModelDetails(String modelName)
      throws IOException, OllamaBaseException, InterruptedException, URISyntaxException {
    String url = this.host + "/api/show";
    String jsonData = new ModelRequest(modelName).toString();
    HttpRequest request =
        getRequestBuilderDefault(new URI(url))
            .header("Accept", "application/json")
            .header("Content-type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(jsonData))
            .build();
    HttpClient client = HttpClient.newHttpClient();
    HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
    int statusCode = response.statusCode();
    String responseBody = response.body();
    if (statusCode == 200) {
      return Utils.getObjectMapper().readValue(responseBody, ModelDetail.class);
    } else {
      throw new OllamaBaseException(statusCode + " - " + responseBody);
    }
  }

  /**
   * Create a custom model from a model file. Read more about custom model file creation <a
   * href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md">here</a>.
   *
   * @param modelName the name of the custom model to be created.
   * @param modelFilePath the path to model file that exists on the Ollama server.
   */
  public void createModelWithFilePath(String modelName, String modelFilePath)
      throws IOException, InterruptedException, OllamaBaseException, URISyntaxException {
    createModel(new CustomModelFilePathRequest(modelName, modelFilePath).toString());
  }

  /**
   * Create a custom model from a model file. Read more about custom model file creation <a
   * href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md">here</a>.
   *
   * @param modelName the name of the custom model to be created.
   * @param modelFileContents the path to model file that exists on the Ollama server.
   */
  public void createModelWithModelFileContents(String modelName, String modelFileContents)
      throws IOException, InterruptedException, OllamaBaseException, URISyntaxException {
    createModel(new CustomModelFileContentsRequest(modelName, modelFileContents).toString());
  }

  /**
   * Shared implementation for both model-creation endpoints: posts the given
   * JSON body to {@code /api/create} and validates the outcome.
   *
   * @param jsonData JSON request body for /api/create
   */
  private void createModel(String jsonData)
      throws IOException, InterruptedException, OllamaBaseException, URISyntaxException {
    String url = this.host + "/api/create";
    HttpRequest request =
        getRequestBuilderDefault(new URI(url))
            .header("Accept", "application/json")
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8))
            .build();
    HttpClient client = HttpClient.newHttpClient();
    HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
    int statusCode = response.statusCode();
    String responseString = response.body();
    if (statusCode != 200) {
      throw new OllamaBaseException(statusCode + " - " + responseString);
    }
    // FIXME: Ollama API returns HTTP status code 200 for model creation failure cases. Correct this
    // if the issue is fixed in the Ollama API server.
    if (responseString.contains("error")) {
      throw new OllamaBaseException(responseString);
    }
    if (verbose) {
      logger.info(responseString);
    }
  }

  /**
   * Delete a model from Ollama server.
   *
   * @param modelName the name of the model to be deleted.
   * @param ignoreIfNotPresent ignore errors if the specified model is not present on Ollama server.
   * @throws OllamaBaseException if deletion fails, or if the model is absent and
   *     {@code ignoreIfNotPresent} is false
   */
  public void deleteModel(String modelName, boolean ignoreIfNotPresent)
      throws IOException, InterruptedException, OllamaBaseException, URISyntaxException {
    String url = this.host + "/api/delete";
    String jsonData = new ModelRequest(modelName).toString();
    HttpRequest request =
        getRequestBuilderDefault(new URI(url))
            .method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8))
            .header("Accept", "application/json")
            .header("Content-type", "application/json")
            .build();
    HttpClient client = HttpClient.newHttpClient();
    HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
    int statusCode = response.statusCode();
    String responseBody = response.body();
    if (statusCode == 404 && responseBody.contains("model") && responseBody.contains("not found")) {
      // Previously this path returned unconditionally, silently ignoring a
      // missing model even when the caller asked for an error.
      if (ignoreIfNotPresent) {
        return;
      }
      throw new OllamaBaseException(statusCode + " - " + responseBody);
    }
    if (statusCode != 200) {
      throw new OllamaBaseException(statusCode + " - " + responseBody);
    }
  }

  /**
   * Generate embeddings for a given text from a model
   *
   * @param model name of model to generate embeddings from
   * @param prompt text to generate embeddings for
   * @return embeddings
   * @throws OllamaBaseException if the server answers with a non-200 status
   */
  public List<Double> generateEmbeddings(String model, String prompt)
      throws IOException, InterruptedException, OllamaBaseException {
    URI uri = URI.create(this.host + "/api/embeddings");
    String jsonData = new ModelEmbeddingsRequest(model, prompt).toString();
    HttpClient httpClient = HttpClient.newHttpClient();
    HttpRequest.Builder requestBuilder =
        getRequestBuilderDefault(uri)
            .header("Accept", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(jsonData));
    HttpRequest request = requestBuilder.build();
    HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
    int statusCode = response.statusCode();
    String responseBody = response.body();
    if (statusCode == 200) {
      EmbeddingResponse embeddingResponse =
          Utils.getObjectMapper().readValue(responseBody, EmbeddingResponse.class);
      return embeddingResponse.getEmbedding();
    } else {
      throw new OllamaBaseException(statusCode + " - " + responseBody);
    }
  }

  /**
   * Ask a question to a model running on Ollama server. This is a sync/blocking call.
   *
   * @param model the ollama model to ask the question to
   * @param prompt the prompt/question text
   * @return OllamaResult that includes response text and time taken for response
   */
  public OllamaResult ask(String model, String prompt)
      throws OllamaBaseException, IOException, InterruptedException {
    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
    return askSync(ollamaRequestModel);
  }

  /**
   * Ask a question to a model running on Ollama server and get a callback handle that can be used
   * to check for status and get the response from the model later. This would be an
   * async/non-blocking call.
   *
   * @param model the ollama model to ask the question to
   * @param prompt the prompt/question text
   * @return the ollama async result callback handle
   */
  public OllamaAsyncResultCallback askAsync(String model, String prompt) {
    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
    URI uri = URI.create(this.host + "/api/generate");
    OllamaAsyncResultCallback ollamaAsyncResultCallback =
        new OllamaAsyncResultCallback(
            getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds);
    // The callback is a Thread; the HTTP call runs on it until completion.
    ollamaAsyncResultCallback.start();
    return ollamaAsyncResultCallback;
  }

  /**
   * With one or more image files, ask a question to a model running on Ollama server. This is a
   * sync/blocking call.
   *
   * @param model the ollama model to ask the question to
   * @param prompt the prompt/question text
   * @param imageFiles the list of image files to use for the question
   * @return OllamaResult that includes response text and time taken for response
   */
  public OllamaResult askWithImageFiles(String model, String prompt, List<File> imageFiles)
      throws OllamaBaseException, IOException, InterruptedException {
    List<String> images = new ArrayList<>();
    for (File imageFile : imageFiles) {
      images.add(encodeFileToBase64(imageFile));
    }
    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
    return askSync(ollamaRequestModel);
  }

  /**
   * With one or more image URLs, ask a question to a model running on Ollama server. This is a
   * sync/blocking call.
   *
   * @param model the ollama model to ask the question to
   * @param prompt the prompt/question text
   * @param imageURLs the list of image URLs to use for the question
   * @return OllamaResult that includes response text and time taken for response
   */
  public OllamaResult askWithImageURLs(String model, String prompt, List<String> imageURLs)
      throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
    List<String> images = new ArrayList<>();
    for (String imageURL : imageURLs) {
      images.add(encodeByteArrayToBase64(loadImageBytesFromUrl(imageURL)));
    }
    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
    return askSync(ollamaRequestModel);
  }

  /** Reads a file and returns its contents base64-encoded. */
  private static String encodeFileToBase64(File file) throws IOException {
    return Base64.getEncoder().encodeToString(Files.readAllBytes(file.toPath()));
  }

  /** Base64-encodes the given bytes. */
  private static String encodeByteArrayToBase64(byte[] bytes) {
    return Base64.getEncoder().encodeToString(bytes);
  }

  /** Downloads the resource at the given URL and returns its raw bytes. */
  private static byte[] loadImageBytesFromUrl(String imageUrl)
      throws IOException, URISyntaxException {
    URL url = new URI(imageUrl).toURL();
    try (InputStream in = url.openStream();
        ByteArrayOutputStream out = new ByteArrayOutputStream()) {
      in.transferTo(out);
      return out.toByteArray();
    }
  }

  /**
   * Sends the generate request and assembles the streamed response.
   *
   * <p>The server streams one JSON object per line; non-final chunks carry the
   * response text, the terminating chunk only signals completion.
   *
   * @param ollamaRequestModel the request payload
   * @return result with response text, elapsed time and HTTP status
   * @throws OllamaBaseException if the server answers with a non-200 status
   */
  private OllamaResult askSync(OllamaRequestModel ollamaRequestModel)
      throws OllamaBaseException, IOException, InterruptedException {
    long startTime = System.currentTimeMillis();
    HttpClient httpClient = HttpClient.newHttpClient();
    URI uri = URI.create(this.host + "/api/generate");
    HttpRequest.Builder requestBuilder =
        getRequestBuilderDefault(uri)
            .POST(
                HttpRequest.BodyPublishers.ofString(
                    Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)));
    HttpRequest request = requestBuilder.build();
    logger.debug("Ask model '{}' ...", ollamaRequestModel);
    HttpResponse<InputStream> response =
        httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
    int statusCode = response.statusCode();
    InputStream responseBodyStream = response.body();
    StringBuilder responseBuffer = new StringBuilder();
    try (BufferedReader reader =
        new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
      String line;
      while ((line = reader.readLine()) != null) {
        if (statusCode == 404) {
          logger.warn("Status code: 404 (Not Found)");
          OllamaErrorResponseModel ollamaResponseModel =
              Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
          responseBuffer.append(ollamaResponseModel.getError());
        } else if (statusCode == 401) {
          // 401 bodies are not JSON we can rely on; synthesize a fixed error.
          logger.warn("Status code: 401 (Unauthorized)");
          OllamaErrorResponseModel ollamaResponseModel =
              Utils.getObjectMapper()
                  .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class);
          responseBuffer.append(ollamaResponseModel.getError());
        } else {
          OllamaResponseModel ollamaResponseModel =
              Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
          // The final ("done") chunk carries no response text to append.
          if (!ollamaResponseModel.isDone()) {
            responseBuffer.append(ollamaResponseModel.getResponse());
          }
        }
      }
    }
    if (statusCode != 200) {
      logger.error("Status code {}", statusCode);
      throw new OllamaBaseException(responseBuffer.toString());
    } else {
      long endTime = System.currentTimeMillis();
      return new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode);
    }
  }

  /**
   * Get default request builder.
   *
   * @param uri URI to get a HttpRequest.Builder
   * @return HttpRequest.Builder with content type, timeout and (optional) auth applied
   */
  private HttpRequest.Builder getRequestBuilderDefault(URI uri) {
    HttpRequest.Builder requestBuilder =
        HttpRequest.newBuilder(uri)
            .header("Content-Type", "application/json")
            .timeout(Duration.ofSeconds(requestTimeoutSeconds));
    if (isBasicAuthCredentialsSet()) {
      requestBuilder.header("Authorization", getBasicAuthHeaderValue());
    }
    return requestBuilder;
  }

  /**
   * Get basic authentication header value.
   *
   * @return basic authentication header value (encoded credentials)
   */
  private String getBasicAuthHeaderValue() {
    String credentialsToEncode = basicAuth.getUsername() + ":" + basicAuth.getPassword();
    return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes());
  }

  /**
   * Check if Basic Auth credentials set.
   *
   * @return true when Basic Auth credentials set
   */
  private boolean isBasicAuthCredentialsSet() {
    return basicAuth != null;
  }
}

View File

@@ -1,20 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core.models;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;
import lombok.Data;
/**
 * Response payload of the Ollama {@code /api/show} endpoint, describing a
 * locally available model (license, Modelfile contents, parameters, etc.).
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class ModelDetail {
  // License text the model is distributed under.
  private String license;

  // Raw contents of the model's Modelfile (JSON field "modelfile").
  @JsonProperty("modelfile")
  private String modelFile;

  // Runtime parameters as returned by the server.
  private String parameters;

  // Prompt template configured for the model.
  private String template;

  // System prompt baked into the model, if any.
  private String system;

  // Additional key/value details. NOTE(review): values are modeled as plain
  // strings here — confirm against the server's actual response schema.
  private Map<String, String> details;
}

View File

@@ -1,141 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core.models;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.LinkedList;
import java.util.Queue;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.Getter;
/**
 * Thread-backed handle for an asynchronous generate request. Starting the
 * thread fires the HTTP call; intermediate tokens are pushed into a queue that
 * callers can poll via {@link #getStream()}, and the final state is exposed via
 * {@code isComplete()}, {@code isSucceeded()} and {@code getResponse()}.
 */
@Data
@EqualsAndHashCode(callSuper = true)
@SuppressWarnings("unused")
public class OllamaAsyncResultCallback extends Thread {
  private final HttpRequest.Builder requestBuilder;
  private final OllamaRequestModel ollamaRequestModel;
  // Intermediate response tokens, consumable while the request is in flight.
  private final Queue<String> queue = new LinkedList<>();
  private String result;
  private boolean isDone;

  /**
   * -- GETTER -- Returns the status of the request. Indicates if the request was successful or a
   * failure. If the request was a failure, the `getResponse()` method will return the error
   * message.
   */
  @Getter private boolean succeeded;

  private long requestTimeoutSeconds;

  /**
   * -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama
   * server.
   */
  @Getter private int httpStatusCode;

  /** -- GETTER -- Returns the response time in milliseconds. */
  @Getter private long responseTime = 0;

  public OllamaAsyncResultCallback(
      HttpRequest.Builder requestBuilder,
      OllamaRequestModel ollamaRequestModel,
      long requestTimeoutSeconds) {
    this.requestBuilder = requestBuilder;
    this.ollamaRequestModel = ollamaRequestModel;
    this.isDone = false;
    this.result = "";
    // NOTE(review): seeds the stream with an empty token — presumably so
    // consumers polling getStream() never see an empty queue; confirm intent.
    this.queue.add("");
    this.requestTimeoutSeconds = requestTimeoutSeconds;
  }

  // Performs the blocking HTTP call on this thread, feeding tokens into the
  // queue as they stream in, then records completion/success state.
  @Override
  public void run() {
    HttpClient httpClient = HttpClient.newHttpClient();
    try {
      long startTime = System.currentTimeMillis();
      HttpRequest request =
          requestBuilder
              .POST(
                  HttpRequest.BodyPublishers.ofString(
                      Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)))
              .header("Content-Type", "application/json")
              .timeout(Duration.ofSeconds(requestTimeoutSeconds))
              .build();
      HttpResponse<InputStream> response =
          httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
      int statusCode = response.statusCode();
      this.httpStatusCode = statusCode;
      InputStream responseBodyStream = response.body();
      try (BufferedReader reader =
          new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
        String line;
        StringBuilder responseBuffer = new StringBuilder();
        // Each line of the body is one JSON object of the streamed response.
        while ((line = reader.readLine()) != null) {
          if (statusCode == 404) {
            OllamaErrorResponseModel ollamaResponseModel =
                Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
            queue.add(ollamaResponseModel.getError());
            responseBuffer.append(ollamaResponseModel.getError());
          } else {
            OllamaResponseModel ollamaResponseModel =
                Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
            queue.add(ollamaResponseModel.getResponse());
            // The final ("done") chunk carries no response text to append.
            if (!ollamaResponseModel.isDone()) {
              responseBuffer.append(ollamaResponseModel.getResponse());
            }
          }
        }
        // NOTE(review): success state is set before the status check below; on
        // a non-200 status the thrown exception re-routes to the catch block,
        // which overwrites these fields with the failure state.
        this.isDone = true;
        this.succeeded = true;
        this.result = responseBuffer.toString();
        long endTime = System.currentTimeMillis();
        responseTime = endTime - startTime;
      }
      if (statusCode != 200) {
        throw new OllamaBaseException(this.result);
      }
    } catch (IOException | InterruptedException | OllamaBaseException e) {
      // Any failure flips the handle into a terminal failed state; the error
      // text is surfaced through getResponse().
      this.isDone = true;
      this.succeeded = false;
      this.result = "[FAILED] " + e.getMessage();
    }
  }

  /**
   * Returns the status of the thread. This does not indicate that the request was successful or a
   * failure, rather it is just a status flag to indicate if the thread is active or ended.
   *
   * @return boolean - status
   */
  public boolean isComplete() {
    return isDone;
  }

  /**
   * Returns the final response when the execution completes. Does not return intermediate results.
   *
   * @return String - response text
   */
  public String getResponse() {
    return result;
  }

  // Live queue of intermediate tokens; shared with the worker thread.
  public Queue<String> getStream() {
    return queue;
  }

  // Only effective if called before the thread is started.
  public void setRequestTimeoutSeconds(long requestTimeoutSeconds) {
    this.requestTimeoutSeconds = requestTimeoutSeconds;
  }
}

View File

@@ -1,13 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core.models;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
import lombok.Data;
/** Error payload returned by the Ollama server, e.g. {@code {"error": "..."}}. */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class OllamaErrorResponseModel {
  // Human-readable error message from the server.
  private String error;
}

View File

@@ -1,35 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core.models;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import java.util.List;
import lombok.Data;
/**
 * Request payload for the Ollama {@code /api/generate} endpoint: the target
 * model, the prompt text and, optionally, base64-encoded images.
 */
@Data
public class OllamaRequestModel {

  private String model;
  private String prompt;
  private List<String> images;

  /**
   * Creates a text-only request.
   *
   * @param model the model name
   * @param prompt the prompt text
   */
  public OllamaRequestModel(String model, String prompt) {
    this(model, prompt, null);
  }

  /**
   * Creates a multimodal request.
   *
   * @param model the model name
   * @param prompt the prompt text
   * @param images base64-encoded images to send alongside the prompt
   */
  public OllamaRequestModel(String model, String prompt, List<String> images) {
    this.model = model;
    this.prompt = prompt;
    this.images = images;
  }

  /** Renders this request as pretty-printed JSON (used as the HTTP body). */
  public String toString() {
    try {
      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
    } catch (JsonProcessingException jpe) {
      throw new RuntimeException(jpe);
    }
  }
}

View File

@@ -1,23 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor;
import lombok.Data;
/**
 * Request payload for the Ollama {@code /api/embeddings} endpoint: the model
 * to use and the prompt text to embed.
 */
@Data
@AllArgsConstructor
public class ModelEmbeddingsRequest {
  // Name of the model that generates the embeddings.
  private String model;

  // Text to compute embeddings for.
  private String prompt;

  /** Renders this request as pretty-printed JSON (used as the HTTP body). */
  @Override
  public String toString() {
    try {
      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
    } catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }
}

View File

@@ -1,63 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core.types;
/**
 * A class to provide constants for all the supported models by Ollama.
 *
 * <p>Refer to the full list of models and the details here: <a
 * href="https://ollama.ai/library">https://ollama.ai/library</a>
 */
@SuppressWarnings("ALL")
public class OllamaModelType {
  public static final String LLAMA2 = "llama2";
  public static final String MISTRAL = "mistral";
  public static final String LLAVA = "llava";
  public static final String MIXTRAL = "mixtral";
  public static final String STARLING_LM = "starling-lm";
  public static final String NEURAL_CHAT = "neural-chat";
  public static final String CODELLAMA = "codellama";
  public static final String LLAMA2_UNCENSORED = "llama2-uncensored";
  public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral";
  public static final String ORCA_MINI = "orca-mini";
  public static final String VICUNA = "vicuna";
  public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored";
  public static final String PHIND_CODELLAMA = "phind-codellama";
  public static final String ZEPHYR = "zephyr";
  public static final String WIZARDCODER = "wizardcoder";
  public static final String MISTRAL_OPENORCA = "mistral-openorca";
  public static final String NOUS_HERMES = "nous-hermes";
  public static final String DEEPSEEK_CODER = "deepseek-coder";
  public static final String WIZARD_MATH = "wizard-math";
  public static final String LLAMA2_CHINESE = "llama2-chinese";
  public static final String FALCON = "falcon";
  public static final String ORCA2 = "orca2";
  public static final String STABLE_BELUGA = "stable-beluga";
  public static final String CODEUP = "codeup";
  public static final String EVERYTHINGLM = "everythinglm";
  public static final String MEDLLAMA2 = "medllama2";
  public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored";
  public static final String STARCODER = "starcoder";
  public static final String DOLPHIN22_MISTRAL = "dolphin2.2-mistral";
  public static final String OPENCHAT = "openchat";
  public static final String WIZARD_VICUNA = "wizard-vicuna";
  public static final String OPENHERMES25_MISTRAL = "openhermes2.5-mistral";
  public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2";
  public static final String YI = "yi";
  public static final String YARN_MISTRAL = "yarn-mistral";
  public static final String SAMANTHA_MISTRAL = "samantha-mistral";
  public static final String SQLCODER = "sqlcoder";
  public static final String YARN_LLAMA2 = "yarn-llama2";
  public static final String MEDITRON = "meditron";
  public static final String STABLELM_ZEPHYR = "stablelm-zephyr";
  public static final String OPENHERMES2_MISTRAL = "openhermes2-mistral";
  public static final String DEEPSEEK_LLM = "deepseek-llm";
  public static final String MISTRALLITE = "mistrallite";
  public static final String DOLPHIN21_MISTRAL = "dolphin2.1-mistral";
  public static final String WIZARDLM = "wizardlm";
  public static final String CODEBOOGA = "codebooga";
  public static final String MAGICODER = "magicoder";
  public static final String GOLIATH = "goliath";
  public static final String NEXUSRAVEN = "nexusraven";
  public static final String ALFRED = "alfred";
  public static final String XWINLM = "xwinlm";
  public static final String BAKLLAVA = "bakllava";

  /** Constant holder — not meant to be instantiated. */
  private OllamaModelType() {}
}

View File

@@ -1,9 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core.utils;
import com.fasterxml.jackson.databind.ObjectMapper;
/** Shared utility helpers for the library. */
public class Utils {

  // Jackson's ObjectMapper is costly to construct and is thread-safe once
  // configured, so a single shared instance is reused for all (de)serialization
  // instead of allocating a fresh mapper on every call.
  // NOTE(review): callers must not mutate the returned mapper's configuration.
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

  /** Utility holder — not meant to be instantiated. */
  private Utils() {}

  /**
   * Returns the process-wide Jackson {@link ObjectMapper}.
   *
   * @return the shared object mapper
   */
  public static ObjectMapper getObjectMapper() {
    return OBJECT_MAPPER;
  }
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.exceptions;
package io.github.ollama4j.exceptions;
public class OllamaBaseException extends Exception {

View File

@@ -0,0 +1,8 @@
package io.github.ollama4j.exceptions;
/** Thrown when a chat message role name does not match any registered role. */
public class RoleNotFoundException extends Exception {
    /**
     * @param s detail message, typically naming the unknown role
     */
    public RoleNotFoundException(String s) {
        super(s);
    }
}

View File

@@ -0,0 +1,8 @@
package io.github.ollama4j.exceptions;
/** Thrown when invoking a registered tool fails; wraps the underlying cause. */
public class ToolInvocationException extends Exception {
    /**
     * @param s detail message describing the failed invocation
     * @param e the underlying failure, preserved as the cause
     */
    public ToolInvocationException(String s, Exception e) {
        super(s, e);
    }
}

View File

@@ -0,0 +1,8 @@
package io.github.ollama4j.exceptions;
/** Thrown when a tool requested by the model is not registered. */
public class ToolNotFoundException extends Exception {
    /**
     * @param s detail message, typically naming the missing tool
     */
    public ToolNotFoundException(String s) {
        super(s);
    }
}

View File

@@ -0,0 +1,14 @@
package io.github.ollama4j.impl;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
/**
 * Stream handler that prints streamed tokens to stdout. Each incoming message
 * is expected to contain the full text so far; only the newly appended tail is
 * printed.
 */
public class ConsoleOutputStreamHandler implements OllamaStreamHandler {
    // Accumulates everything printed so far to compute the new tail.
    private final StringBuffer response = new StringBuffer();

    @Override
    public void accept(String message) {
        // Guard: a message shorter than the accumulated text (e.g. a restarted
        // stream) previously caused StringIndexOutOfBoundsException in
        // substring(); treat it as a fresh stream instead.
        if (message.length() < response.length()) {
            response.setLength(0);
        }
        String substr = message.substring(response.length());
        response.append(substr);
        System.out.print(substr);
    }
}

View File

@@ -0,0 +1,49 @@
package io.github.ollama4j.models.chat;
import static io.github.ollama4j.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.github.ollama4j.utils.FileToBase64Serializer;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
/**
 * Defines a single Message to be used inside a chat request against the ollama /api/chat endpoint.
 *
 * @see <a href="https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate chat completion</a>
 */
@Data
@AllArgsConstructor
@RequiredArgsConstructor
@NoArgsConstructor
public class OllamaChatMessage {

    // Role the message is attributed to (system/user/assistant/tool or custom).
    @NonNull
    private OllamaChatMessageRole role;

    // Plain-text content of the message.
    @NonNull
    private String content;

    // Tool calls requested by the model, mapped from the "tool_calls" JSON field.
    private @JsonProperty("tool_calls") List<OllamaChatToolCalls> toolCalls;

    // Raw image bytes; serialized to base64 strings for the wire format.
    @JsonSerialize(using = FileToBase64Serializer.class)
    private List<byte[]> images;

    /** Renders this message as pretty-printed JSON. */
    @Override
    public String toString() {
        try {
            return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
}

View File

@@ -0,0 +1,53 @@
package io.github.ollama4j.models.chat;
import com.fasterxml.jackson.annotation.JsonValue;
import io.github.ollama4j.exceptions.RoleNotFoundException;
import lombok.Getter;
import java.util.ArrayList;
import java.util.List;
/**
 * Defines the possible Chat Message roles.
 *
 * <p>Every role instance registers itself in a static registry at construction
 * time so that {@link #getRole(String)} can resolve both built-in and custom
 * roles by name.
 */
@Getter
public class OllamaChatMessageRole {
    private static final List<OllamaChatMessageRole> roles = new ArrayList<>();

    public static final OllamaChatMessageRole SYSTEM = new OllamaChatMessageRole("system");
    public static final OllamaChatMessageRole USER = new OllamaChatMessageRole("user");
    public static final OllamaChatMessageRole ASSISTANT = new OllamaChatMessageRole("assistant");
    public static final OllamaChatMessageRole TOOL = new OllamaChatMessageRole("tool");

    // Serialized JSON value of the role, e.g. "user".
    @JsonValue
    private final String roleName;

    private OllamaChatMessageRole(String roleName) {
        this.roleName = roleName;
        // The constructor is the single registration point for the registry.
        roles.add(this);
    }

    /**
     * Creates and registers a custom role.
     *
     * @param roleName name of the new role
     * @return the newly registered role
     */
    public static OllamaChatMessageRole newCustomRole(String roleName) {
        // Registration already happens in the constructor; adding the instance
        // to the registry here as well (as earlier revisions did) produced a
        // duplicate entry for every custom role.
        return new OllamaChatMessageRole(roleName);
    }

    /**
     * Returns a snapshot of all registered roles (built-in and custom).
     */
    public static List<OllamaChatMessageRole> getRoles() {
        return new ArrayList<>(roles);
    }

    /**
     * Looks up a role by name.
     *
     * @param roleName the role name to resolve
     * @return the matching role
     * @throws RoleNotFoundException if no role with the given name is registered
     */
    public static OllamaChatMessageRole getRole(String roleName) throws RoleNotFoundException {
        for (OllamaChatMessageRole role : roles) {
            if (role.roleName.equals(roleName)) {
                return role;
            }
        }
        throw new RoleNotFoundException("Invalid role name: " + roleName);
    }

    @Override
    public String toString() {
        return roleName;
    }
}

View File

@@ -0,0 +1,43 @@
package io.github.ollama4j.models.chat;
import java.util.List;
import io.github.ollama4j.models.request.OllamaCommonRequest;
import io.github.ollama4j.tools.Tools;
import io.github.ollama4j.utils.OllamaRequestBody;
import lombok.Getter;
import lombok.Setter;
/**
 * Defines a Request to use against the ollama /api/chat endpoint.
 *
 * @see <a href=
 *      "https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate
 *      Chat Completion</a>
 */
@Getter
@Setter
public class OllamaChatRequest extends OllamaCommonRequest implements OllamaRequestBody {

    // Conversation history, oldest first; the last entry is the current prompt.
    private List<OllamaChatMessage> messages;

    // Tool/function definitions the model may call during the chat.
    private List<Tools.PromptFuncDefinition> tools;

    public OllamaChatRequest() {}

    public OllamaChatRequest(String model, List<OllamaChatMessage> messages) {
        this.model = model;
        this.messages = messages;
    }

    /**
     * Equality is defined over the JSON rendering: two requests are equal iff
     * they serialize identically.
     */
    @Override
    public boolean equals(Object o) {
        if (!(o instanceof OllamaChatRequest)) {
            return false;
        }
        return this.toString().equals(o.toString());
    }

    /**
     * Added to honor the equals/hashCode contract (equals was previously
     * overridden alone); both are derived from the JSON rendering.
     */
    @Override
    public int hashCode() {
        return this.toString().hashCode();
    }
}

View File

@@ -0,0 +1,111 @@
package io.github.ollama4j.models.chat;
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
/**
* Helper class for creating {@link OllamaChatRequest} objects using the builder-pattern.
*/
public class OllamaChatRequestBuilder {
private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class);
private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages) {
request = new OllamaChatRequest(model, messages);
}
private OllamaChatRequest request;
public static OllamaChatRequestBuilder getInstance(String model) {
return new OllamaChatRequestBuilder(model, new ArrayList<>());
}
public OllamaChatRequest build() {
return request;
}
public void reset() {
request = new OllamaChatRequest(request.getModel(), new ArrayList<>());
}
public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content){
return withMessage(role,content, Collections.emptyList());
}
public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<OllamaChatToolCalls> toolCalls,List<File> images) {
List<OllamaChatMessage> messages = this.request.getMessages();
List<byte[]> binaryImages = images.stream().map(file -> {
try {
return Files.readAllBytes(file.toPath());
} catch (IOException e) {
LOG.warn("File '{}' could not be accessed, will not add to message!", file.toPath(), e);
return new byte[0];
}
}).collect(Collectors.toList());
messages.add(new OllamaChatMessage(role, content,toolCalls, binaryImages));
return this;
}
public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content,List<OllamaChatToolCalls> toolCalls, String... imageUrls) {
List<OllamaChatMessage> messages = this.request.getMessages();
List<byte[]> binaryImages = null;
if (imageUrls.length > 0) {
binaryImages = new ArrayList<>();
for (String imageUrl : imageUrls) {
try {
binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl));
} catch (URISyntaxException e) {
LOG.warn("URL '{}' could not be accessed, will not add to message!", imageUrl, e);
} catch (IOException e) {
LOG.warn("Content of URL '{}' could not be read, will not add to message!", imageUrl, e);
}
}
}
messages.add(new OllamaChatMessage(role, content,toolCalls, binaryImages));
return this;
}
/**
 * Replaces the message history entirely.
 * NOTE(review): this returns a NEW builder seeded only with the model name — any options,
 * template, format or keep-alive already set on this builder are not carried over; confirm
 * that is intended.
 */
public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages) {
    return new OllamaChatRequestBuilder(request.getModel(), messages);
}
/** Applies the given model options (serialized as the request's options map). */
public OllamaChatRequestBuilder withOptions(Options options) {
    this.request.setOptions(options.getOptionsMap());
    return this;
}

/** Asks the server to return the response formatted as JSON. */
public OllamaChatRequestBuilder withGetJsonResponse() {
    this.request.setReturnFormatJson(true);
    return this;
}

/** Overrides the model's prompt template. */
public OllamaChatRequestBuilder withTemplate(String template) {
    this.request.setTemplate(template);
    return this;
}

/** Enables streaming of response tokens. */
public OllamaChatRequestBuilder withStreaming() {
    this.request.setStream(true);
    return this;
}

/** Sets the keep_alive value forwarded to the server (how long the model stays loaded). */
public OllamaChatRequestBuilder withKeepAlive(String keepAlive) {
    this.request.setKeepAlive(keepAlive);
    return this;
}
}

View File

@@ -0,0 +1,23 @@
package io.github.ollama4j.models.chat;

import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;

import java.util.List;

/**
 * One response object from Ollama's chat endpoint: a single streamed part when streaming,
 * or the complete reply otherwise.
 */
@Data
public class OllamaChatResponseModel {
    private String model;                                                  // model that produced this part
    private @JsonProperty("created_at") String createdAt;                  // server-side timestamp string
    private @JsonProperty("done_reason") String doneReason;                // why generation stopped
    private OllamaChatMessage message;                                     // (partial) assistant message; may be null/empty under load
    private boolean done;                                                  // true on the final streamed part
    private String error;                                                  // populated by the server on failure
    private List<Integer> context;
    // Timing/count stats; boxed types because they are absent on intermediate stream parts.
    private @JsonProperty("total_duration") Long totalDuration;
    private @JsonProperty("load_duration") Long loadDuration;
    private @JsonProperty("prompt_eval_duration") Long promptEvalDuration;
    private @JsonProperty("eval_duration") Long evalDuration;
    private @JsonProperty("prompt_eval_count") Integer promptEvalCount;
    private @JsonProperty("eval_count") Integer evalCount;
}

View File

@@ -0,0 +1,55 @@
package io.github.ollama4j.models.chat;

import java.util.List;

import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.Getter;

import static io.github.ollama4j.utils.Utils.getObjectMapper;

/**
 * Specific chat-API result that contains the chat history sent to the model and appends the
 * answer given by the {@link OllamaChatMessageRole#ASSISTANT} role.
 */
@Getter
public class OllamaChatResult {

    // Conversation history; the model's answer is appended in the constructor.
    private final List<OllamaChatMessage> chatHistory;

    // Raw response returned by the chat endpoint.
    private final OllamaChatResponseModel responseModel;

    public OllamaChatResult(OllamaChatResponseModel responseModel, List<OllamaChatMessage> chatHistory) {
        this.chatHistory = chatHistory;
        this.responseModel = responseModel;
        appendAnswerToChatHistory(responseModel);
    }

    // Mutates the caller-supplied history list by appending the assistant's message.
    private void appendAnswerToChatHistory(OllamaChatResponseModel response) {
        this.chatHistory.add(response.getMessage());
    }

    /** Pretty-printed JSON form of this result. */
    @Override
    public String toString() {
        try {
            return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }

    /** @deprecated use {@code getResponseModel().getMessage().getContent()} instead. */
    @Deprecated
    public String getResponse(){
        // Guard the message as well: streamed parts can carry a null message.
        return responseModel != null && responseModel.getMessage() != null
                ? responseModel.getMessage().getContent() : "";
    }

    /** @deprecated HTTP status is no longer tracked here; always returns 200. */
    @Deprecated
    public int getHttpStatusCode(){
        return 200;
    }

    /** @deprecated use {@code getResponseModel().getTotalDuration()} instead. */
    @Deprecated
    public long getResponseTime(){
        // total_duration is a boxed Long that may be absent in the payload; the previous
        // unguarded unboxing threw an NPE in that case.
        return responseModel != null && responseModel.getTotalDuration() != null
                ? responseModel.getTotalDuration() : 0L;
    }
}

View File

@@ -0,0 +1,19 @@
package io.github.ollama4j.models.chat;

import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.models.generate.OllamaTokenHandler;
import lombok.RequiredArgsConstructor;

/**
 * Adapts an {@link OllamaStreamHandler} to the token-level {@link OllamaTokenHandler}
 * interface: accumulates streamed message parts and hands the full text so far to the
 * wrapped handler on every token.
 */
@RequiredArgsConstructor
public class OllamaChatStreamObserver implements OllamaTokenHandler {
    private final OllamaStreamHandler streamHandler;

    // Full message text accumulated so far.
    private String message = "";

    @Override
    public void accept(OllamaChatResponseModel token) {
        // Under heavy load Ollama may stream a part with a missing message (see the
        // null-check in OllamaChatEndpointCaller); skip those instead of throwing an NPE.
        if (streamHandler == null || token == null || token.getMessage() == null) {
            return;
        }
        message += token.getMessage().getContent();
        streamHandler.accept(message);
    }
}

View File

@@ -0,0 +1,16 @@
package io.github.ollama4j.models.chat;

import io.github.ollama4j.tools.OllamaToolCallsFunction;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * Wrapper for a single tool call emitted by the model inside a chat message.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class OllamaChatToolCalls {
    // The function the model wants invoked, with its arguments.
    private OllamaToolCallsFunction function;
}

View File

@@ -0,0 +1,40 @@
package io.github.ollama4j.models.embeddings;

import io.github.ollama4j.utils.Options;

import java.util.List;

/**
 * Fluent builder that assembles {@link OllamaEmbedRequestModel} instances for the
 * Ollama embed API.
 */
public class OllamaEmbedRequestBuilder {

    // The request being assembled; returned as-is by build().
    private final OllamaEmbedRequestModel request;

    private OllamaEmbedRequestBuilder(String model, List<String> input) {
        this.request = new OllamaEmbedRequestModel(model, input);
    }

    /** Starts a builder for the given model and one or more input texts. */
    public static OllamaEmbedRequestBuilder getInstance(String model, String... input) {
        return new OllamaEmbedRequestBuilder(model, List.of(input));
    }

    /** Returns the assembled request. */
    public OllamaEmbedRequestModel build() {
        return this.request;
    }

    /** Applies the given model options to the request. */
    public OllamaEmbedRequestBuilder withOptions(Options options) {
        this.request.setOptions(options.getOptionsMap());
        return this;
    }

    /** Disables server-side truncation of over-long inputs. */
    public OllamaEmbedRequestBuilder withoutTruncate() {
        this.request.setTruncate(false);
        return this;
    }

    /** Sets the keep_alive value forwarded to the server. */
    public OllamaEmbedRequestBuilder withKeepAlive(String keepAlive) {
        this.request.setKeepAlive(keepAlive);
        return this;
    }
}

View File

@@ -0,0 +1,41 @@
package io.github.ollama4j.models.embeddings;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;

import java.util.List;
import java.util.Map;

import static io.github.ollama4j.utils.Utils.getObjectMapper;

/**
 * Request body for Ollama's batch embed API: one model and a list of input texts.
 */
@Data
@RequiredArgsConstructor
@NoArgsConstructor
public class OllamaEmbedRequestModel {
    // Name of the model to compute embeddings with.
    @NonNull
    private String model;

    // Texts to embed.
    @NonNull
    private List<String> input;

    // Optional model options, serialized as-is.
    private Map<String, Object> options;

    @JsonProperty(value = "keep_alive")
    private String keepAlive;

    // Server-side truncation of over-long inputs; enabled by default.
    @JsonProperty(value = "truncate")
    private Boolean truncate = true;

    /** Pretty-printed JSON form of this request. */
    @Override
    public String toString() {
        try {
            return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
}

View File

@@ -0,0 +1,25 @@
package io.github.ollama4j.models.embeddings;

import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;

import java.util.List;

/**
 * Response of Ollama's batch embed API: one embedding vector per input, plus timing stats.
 */
@SuppressWarnings("unused")
@Data
public class OllamaEmbedResponseModel {
    @JsonProperty("model")
    private String model;

    // One embedding vector per input string, in request order.
    @JsonProperty("embeddings")
    private List<List<Double>> embeddings;

    @JsonProperty("total_duration")
    private long totalDuration;

    @JsonProperty("load_duration")
    private long loadDuration;

    @JsonProperty("prompt_eval_count")
    private int promptEvalCount;
}

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models;
package io.github.ollama4j.models.embeddings;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -7,7 +7,8 @@ import lombok.Data;
@SuppressWarnings("unused")
@Data
public class EmbeddingResponse {
@Deprecated(since="1.0.90")
public class OllamaEmbeddingResponseModel {
@JsonProperty("embedding")
private List<Double> embedding;
}

View File

@@ -0,0 +1,32 @@
package io.github.ollama4j.models.embeddings;

import io.github.ollama4j.utils.Options;

/**
 * Builder for the legacy single-prompt embeddings API.
 *
 * @deprecated superseded by {@link OllamaEmbedRequestBuilder}, which targets the newer
 *             batch embed endpoint.
 */
@Deprecated(since="1.0.90")
public class OllamaEmbeddingsRequestBuilder {
    private OllamaEmbeddingsRequestBuilder(String model, String prompt){
        request = new OllamaEmbeddingsRequestModel(model, prompt);
    }

    // The request being assembled.
    private OllamaEmbeddingsRequestModel request;

    /** Starts a builder for the given model and prompt. */
    public static OllamaEmbeddingsRequestBuilder getInstance(String model, String prompt){
        return new OllamaEmbeddingsRequestBuilder(model, prompt);
    }

    /** Returns the assembled request. */
    public OllamaEmbeddingsRequestModel build(){
        return request;
    }

    /** Applies the given model options. */
    public OllamaEmbeddingsRequestBuilder withOptions(Options options){
        this.request.setOptions(options.getOptionsMap());
        return this;
    }

    /** Sets the keep_alive value forwarded to the server. */
    public OllamaEmbeddingsRequestBuilder withKeepAlive(String keepAlive){
        this.request.setKeepAlive(keepAlive);
        return this;
    }
}

View File

@@ -0,0 +1,34 @@
package io.github.ollama4j.models.embeddings;

import static io.github.ollama4j.utils.Utils.getObjectMapper;

import java.util.Map;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;

/**
 * Request body for the legacy single-prompt embeddings API.
 *
 * @deprecated superseded by {@link OllamaEmbedRequestModel} (batch embed endpoint).
 */
@Data
@RequiredArgsConstructor
@NoArgsConstructor
@Deprecated(since="1.0.90")
public class OllamaEmbeddingsRequestModel {
    // Name of the model to compute the embedding with.
    @NonNull
    private String model;

    // Single text to embed.
    @NonNull
    private String prompt;

    // Optional model options, serialized as-is.
    protected Map<String, Object> options;

    @JsonProperty(value = "keep_alive")
    private String keepAlive;

    /** Pretty-printed JSON form of this request. */
    @Override
    public String toString() {
        try {
            return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
}

View File

@@ -0,0 +1,46 @@
package io.github.ollama4j.models.generate;

import io.github.ollama4j.models.request.OllamaCommonRequest;
import io.github.ollama4j.utils.OllamaRequestBody;

import java.util.List;

import lombok.Getter;
import lombok.Setter;

/**
 * Request body for Ollama's generate API.
 */
@Getter
@Setter
public class OllamaGenerateRequest extends OllamaCommonRequest implements OllamaRequestBody{

    // Prompt text sent to the model.
    private String prompt;
    // Image attachments for multimodal models. TODO confirm expected encoding with callers.
    private List<String> images;
    // Optional system prompt override.
    private String system;
    // NOTE(review): Ollama's generate API returns context as an integer array; verify that
    // String is the intended type here.
    private String context;
    // Raw-mode flag passed through to the server.
    private boolean raw;

    public OllamaGenerateRequest() {
    }

    public OllamaGenerateRequest(String model, String prompt) {
        this.model = model;
        this.prompt = prompt;
    }

    public OllamaGenerateRequest(String model, String prompt, List<String> images) {
        this.model = model;
        this.prompt = prompt;
        this.images = images;
    }

    // Equality is defined on the serialized JSON form (see OllamaCommonRequest#toString).
    @Override
    public boolean equals(Object o) {
        if (!(o instanceof OllamaGenerateRequest)) {
            return false;
        }
        return this.toString().equals(o.toString());
    }

    // equals() was previously overridden without hashCode(), violating the Object contract
    // for hash-based collections; derive the hash from the same JSON form equals() uses.
    @Override
    public int hashCode() {
        return this.toString().hashCode();
    }
}

View File

@@ -0,0 +1,55 @@
package io.github.ollama4j.models.generate;

import io.github.ollama4j.utils.Options;

/**
 * Helper class for creating {@link OllamaGenerateRequest} objects using the builder pattern.
 */
public class OllamaGenerateRequestBuilder {

    // The request being assembled; returned as-is by build().
    private OllamaGenerateRequest request;

    private OllamaGenerateRequestBuilder(String model, String prompt) {
        request = new OllamaGenerateRequest(model, prompt);
    }

    /** Starts a builder for the given model with an initially empty prompt. */
    public static OllamaGenerateRequestBuilder getInstance(String model) {
        return new OllamaGenerateRequestBuilder(model, "");
    }

    /** Returns the assembled request. */
    public OllamaGenerateRequest build() {
        return request;
    }

    /** Sets the prompt text. */
    public OllamaGenerateRequestBuilder withPrompt(String prompt) {
        request.setPrompt(prompt);
        return this;
    }

    /** Asks the server for a JSON-formatted response. */
    public OllamaGenerateRequestBuilder withGetJsonResponse() {
        this.request.setReturnFormatJson(true);
        return this;
    }

    /** Applies the given model options. */
    public OllamaGenerateRequestBuilder withOptions(Options options) {
        this.request.setOptions(options.getOptionsMap());
        return this;
    }

    /** Overrides the model's prompt template. */
    public OllamaGenerateRequestBuilder withTemplate(String template) {
        this.request.setTemplate(template);
        return this;
    }

    /** Enables streaming of response tokens. */
    public OllamaGenerateRequestBuilder withStreaming() {
        this.request.setStream(true);
        return this;
    }

    /** Sets the keep_alive value forwarded to the server. */
    public OllamaGenerateRequestBuilder withKeepAlive(String keepAlive) {
        this.request.setKeepAlive(keepAlive);
        return this;
    }
}

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models;
package io.github.ollama4j.models.generate;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -8,7 +8,7 @@ import lombok.Data;
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class OllamaResponseModel {
public class OllamaGenerateResponseModel {
private String model;
private @JsonProperty("created_at") String createdAt;
private String response;

View File

@@ -0,0 +1,29 @@
package io.github.ollama4j.models.generate;

import java.util.ArrayList;
import java.util.List;

/**
 * Collects streamed generate-API response parts and forwards the accumulated text to an
 * {@link OllamaStreamHandler} after each part.
 */
public class OllamaGenerateStreamObserver {

    private final OllamaStreamHandler streamHandler;

    // Every streamed part received so far, in arrival order.
    private final List<OllamaGenerateResponseModel> responseParts = new ArrayList<>();

    // Full response text accumulated so far.
    private String message = "";

    public OllamaGenerateStreamObserver(OllamaStreamHandler streamHandler) {
        this.streamHandler = streamHandler;
    }

    /** Records a response part and notifies the handler with the text so far. */
    public void notify(OllamaGenerateResponseModel currentResponsePart) {
        responseParts.add(currentResponsePart);
        handleCurrentResponsePart(currentResponsePart);
    }

    protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart) {
        message = message + currentResponsePart.getResponse();
        // Guard against a null handler — consistent with OllamaChatStreamObserver, which
        // already null-checks; previously a null handler caused an NPE here.
        if (streamHandler != null) {
            streamHandler.accept(message);
        }
    }
}

View File

@@ -0,0 +1,7 @@
package io.github.ollama4j.models.generate;

import java.util.function.Consumer;

/**
 * Callback that receives the accumulated response text each time a new part is streamed.
 */
public interface OllamaStreamHandler extends Consumer<String> {
    void accept(String message);
}

View File

@@ -0,0 +1,8 @@
package io.github.ollama4j.models.generate;

import io.github.ollama4j.models.chat.OllamaChatResponseModel;

import java.util.function.Consumer;

/**
 * Callback that receives each raw streamed chat response part, giving token-level access
 * to the full response object rather than just the accumulated text.
 */
public interface OllamaTokenHandler extends Consumer<OllamaChatResponseModel> {
}

View File

@@ -0,0 +1,63 @@
package io.github.ollama4j.models.ps;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.List;

/**
 * Response listing the models currently loaded by the Ollama server.
 */
@Data
@NoArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public class ModelsProcessResponse {
    @JsonProperty("models")
    private List<ModelProcess> models;

    /** One currently-loaded model instance. */
    @Data
    @NoArgsConstructor
    public static class ModelProcess {
        @JsonProperty("name")
        private String name;

        @JsonProperty("model")
        private String model;

        @JsonProperty("size")
        private long size;

        @JsonProperty("digest")
        private String digest;

        @JsonProperty("details")
        private ModelDetails details;

        @JsonProperty("expires_at")
        private String expiresAt; // Consider using LocalDateTime if you need to process date/time

        @JsonProperty("size_vram")
        private long sizeVram;
    }

    /** Static metadata about a model: format, family, parameter size, quantization. */
    @Data
    @NoArgsConstructor
    public static class ModelDetails {
        @JsonProperty("parent_model")
        private String parentModel;

        @JsonProperty("format")
        private String format;

        @JsonProperty("family")
        private String family;

        @JsonProperty("families")
        private List<String> families;

        @JsonProperty("parameter_size")
        private String parameterSize;

        @JsonProperty("quantization_level")
        private String quantizationLevel;
    }
}

View File

@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models;
package io.github.ollama4j.models.request;
import lombok.AllArgsConstructor;
import lombok.Data;

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;
package io.github.ollama4j.models.request;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
import static io.github.ollama4j.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor;

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;
package io.github.ollama4j.models.request;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
import static io.github.ollama4j.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor;

View File

@@ -0,0 +1,45 @@
package io.github.ollama4j.models.request;

import static io.github.ollama4j.utils.Utils.getObjectMapper;

import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;

import java.util.List;
import java.util.Map;

/**
 * Request body for Ollama's create-model API.
 */
@Data
@AllArgsConstructor
@Builder
public class CustomModelRequest {
    private String model;
    private String from;
    private Map<String, String> files;
    private Map<String, String> adapters;
    private String template;
    private Object license; // Using Object to handle both String and List<String>
    private String system;
    private Map<String, Object> parameters;
    private List<Object> messages;
    // @Builder.Default ensures builder-built instances get the same defaults as the no-arg
    // constructor; previously CustomModelRequest.builder().build() left these fields null.
    @Builder.Default
    private Boolean stream = true;
    @Builder.Default
    private Boolean quantize = false;

    public CustomModelRequest() {
        this.stream = true;
        this.quantize = false;
    }

    /** Pretty-printed JSON form of this request. */
    @Override
    public String toString() {
        try {
            return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
}

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;
package io.github.ollama4j.models.request;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
import static io.github.ollama4j.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor;

View File

@@ -0,0 +1,144 @@
package io.github.ollama4j.models.request;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.chat.*;
import io.github.ollama4j.models.generate.OllamaTokenHandler;
import io.github.ollama4j.models.response.OllamaErrorResponse;
import io.github.ollama4j.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.List;

/**
 * Specialization class for chat requests: posts a chat request to the server, consumes the
 * (optionally streamed) response line by line, and assembles an {@link OllamaChatResult}.
 */
public class OllamaChatEndpointCaller extends OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class);

    // Optional per-token callback; set by call(), consulted while parsing streamed lines.
    private OllamaTokenHandler tokenHandler;

    public OllamaChatEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        super(host, basicAuth, requestTimeoutSeconds, verbose);
    }

    @Override
    protected String getEndpointSuffix() {
        return "/api/chat";
    }

    /**
     * Parses streamed Response line from ollama chat.
     * Using {@link com.fasterxml.jackson.databind.ObjectMapper#readValue(String, TypeReference)} should throw
     * {@link IllegalArgumentException} in case of null line or {@link com.fasterxml.jackson.core.JsonParseException}
     * in case the JSON Object cannot be parsed to a {@link OllamaChatResponseModel}. Thus, the ResponseModel should
     * never be null.
     *
     * @param line streamed line of ollama stream response
     * @param responseBuffer Stringbuffer to add latest response message part to
     * @return TRUE, if ollama-Response has 'done' state
     */
    @Override
    protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
        try {
            OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
            // it seems that under heavy load ollama responds with an empty chat message part in the streamed response
            // thus, we null check the message and hope that the next streamed response has some message content again
            OllamaChatMessage message = ollamaResponseModel.getMessage();
            if (message != null) {
                responseBuffer.append(message.getContent());
                if (tokenHandler != null) {
                    tokenHandler.accept(ollamaResponseModel);
                }
            }
            return ollamaResponseModel.isDone();
        } catch (JsonProcessingException e) {
            // Treat an unparseable line as terminal so the read loop does not spin forever.
            LOG.error("Error parsing the Ollama chat response!", e);
            return true;
        }
    }

    /**
     * Calls the chat endpoint with an optional token handler that is invoked for every
     * streamed response part.
     */
    public OllamaChatResult call(OllamaChatRequest body, OllamaTokenHandler tokenHandler)
            throws OllamaBaseException, IOException, InterruptedException {
        this.tokenHandler = tokenHandler;
        return callSync(body);
    }

    /**
     * Posts the chat request and blocks until the response is fully read.
     *
     * @param body the chat request payload
     * @return the assembled chat result (response appended to the request's message history)
     * @throws OllamaBaseException    when the server answers with a non-200 status
     * @throws IOException            when the response stream cannot be read
     * @throws InterruptedException   when the HTTP call is interrupted
     */
    public OllamaChatResult callSync(OllamaChatRequest body) throws OllamaBaseException, IOException, InterruptedException {
        // Create Request
        HttpClient httpClient = HttpClient.newHttpClient();
        URI uri = URI.create(getHost() + getEndpointSuffix());
        HttpRequest.Builder requestBuilder =
                getRequestBuilderDefault(uri)
                        .POST(
                                body.getBodyPublisher());
        HttpRequest request = requestBuilder.build();
        if (isVerbose()) LOG.info("Asking model: " + body);
        HttpResponse<InputStream> response =
                httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
        int statusCode = response.statusCode();
        InputStream responseBodyStream = response.body();
        StringBuilder responseBuffer = new StringBuilder();
        OllamaChatResponseModel ollamaChatResponseModel = null;
        // Tool calls seen on intermediate streamed parts are carried over to the final part,
        // since the final (done) part only holds the concatenated text content.
        List<OllamaChatToolCalls> wantedToolsForStream = null;
        try (BufferedReader reader =
                     new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                if (statusCode == 404) {
                    LOG.warn("Status code: 404 (Not Found)");
                    OllamaErrorResponse ollamaResponseModel =
                            Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else if (statusCode == 401) {
                    LOG.warn("Status code: 401 (Unauthorized)");
                    // 401 responses carry no parseable body; synthesize a fixed error message.
                    OllamaErrorResponse ollamaResponseModel =
                            Utils.getObjectMapper()
                                    .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponse.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else if (statusCode == 400) {
                    LOG.warn("Status code: 400 (Bad Request)");
                    OllamaErrorResponse ollamaResponseModel = Utils.getObjectMapper().readValue(line,
                            OllamaErrorResponse.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else {
                    // NOTE(review): the line is deserialized twice (once inside
                    // parseResponseAndAddToBuffer, once here) — consider refactoring.
                    boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);
                    ollamaChatResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
                    if (body.stream && ollamaChatResponseModel.getMessage().getToolCalls() != null) {
                        wantedToolsForStream = ollamaChatResponseModel.getMessage().getToolCalls();
                    }
                    if (finished && body.stream) {
                        // Replace the final part's content with the full accumulated text.
                        ollamaChatResponseModel.getMessage().setContent(responseBuffer.toString());
                        break;
                    }
                }
            }
        }
        if (statusCode != 200) {
            LOG.error("Status code " + statusCode);
            throw new OllamaBaseException(responseBuffer.toString());
        } else {
            if (wantedToolsForStream != null) {
                ollamaChatResponseModel.getMessage().setToolCalls(wantedToolsForStream);
            }
            OllamaChatResult ollamaResult =
                    new OllamaChatResult(ollamaChatResponseModel, body.getMessages());
            if (isVerbose()) LOG.info("Model response: " + ollamaResult);
            return ollamaResult;
        }
    }
}

View File

@@ -0,0 +1,35 @@
package io.github.ollama4j.models.request;

import java.util.Map;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.github.ollama4j.utils.BooleanToJsonFormatFlagSerializer;
import io.github.ollama4j.utils.Utils;
import lombok.Data;

/**
 * Fields shared by all Ollama request bodies. Null fields are omitted from the
 * serialized JSON ({@code NON_NULL} inclusion).
 */
@Data
@JsonInclude(JsonInclude.Include.NON_NULL)
public abstract class OllamaCommonRequest {

    protected String model;

    // Serialized via BooleanToJsonFormatFlagSerializer — presumably emits the API's
    // "format" flag when true; see that serializer for the exact wire value.
    @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)
    @JsonProperty(value = "format")
    protected Boolean returnFormatJson;

    // Model options (e.g. sampling parameters), passed through as a map.
    protected Map<String, Object> options;

    protected String template;

    protected boolean stream;

    @JsonProperty(value = "keep_alive")
    protected String keepAlive;

    /** Pretty-printed JSON form of this request. */
    public String toString() {
        try {
            return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
}

View File

@@ -0,0 +1,86 @@
package io.github.ollama4j.models.request;

import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.response.OllamaErrorResponse;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.utils.OllamaRequestBody;
import io.github.ollama4j.utils.Utils;
import lombok.Getter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Base64;

/**
 * Abstract helper class to call the Ollama API server: holds connection settings and
 * provides the default request builder (content type, timeout, optional Basic auth).
 */
@Getter
public abstract class OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaAPI.class);

    private final String host;
    private final BasicAuth basicAuth;
    private final long requestTimeoutSeconds;
    private final boolean verbose;

    public OllamaEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        this.host = host;
        this.basicAuth = basicAuth;
        this.requestTimeoutSeconds = requestTimeoutSeconds;
        this.verbose = verbose;
    }

    /** Endpoint path (e.g. "/api/chat") appended to the host by subclasses. */
    protected abstract String getEndpointSuffix();

    /** Parses one streamed response line into the buffer; returns true when the stream is done. */
    protected abstract boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer);

    /**
     * Get default request builder.
     *
     * @param uri URI to get a HttpRequest.Builder
     * @return HttpRequest.Builder
     */
    protected HttpRequest.Builder getRequestBuilderDefault(URI uri) {
        HttpRequest.Builder requestBuilder =
                HttpRequest.newBuilder(uri)
                        .header("Content-Type", "application/json")
                        .timeout(Duration.ofSeconds(this.requestTimeoutSeconds));
        if (isBasicAuthCredentialsSet()) {
            requestBuilder.header("Authorization", getBasicAuthHeaderValue());
        }
        return requestBuilder;
    }

    /**
     * Get basic authentication header value.
     *
     * @return basic authentication header value (encoded credentials)
     */
    protected String getBasicAuthHeaderValue() {
        String credentialsToEncode = this.basicAuth.getUsername() + ":" + this.basicAuth.getPassword();
        // Encode explicitly as UTF-8: the no-arg getBytes() uses the platform default
        // charset, which can corrupt non-ASCII credentials on some systems.
        return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes(StandardCharsets.UTF_8));
    }

    /**
     * Check if Basic Auth credentials set.
     *
     * @return true when Basic Auth credentials set
     */
    protected boolean isBasicAuthCredentialsSet() {
        return this.basicAuth != null;
    }
}

View File

@@ -0,0 +1,127 @@
package io.github.ollama4j.models.request;

import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.response.OllamaErrorResponse;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.utils.OllamaRequestBody;
import io.github.ollama4j.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

/**
 * Specialization class for generate requests: posts the request, consumes the (optionally
 * streamed) response line by line, and assembles an {@link OllamaResult}.
 */
public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class);

    // Optional observer that forwards accumulated text to a stream handler; set by call().
    private OllamaGenerateStreamObserver streamObserver;

    public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        super(host, basicAuth, requestTimeoutSeconds, verbose);
    }

    @Override
    protected String getEndpointSuffix() {
        return "/api/generate";
    }

    /**
     * Parses one streamed response line, appends its text to the buffer and notifies the
     * stream observer (if any). Returns true when the server marks the response done.
     */
    @Override
    protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
        try {
            OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
            responseBuffer.append(ollamaResponseModel.getResponse());
            if (streamObserver != null) {
                streamObserver.notify(ollamaResponseModel);
            }
            return ollamaResponseModel.isDone();
        } catch (JsonProcessingException e) {
            // Treat an unparseable line as terminal so the read loop does not spin forever.
            LOG.error("Error parsing the Ollama chat response!", e);
            return true;
        }
    }

    /** Calls the endpoint with a stream handler that receives the accumulated text per part. */
    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
            throws OllamaBaseException, IOException, InterruptedException {
        streamObserver = new OllamaGenerateStreamObserver(streamHandler);
        return callSync(body);
    }

    /**
     * Calls the api server on the given host and endpoint suffix asynchronously, aka waiting for the response.
     *
     * @param body POST body payload
     * @return result answer given by the assistant
     * @throws OllamaBaseException any response code than 200 has been returned
     * @throws IOException in case the responseStream can not be read
     * @throws InterruptedException in case the server is not reachable or network issues happen
     */
    public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException {
        // Create Request
        long startTime = System.currentTimeMillis();
        HttpClient httpClient = HttpClient.newHttpClient();
        URI uri = URI.create(getHost() + getEndpointSuffix());
        HttpRequest.Builder requestBuilder =
                getRequestBuilderDefault(uri)
                        .POST(
                                body.getBodyPublisher());
        HttpRequest request = requestBuilder.build();
        if (isVerbose()) LOG.info("Asking model: " + body.toString());
        HttpResponse<InputStream> response =
                httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
        int statusCode = response.statusCode();
        InputStream responseBodyStream = response.body();
        StringBuilder responseBuffer = new StringBuilder();
        try (BufferedReader reader =
                     new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                if (statusCode == 404) {
                    LOG.warn("Status code: 404 (Not Found)");
                    OllamaErrorResponse ollamaResponseModel =
                            Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else if (statusCode == 401) {
                    LOG.warn("Status code: 401 (Unauthorized)");
                    // 401 responses carry no parseable body; synthesize a fixed error message.
                    OllamaErrorResponse ollamaResponseModel =
                            Utils.getObjectMapper()
                                    .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponse.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else if (statusCode == 400) {
                    LOG.warn("Status code: 400 (Bad Request)");
                    OllamaErrorResponse ollamaResponseModel = Utils.getObjectMapper().readValue(line,
                            OllamaErrorResponse.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else {
                    boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);
                    if (finished) {
                        break;
                    }
                }
            }
        }
        if (statusCode != 200) {
            LOG.error("Status code " + statusCode);
            throw new OllamaBaseException(responseBuffer.toString());
        } else {
            long endTime = System.currentTimeMillis();
            OllamaResult ollamaResult =
                    new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode);
            if (isVerbose()) LOG.info("Model response: " + ollamaResult);
            return ollamaResult;
        }
    }
}

View File

@@ -0,0 +1,16 @@
package io.github.ollama4j.models.response;

import java.util.ArrayList;
import java.util.List;

import lombok.Data;

/**
 * One model entry from the Ollama model library listing — presumably populated from the
 * ollama.com library page, hence the display-oriented String fields.
 */
@Data
public class LibraryModel {
    private String name;
    private String description;
    // Human-readable pull count as displayed (not a number), hence a String.
    private String pullCount;
    private int totalTags;
    private List<String> popularTags = new ArrayList<>();
    private String lastUpdated;
}

View File

@@ -0,0 +1,12 @@
package io.github.ollama4j.models.response;

import lombok.Data;

import java.util.List;

/**
 * A library model together with the full list of its available tags.
 */
@Data
public class LibraryModelDetail {
    private LibraryModel model;
    private List<LibraryModelTag> tags;
}

View File

@@ -0,0 +1,13 @@
package io.github.ollama4j.models.response;

import lombok.Data;

import java.util.List;

/**
 * One tag (variant) of a library model, e.g. a specific size or quantization.
 */
@Data
public class LibraryModelTag {
    private String name;
    private String tag;
    // Display-oriented size string (not bytes).
    private String size;
    private String lastUpdated;
}

View File

@@ -1,6 +1,7 @@
package io.github.amithkoujalgi.ollama4j.core.models;
package io.github.ollama4j.models.response;
import java.util.List;
import lombok.Data;
@Data

View File

@@ -1,14 +1,21 @@
package io.github.amithkoujalgi.ollama4j.core.models;
package io.github.ollama4j.models.response;
import java.time.OffsetDateTime;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.ollama4j.utils.Utils;
import lombok.Data;
@Data
public class Model {
private String name;
private String model;
@JsonProperty("modified_at")
private String modifiedAt;
private OffsetDateTime modifiedAt;
@JsonProperty("expires_at")
private OffsetDateTime expiresAt;
private String digest;
private long size;
@JsonProperty("details")
@@ -33,4 +40,13 @@ public class Model {
return name.split(":")[1];
}
@Override
public String toString() {
try {
return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}

View File

@@ -0,0 +1,30 @@
package io.github.ollama4j.models.response;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.ollama4j.utils.Utils;
import lombok.Data;

/**
 * Detailed information about a single local model — presumably the payload of Ollama's
 * show-model endpoint; confirm against the caller.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class ModelDetail {
    private String license;

    // Raw Modelfile contents.
    @JsonProperty("modelfile")
    private String modelFile;

    private String parameters;
    private String template;
    private String system;
    private ModelMeta details;

    /** Pretty-printed JSON form. */
    @Override
    public String toString() {
        try {
            return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
}

View File

@@ -1,7 +1,9 @@
package io.github.amithkoujalgi.ollama4j.core.models;
package io.github.ollama4j.models.response;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.ollama4j.utils.Utils;
import lombok.Data;
@Data
@@ -21,4 +23,13 @@ public class ModelMeta {
@JsonProperty("quantization_level")
private String quantizationLevel;
@Override
public String toString() {
try {
return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}

Some files were not shown because too many files have changed in this diff Show More