forked from Mirror/ollama4j

Compare commits

93 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | e750c2d7f9 |  |
|  | 62f16131f3 |  |
|  | 2cbaf12d7c |  |
|  | e2d555d404 |  |
|  | c296b34174 |  |
|  | e8f99f28ec |  |
|  | 250b1abc79 |  |
|  | 42b15ad93f |  |
|  | 6f7a714bae |  |
|  | 92618e5084 |  |
|  | 391a9242c3 |  |
|  | e1b6dc3b54 |  |
|  | 04124cf978 |  |
|  | e4e717b747 |  |
|  | 10d2a8f5ff |  |
|  | 899fa38805 |  |
|  | 2df878c953 |  |
|  | 78a5eedc8f |  |
|  | 364f961ee2 |  |
|  | b21aa6add2 |  |
|  | ec4abd1c2d |  |
|  | 9900ae92fb |  |
|  | fa20daf6e5 |  |
|  | 44949c0559 |  |
|  | e88711a017 |  |
|  | 32169ded18 |  |
|  | 4b2d566fd9 |  |
|  | fb4b7a7ce5 |  |
|  | 18f27775b0 |  |
|  | cb462ad05a |  |
|  | 1eec22ca1a |  |
|  | c1f3c51f88 |  |
|  | 7dd556293f |  |
|  | ee50131ce4 |  |
|  | 2cd47dbfaa |  |
|  | e5296c1067 |  |
|  | 0f00f05e3d |  |
|  | 976a3b82e5 |  |
|  | ba26d620c4 |  |
|  | e45246a767 |  |
|  | 7336668f0c |  |
|  | 11701fb222 |  |
|  | b1ec12c4e9 |  |
|  | d0b0a0fc97 |  |
|  | 20774fca6b |  |
|  | 9c46b510d8 |  |
|  | 9d887b60a8 |  |
|  | 63d4de4e24 |  |
|  | a10692e2f1 |  |
|  | b0c152a42e |  |
|  | f44767e023 |  |
|  | aadef0a57c |  |
|  | 777ee7ffe0 |  |
|  | dcf1d0bdbc |  |
|  | 13b7111a42 |  |
|  | 09442d37a3 |  |
|  | 1e66bdb07f |  |
|  | b423090db9 |  |
|  | a32d94efbf |  |
|  | 31f8302849 |  |
|  | 6487756764 |  |
|  | abb76ad867 |  |
|  | cf4e7a96e8 |  |
|  | 0f414f71a3 |  |
|  | 2b700fdad8 |  |
|  | 06c5daa253 |  |
|  | 91aab6cbd1 |  |
|  | f38a00ebdc |  |
|  | 0f73ea75ab |  |
|  | 8fe869afdb |  |
|  | 2d274c4f5b |  |
|  | 713a3239a4 |  |
|  | a9e7958d44 |  |
|  | f38e84053f |  |
|  | 7eb16b7ba0 |  |
|  | 5a3889d8ee |  |
|  | 2c52f4d0bb |  |
|  | 32c4231eb5 |  |
|  | e9621f054d |  |
|  | b41b62220c |  |
|  | c89440cbca |  |
|  | 1aeb555a53 |  |
|  | 9aff3ec5d9 |  |
|  | b4eaf0cfb5 |  |
|  | 199cb6082d |  |
|  | 37bfe26a6d |  |
|  | 3769386539 |  |
|  | 84a6e57f42 |  |
|  | 14d2474ee9 |  |
|  | ca613ed80a |  |
|  | bbcd458849 |  |
|  | bc885894f8 |  |
|  | bc83df6971 |  |
							
								
								
									
README.md (22)

| @@ -67,7 +67,7 @@ In your Maven project, add this dependency: | |||||||
| <dependency> | <dependency> | ||||||
|     <groupId>io.github.amithkoujalgi</groupId> |     <groupId>io.github.amithkoujalgi</groupId> | ||||||
|     <artifactId>ollama4j</artifactId> |     <artifactId>ollama4j</artifactId> | ||||||
|     <version>1.0.47</version> |     <version>1.0.57</version> | ||||||
| </dependency> | </dependency> | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| @@ -110,6 +110,16 @@ make it | |||||||
| Releases (newer artifact versions) are done automatically on pushing the code to the `main` branch through GitHub | Releases (newer artifact versions) are done automatically on pushing the code to the `main` branch through GitHub | ||||||
| Actions CI workflow. | Actions CI workflow. | ||||||
|  |  | ||||||
|  | #### Who's using Ollama4j? | ||||||
|  |  | ||||||
|  | - `Datafaker`: a library to generate fake data | ||||||
|  |     - https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api | ||||||
|  | - `Vaadin Web UI`: a UI tester for interactions with Ollama via ollama4j | ||||||
|  |     - https://github.com/TEAMPB/ollama4j-vaadin-ui | ||||||
|  | - `ollama-translator`: a Minecraft 1.20.6 Spigot plugin that makes it easy to break language barriers by using Ollama on the | ||||||
|  |   server to translate all messages into a specific target language. | ||||||
|  |     - https://github.com/liebki/ollama-translator | ||||||
|  |  | ||||||
| #### Traction | #### Traction | ||||||
|  |  | ||||||
| [](https://star-history.com/#amithkoujalgi/ollama4j&Date) | [](https://star-history.com/#amithkoujalgi/ollama4j&Date) | ||||||
| @@ -125,15 +135,15 @@ Actions CI workflow. | |||||||
| - [x] Update request body creation with Java objects | - [x] Update request body creation with Java objects | ||||||
| - [ ] Async APIs for images | - [ ] Async APIs for images | ||||||
| - [ ] Add custom headers to requests | - [ ] Add custom headers to requests | ||||||
| - [ ] Add additional params for `ask` APIs such as: | - [x] Add additional params for `ask` APIs such as: | ||||||
|     - [x] `options`: additional model parameters for the Modelfile such as `temperature` - |     - [x] `options`: additional model parameters for the Modelfile such as `temperature` - | ||||||
|       Supported [params](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values). |       Supported [params](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values). | ||||||
|     - [ ] `system`: system prompt (overrides what is defined in the Modelfile) |     - [x] `system`: system prompt (overrides what is defined in the Modelfile) | ||||||
|     - [ ] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile) |     - [x] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile) | ||||||
|     - [ ] `context`: the context parameter returned from a previous request, which can be used to keep a |     - [x] `context`: the context parameter returned from a previous request, which can be used to keep a | ||||||
|       short |       short | ||||||
|       conversational memory |       conversational memory | ||||||
|     - [ ] `stream`: Add support for streaming responses from the model |     - [x] `stream`: Add support for streaming responses from the model | ||||||
| - [ ] Add test cases | - [ ] Add test cases | ||||||
| - [ ] Handle exceptions better (maybe throw more appropriate exceptions) | - [ ] Handle exceptions better (maybe throw more appropriate exceptions) | ||||||
|  |  | ||||||
|   | |||||||
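
The roadmap above marks the additional `ask`/generate parameters (options such as `temperature`, system prompt, template, context, and streaming) as done. As a quick orientation, here is a minimal, hedged sketch of passing options together with a stream handler, based on the `generate(model, prompt, options, streamHandler)` overload and the `OptionsBuilder` that appear later in this diff; the import paths, the `setTemperature` setter name, and the model constant are assumptions rather than facts taken from this changeset.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class GenerateWithOptionsSketch {

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Modelfile-style options; setTemperature is an assumed OptionsBuilder setter.
        Options options = new OptionsBuilder()
                .setTemperature(0.8f)
                .build();

        // The handler receives the response text as it streams in.
        OllamaStreamHandler streamHandler = System.out::println;

        OllamaResult result = ollamaAPI.generate(OllamaModelType.LLAMA2,
                "What is the capital of France?", options, streamHandler);
        System.out.println("Full response: " + result.getResponse());
    }
}
```
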
| @@ -1,98 +0,0 @@ | |||||||
| --- |  | ||||||
| sidebar_position: 7 |  | ||||||
| --- |  | ||||||
|  |  | ||||||
| # Chat |  | ||||||
|  |  | ||||||
| This API lets you create a conversation with LLMs. Using this API enables you to ask questions to the model including  |  | ||||||
| information using the history of already asked questions and the respective answers. |  | ||||||
|  |  | ||||||
| ## Create a new conversation and use chat history to augment follow up questions |  | ||||||
|  |  | ||||||
| ```java |  | ||||||
| public class Main { |  | ||||||
|  |  | ||||||
|     public static void main(String[] args) { |  | ||||||
|  |  | ||||||
|         String host = "http://localhost:11434/"; |  | ||||||
|  |  | ||||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); |  | ||||||
|         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); |  | ||||||
|  |  | ||||||
|         // create first user question |  | ||||||
|         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,"What is the capital of France?") |  | ||||||
|              .build(); |  | ||||||
|  |  | ||||||
|         // start conversation with model |  | ||||||
|         OllamaChatResult chatResult = ollamaAPI.chat(requestModel); |  | ||||||
|  |  | ||||||
|         System.out.println("First answer: " + chatResult.getResponse()); |  | ||||||
|  |  | ||||||
|         // create next userQuestion |  | ||||||
|         requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER,"And what is the second largest city?").build(); |  | ||||||
|  |  | ||||||
|         // "continue" conversation with model |  | ||||||
|         chatResult = ollamaAPI.chat(requestModel); |  | ||||||
|  |  | ||||||
|         System.out.println("Second answer: " + chatResult.getResponse()); |  | ||||||
|  |  | ||||||
|         System.out.println("Chat History: " + chatResult.getChatHistory()); |  | ||||||
|     } |  | ||||||
| } |  | ||||||
|  |  | ||||||
| ``` |  | ||||||
| You will get a response similar to: |  | ||||||
|  |  | ||||||
| > First answer: Should be Paris! |  | ||||||
| >  |  | ||||||
| > Second answer: Marseille. |  | ||||||
| >  |  | ||||||
| > Chat History: |  | ||||||
|  |  | ||||||
| ```json |  | ||||||
| [ { |  | ||||||
|     "role" : "user", |  | ||||||
|     "content" : "What is the capital of France?", |  | ||||||
|     "images" : [ ] |  | ||||||
|   }, { |  | ||||||
|     "role" : "assistant", |  | ||||||
|     "content" : "Should be Paris!", |  | ||||||
|     "images" : [ ] |  | ||||||
|   }, { |  | ||||||
|     "role" : "user", |  | ||||||
|     "content" : "And what is the second largest city?", |  | ||||||
|     "images" : [ ] |  | ||||||
|   }, { |  | ||||||
|     "role" : "assistant", |  | ||||||
|     "content" : "Marseille.", |  | ||||||
|     "images" : [ ] |  | ||||||
|   } ] |  | ||||||
| ``` |  | ||||||
|  |  | ||||||
| ## Create a new conversation with individual system prompt |  | ||||||
| ```java |  | ||||||
| public class Main { |  | ||||||
|  |  | ||||||
|     public static void main(String[] args) { |  | ||||||
|  |  | ||||||
|         String host = "http://localhost:11434/"; |  | ||||||
|  |  | ||||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); |  | ||||||
|         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); |  | ||||||
|  |  | ||||||
|         // create request with system-prompt (overriding the model defaults) and user question |  | ||||||
|         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") |  | ||||||
|              .withMessage(OllamaChatMessageRole.USER,"What is the capital of France? And what's France's connection with Mona Lisa?") |  | ||||||
|              .build(); |  | ||||||
|  |  | ||||||
|         // start conversation with model |  | ||||||
|         OllamaChatResult chatResult = ollamaAPI.chat(requestModel); |  | ||||||
|  |  | ||||||
|         System.out.println(chatResult.getResponse()); |  | ||||||
|     } |  | ||||||
| } |  | ||||||
|  |  | ||||||
| ``` |  | ||||||
| You will get a response similar to: |  | ||||||
|  |  | ||||||
| > NI. |  | ||||||
| @@ -1,6 +1,6 @@ | |||||||
| { | { | ||||||
|   "label": "APIs - Extras", |   "label": "APIs - Extras", | ||||||
|   "position": 10, |   "position": 4, | ||||||
|   "link": { |   "link": { | ||||||
|     "type": "generated-index", |     "type": "generated-index", | ||||||
|     "description": "Details of APIs to handle bunch of extra stuff." |     "description": "Details of APIs to handle bunch of extra stuff." | ||||||
|   | |||||||
| @@ -1,6 +1,6 @@ | |||||||
| { | { | ||||||
|   "label": "APIs - Ask", |   "label": "APIs - Generate", | ||||||
|   "position": 10, |   "position": 3, | ||||||
|   "link": { |   "link": { | ||||||
|     "type": "generated-index", |     "type": "generated-index", | ||||||
|     "description": "Details of APIs to interact with LLMs." |     "description": "Details of APIs to interact with LLMs." | ||||||
							
								
								
									
docs/docs/apis-generate/chat.md (205, new file)

| @@ -0,0 +1,205 @@ | |||||||
|  | --- | ||||||
|  | sidebar_position: 7 | ||||||
|  | --- | ||||||
|  |  | ||||||
|  | # Chat | ||||||
|  |  | ||||||
|  | This API lets you create a conversation with LLMs. It enables you to ask the model questions while supplying the | ||||||
|  | history of previously asked questions and their answers as additional context. | ||||||
|  |  | ||||||
|  | ## Create a new conversation and use chat history to augment follow up questions | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public class Main { | ||||||
|  |  | ||||||
|  |     public static void main(String[] args) { | ||||||
|  |  | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  |  | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); | ||||||
|  |  | ||||||
|  |         // create first user question | ||||||
|  |         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") | ||||||
|  |                 .build(); | ||||||
|  |  | ||||||
|  |         // start conversation with model | ||||||
|  |         OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |  | ||||||
|  |         System.out.println("First answer: " + chatResult.getResponse()); | ||||||
|  |  | ||||||
|  |         // create next userQuestion | ||||||
|  |         requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?").build(); | ||||||
|  |  | ||||||
|  |         // "continue" conversation with model | ||||||
|  |         chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |  | ||||||
|  |         System.out.println("Second answer: " + chatResult.getResponse()); | ||||||
|  |  | ||||||
|  |         System.out.println("Chat History: " + chatResult.getChatHistory()); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | You will get a response similar to: | ||||||
|  |  | ||||||
|  | > First answer: Should be Paris! | ||||||
|  | > | ||||||
|  | > Second answer: Marseille. | ||||||
|  | > | ||||||
|  | > Chat History: | ||||||
|  |  | ||||||
|  | ```json | ||||||
|  | [ | ||||||
|  |   { | ||||||
|  |     "role": "user", | ||||||
|  |     "content": "What is the capital of France?", | ||||||
|  |     "images": [] | ||||||
|  |   }, | ||||||
|  |   { | ||||||
|  |     "role": "assistant", | ||||||
|  |     "content": "Should be Paris!", | ||||||
|  |     "images": [] | ||||||
|  |   }, | ||||||
|  |   { | ||||||
|  |     "role": "user", | ||||||
|  |     "content": "And what is the second largest city?", | ||||||
|  |     "images": [] | ||||||
|  |   }, | ||||||
|  |   { | ||||||
|  |     "role": "assistant", | ||||||
|  |     "content": "Marseille.", | ||||||
|  |     "images": [] | ||||||
|  |   } | ||||||
|  | ] | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ## Create a conversation where the answer is streamed | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public class Main { | ||||||
|  |  | ||||||
|  |     public static void main(String[] args) { | ||||||
|  |  | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  |  | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); | ||||||
|  |         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, | ||||||
|  |                         "What is the capital of France? And what's France's connection with Mona Lisa?") | ||||||
|  |                 .build(); | ||||||
|  |  | ||||||
|  |         // define a handler (Consumer<String>) | ||||||
|  |         OllamaStreamHandler streamHandler = (s) -> { | ||||||
|  |             System.out.println(s); | ||||||
|  |         }; | ||||||
|  |  | ||||||
|  |         OllamaChatResult chatResult = ollamaAPI.chat(requestModel, streamHandler); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | You will get a response similar to: | ||||||
|  |  | ||||||
|  | > The | ||||||
|  | > The capital | ||||||
|  | > The capital of | ||||||
|  | > The capital of France | ||||||
|  | > The capital of France is | ||||||
|  | > The capital of France is Paris | ||||||
|  | > The capital of France is Paris. | ||||||
|  |  | ||||||
|  | ## Use a simple Console Output Stream Handler | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler; | ||||||
|  |  | ||||||
|  | public class Main { | ||||||
|  |     public static void main(String[] args) throws Exception { | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |  | ||||||
|  |         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); | ||||||
|  |         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!") | ||||||
|  |                 .build(); | ||||||
|  |         OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler(); | ||||||
|  |         ollamaAPI.chat(requestModel, streamHandler); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ## Create a new conversation with individual system prompt | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public class Main { | ||||||
|  |  | ||||||
|  |     public static void main(String[] args) { | ||||||
|  |  | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  |  | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); | ||||||
|  |  | ||||||
|  |         // create request with system-prompt (overriding the model defaults) and user question | ||||||
|  |         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") | ||||||
|  |                 .withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?") | ||||||
|  |                 .build(); | ||||||
|  |  | ||||||
|  |         // start conversation with model | ||||||
|  |         OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |  | ||||||
|  |         System.out.println(chatResult.getResponse()); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | You will get a response similar to: | ||||||
|  |  | ||||||
|  | > NI. | ||||||
|  |  | ||||||
|  | ## Create a conversation about an image (requires a model with image recognition skills) | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public class Main { | ||||||
|  |  | ||||||
|  |     public static void main(String[] args) { | ||||||
|  |  | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  |  | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA); | ||||||
|  |  | ||||||
|  |         // Load Image from File and attach to user message (alternatively images could also be added via URL) | ||||||
|  |         OllamaChatRequestModel requestModel = | ||||||
|  |                 builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", | ||||||
|  |                         List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); | ||||||
|  |  | ||||||
|  |         OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |         System.out.println("First answer: " + chatResult.getResponse()); | ||||||
|  |  | ||||||
|  |         builder.reset(); | ||||||
|  |  | ||||||
|  |         // Use history to ask further questions about the image or assistant answer | ||||||
|  |         requestModel = | ||||||
|  |                 builder.withMessages(chatResult.getChatHistory()) | ||||||
|  |                         .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build(); | ||||||
|  |  | ||||||
|  |         chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |         System.out.println("Second answer: " + chatResult.getResponse()); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | You will get a response similar to: | ||||||
|  |  | ||||||
|  | > First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two | ||||||
|  | > levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and | ||||||
|  | > comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early | ||||||
|  | > evening, given the warm lighting and the low position of the sun in the sky. | ||||||
|  | > | ||||||
|  | > Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. However, the dog | ||||||
|  | > appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever | ||||||
|  | > or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed | ||||||
|  | > confidently. | ||||||
| @@ -41,6 +41,41 @@ You will get a response similar to: | |||||||
| > require | > require | ||||||
| > natural language understanding and generation capabilities. | > natural language understanding and generation capabilities. | ||||||
|  |  | ||||||
|  | ## Try asking a question, receiving the answer streamed | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public class Main { | ||||||
|  |  | ||||||
|  |     public static void main(String[] args) { | ||||||
|  |  | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  |  | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |         // define a stream handler (Consumer<String>) | ||||||
|  |         OllamaStreamHandler streamHandler = (s) -> { | ||||||
|  |            System.out.println(s); | ||||||
|  |         }; | ||||||
|  |  | ||||||
|  |         // Call from a separate thread to get a non-blocking streaming effect. | ||||||
|  |         OllamaResult result = ollamaAPI.generate(OllamaModelType.LLAMA2, | ||||||
|  |           "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||||
|  |           new OptionsBuilder().build(), streamHandler); | ||||||
|  |  | ||||||
|  |         System.out.println("Full response: " + result.getResponse()); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  | You will get a response similar to: | ||||||
|  |  | ||||||
|  | > The | ||||||
|  | > The capital | ||||||
|  | > The capital of | ||||||
|  | > The capital of France | ||||||
|  | > The capital of France is | ||||||
|  | > The capital of France is Paris | ||||||
|  | > The capital of France is Paris. | ||||||
|  | > Full response: The capital of France is Paris. | ||||||
|  |  | ||||||
| ## Try asking a question from general topics. | ## Try asking a question from general topics. | ||||||
|  |  | ||||||
| ```java | ```java | ||||||
| @@ -42,7 +42,7 @@ public class AskPhi { | |||||||
|                         .addSeparator() |                         .addSeparator() | ||||||
|                         .add("How do I read a file in Go and print its contents to stdout?"); |                         .add("How do I read a file in Go and print its contents to stdout?"); | ||||||
|  |  | ||||||
|         OllamaResult response = ollamaAPI.generate(model, promptBuilder.build()); |         OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build()); | ||||||
|         System.out.println(response.getResponse()); |         System.out.println(response.getResponse()); | ||||||
|     } |     } | ||||||
| } | } | ||||||
| @@ -1,6 +1,6 @@ | |||||||
| { | { | ||||||
|   "label": "APIs - Model Management", |   "label": "APIs - Model Management", | ||||||
|   "position": 4, |   "position": 2, | ||||||
|   "link": { |   "link": { | ||||||
|     "type": "generated-index", |     "type": "generated-index", | ||||||
|     "description": "Details of APIs to manage LLMs." |     "description": "Details of APIs to manage LLMs." | ||||||
|   | |||||||
							
								
								
									
pom.xml (12)

| @@ -4,7 +4,7 @@ | |||||||
|  |  | ||||||
|     <groupId>io.github.amithkoujalgi</groupId> |     <groupId>io.github.amithkoujalgi</groupId> | ||||||
|     <artifactId>ollama4j</artifactId> |     <artifactId>ollama4j</artifactId> | ||||||
|     <version>1.0.51</version> |     <version>1.0.69</version> | ||||||
|  |  | ||||||
|     <name>Ollama4j</name> |     <name>Ollama4j</name> | ||||||
|     <description>Java library for interacting with Ollama API.</description> |     <description>Java library for interacting with Ollama API.</description> | ||||||
| @@ -39,7 +39,7 @@ | |||||||
|         <connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection> |         <connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection> | ||||||
|         <developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection> |         <developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection> | ||||||
|         <url>https://github.com/amithkoujalgi/ollama4j</url> |         <url>https://github.com/amithkoujalgi/ollama4j</url> | ||||||
|         <tag>v1.0.51</tag> |         <tag>v1.0.69</tag> | ||||||
|     </scm> |     </scm> | ||||||
|  |  | ||||||
|     <build> |     <build> | ||||||
| @@ -99,7 +99,7 @@ | |||||||
|                 <configuration> |                 <configuration> | ||||||
|                     <skipTests>${skipUnitTests}</skipTests> |                     <skipTests>${skipUnitTests}</skipTests> | ||||||
|                     <includes> |                     <includes> | ||||||
|                         <include>**/unittests/*.java</include> |                         <include>**/unittests/**/*.java</include> | ||||||
|                     </includes> |                     </includes> | ||||||
|                 </configuration> |                 </configuration> | ||||||
|             </plugin> |             </plugin> | ||||||
| @@ -174,6 +174,12 @@ | |||||||
|             <version>4.1.0</version> |             <version>4.1.0</version> | ||||||
|             <scope>test</scope> |             <scope>test</scope> | ||||||
|         </dependency> |         </dependency> | ||||||
|  |         <dependency> | ||||||
|  |             <groupId>org.json</groupId> | ||||||
|  |             <artifactId>json</artifactId> | ||||||
|  |             <version>20240205</version> | ||||||
|  |             <scope>test</scope> | ||||||
|  |         </dependency> | ||||||
|     </dependencies> |     </dependencies> | ||||||
|  |  | ||||||
|     <distributionManagement> |     <distributionManagement> | ||||||
|   | |||||||
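
Besides the version bump, this pom.xml change broadens the Surefire include pattern to pick up tests in nested `unittests` packages and adds `org.json` as a test-scoped dependency. A hedged sketch of the kind of unit test that dependency enables, asserting on the JSON produced by a request model's `toString()` (the use of JUnit 5, the serialized field name `model`, and the import paths are assumptions, not taken from this diff):

```java
import static org.junit.jupiter.api.Assertions.assertEquals;

import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import org.json.JSONObject;
import org.junit.jupiter.api.Test;

class RequestSerializationTest {

    @Test
    void embeddingsRequestSerializesModelName() {
        OllamaEmbeddingsRequestModel request =
                new OllamaEmbeddingsRequestModel("llama2", "hello world");
        // toString() is used as the JSON request body elsewhere in this changeset,
        // so tests can parse and inspect it with org.json.
        JSONObject json = new JSONObject(request.toString());
        assertEquals("llama2", json.getString("model"));
    }
}
```
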
| @@ -6,23 +6,18 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage; | |||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest; | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest; | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.ModelEmbeddingsRequest; | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.ModelRequest; | import io.github.amithkoujalgi.ollama4j.core.models.request.*; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaChatEndpointCaller; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaGenerateEndpointCaller; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Options; | import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
| import java.io.BufferedReader; | import org.slf4j.Logger; | ||||||
| import java.io.ByteArrayOutputStream; | import org.slf4j.LoggerFactory; | ||||||
| import java.io.File; |  | ||||||
| import java.io.IOException; | import java.io.*; | ||||||
| import java.io.InputStream; |  | ||||||
| import java.io.InputStreamReader; |  | ||||||
| import java.net.URI; | import java.net.URI; | ||||||
| import java.net.URISyntaxException; | import java.net.URISyntaxException; | ||||||
| import java.net.URL; |  | ||||||
| import java.net.http.HttpClient; | import java.net.http.HttpClient; | ||||||
| import java.net.http.HttpConnectTimeoutException; | import java.net.http.HttpConnectTimeoutException; | ||||||
| import java.net.http.HttpRequest; | import java.net.http.HttpRequest; | ||||||
| @@ -33,16 +28,16 @@ import java.time.Duration; | |||||||
| import java.util.ArrayList; | import java.util.ArrayList; | ||||||
| import java.util.Base64; | import java.util.Base64; | ||||||
| import java.util.List; | import java.util.List; | ||||||
| import org.slf4j.Logger; |  | ||||||
| import org.slf4j.LoggerFactory; |  | ||||||
|  |  | ||||||
| /** The base Ollama API class. */ | /** | ||||||
|  |  * The base Ollama API class. | ||||||
|  |  */ | ||||||
| @SuppressWarnings("DuplicatedCode") | @SuppressWarnings("DuplicatedCode") | ||||||
| public class OllamaAPI { | public class OllamaAPI { | ||||||
|  |  | ||||||
|     private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class); |     private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class); | ||||||
|     private final String host; |     private final String host; | ||||||
|   private long requestTimeoutSeconds = 3; |     private long requestTimeoutSeconds = 10; | ||||||
|     private boolean verbose = true; |     private boolean verbose = true; | ||||||
|     private BasicAuth basicAuth; |     private BasicAuth basicAuth; | ||||||
|  |  | ||||||
| @@ -314,8 +309,18 @@ public class OllamaAPI { | |||||||
|      */ |      */ | ||||||
|     public List<Double> generateEmbeddings(String model, String prompt) |     public List<Double> generateEmbeddings(String model, String prompt) | ||||||
|             throws IOException, InterruptedException, OllamaBaseException { |             throws IOException, InterruptedException, OllamaBaseException { | ||||||
|  |         return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt)); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Generate embeddings using a {@link OllamaEmbeddingsRequestModel}. | ||||||
|  |      * | ||||||
|  |      * @param modelRequest request for '/api/embeddings' endpoint | ||||||
|  |      * @return embeddings | ||||||
|  |      */ | ||||||
|  |     public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException { | ||||||
|         URI uri = URI.create(this.host + "/api/embeddings"); |         URI uri = URI.create(this.host + "/api/embeddings"); | ||||||
|     String jsonData = new ModelEmbeddingsRequest(model, prompt).toString(); |         String jsonData = modelRequest.toString(); | ||||||
|         HttpClient httpClient = HttpClient.newHttpClient(); |         HttpClient httpClient = HttpClient.newHttpClient(); | ||||||
|         HttpRequest.Builder requestBuilder = |         HttpRequest.Builder requestBuilder = | ||||||
|                 getRequestBuilderDefault(uri) |                 getRequestBuilderDefault(uri) | ||||||
| @@ -326,8 +331,8 @@ public class OllamaAPI { | |||||||
|         int statusCode = response.statusCode(); |         int statusCode = response.statusCode(); | ||||||
|         String responseBody = response.body(); |         String responseBody = response.body(); | ||||||
|         if (statusCode == 200) { |         if (statusCode == 200) { | ||||||
|       EmbeddingResponse embeddingResponse = |             OllamaEmbeddingResponseModel embeddingResponse = | ||||||
|           Utils.getObjectMapper().readValue(responseBody, EmbeddingResponse.class); |                     Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class); | ||||||
|             return embeddingResponse.getEmbedding(); |             return embeddingResponse.getEmbedding(); | ||||||
|         } else { |         } else { | ||||||
|             throw new OllamaBaseException(statusCode + " - " + responseBody); |             throw new OllamaBaseException(statusCode + " - " + responseBody); | ||||||
| @@ -343,13 +348,24 @@ public class OllamaAPI { | |||||||
|      * @param options       the Options object - <a |      * @param options       the Options object - <a | ||||||
|      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More |      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More | ||||||
|      *                      details on the options</a> |      *                      details on the options</a> | ||||||
|  |      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||||
|      * @return OllamaResult that includes response text and time taken for response |      * @return OllamaResult that includes response text and time taken for response | ||||||
|      */ |      */ | ||||||
|  |     public OllamaResult generate(String model, String prompt, Options options, OllamaStreamHandler streamHandler) | ||||||
|  |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|  |         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); | ||||||
|  |         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||||
|  |         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Convenience method to call Ollama API without streaming responses. | ||||||
|  |      * <p> | ||||||
|  |      * Uses {@link #generate(String, String, Options, OllamaStreamHandler)} | ||||||
|  |      */ | ||||||
|     public OllamaResult generate(String model, String prompt, Options options) |     public OllamaResult generate(String model, String prompt, Options options) | ||||||
|             throws OllamaBaseException, IOException, InterruptedException { |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt); |         return generate(model, prompt, options, null); | ||||||
|     ollamaRequestModel.setOptions(options.getOptionsMap()); |  | ||||||
|     return generateSyncForOllamaRequestModel(ollamaRequestModel); |  | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
| @@ -362,7 +378,7 @@ public class OllamaAPI { | |||||||
|      * @return the ollama async result callback handle |      * @return the ollama async result callback handle | ||||||
|      */ |      */ | ||||||
|     public OllamaAsyncResultCallback generateAsync(String model, String prompt) { |     public OllamaAsyncResultCallback generateAsync(String model, String prompt) { | ||||||
|     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt); |         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); | ||||||
|  |  | ||||||
|         URI uri = URI.create(this.host + "/api/generate"); |         URI uri = URI.create(this.host + "/api/generate"); | ||||||
|         OllamaAsyncResultCallback ollamaAsyncResultCallback = |         OllamaAsyncResultCallback ollamaAsyncResultCallback = | ||||||
| @@ -382,18 +398,30 @@ public class OllamaAPI { | |||||||
|      * @param options       the Options object - <a |      * @param options       the Options object - <a | ||||||
|      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More |      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More | ||||||
|      *                      details on the options</a> |      *                      details on the options</a> | ||||||
|  |      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||||
|      * @return OllamaResult that includes response text and time taken for response |      * @return OllamaResult that includes response text and time taken for response | ||||||
|      */ |      */ | ||||||
|     public OllamaResult generateWithImageFiles( |     public OllamaResult generateWithImageFiles( | ||||||
|       String model, String prompt, List<File> imageFiles, Options options) |             String model, String prompt, List<File> imageFiles, Options options, OllamaStreamHandler streamHandler) | ||||||
|             throws OllamaBaseException, IOException, InterruptedException { |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|         List<String> images = new ArrayList<>(); |         List<String> images = new ArrayList<>(); | ||||||
|         for (File imageFile : imageFiles) { |         for (File imageFile : imageFiles) { | ||||||
|             images.add(encodeFileToBase64(imageFile)); |             images.add(encodeFileToBase64(imageFile)); | ||||||
|         } |         } | ||||||
|     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images); |         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); | ||||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); |         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||||
|     return generateSyncForOllamaRequestModel(ollamaRequestModel); |         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Convenience method to call Ollama API without streaming responses. | ||||||
|  |      * <p> | ||||||
|  |      * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)} | ||||||
|  |      */ | ||||||
|  |     public OllamaResult generateWithImageFiles( | ||||||
|  |             String model, String prompt, List<File> imageFiles, Options options) | ||||||
|  |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|  |         return generateWithImageFiles(model, prompt, imageFiles, options, null); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
| @@ -406,20 +434,31 @@ public class OllamaAPI { | |||||||
|      * @param options       the Options object - <a |      * @param options       the Options object - <a | ||||||
|      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More |      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More | ||||||
|      *                      details on the options</a> |      *                      details on the options</a> | ||||||
|  |      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||||
|      * @return OllamaResult that includes response text and time taken for response |      * @return OllamaResult that includes response text and time taken for response | ||||||
|      */ |      */ | ||||||
|     public OllamaResult generateWithImageURLs( |     public OllamaResult generateWithImageURLs( | ||||||
|       String model, String prompt, List<String> imageURLs, Options options) |             String model, String prompt, List<String> imageURLs, Options options, OllamaStreamHandler streamHandler) | ||||||
|             throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { |             throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||||
|         List<String> images = new ArrayList<>(); |         List<String> images = new ArrayList<>(); | ||||||
|         for (String imageURL : imageURLs) { |         for (String imageURL : imageURLs) { | ||||||
|       images.add(encodeByteArrayToBase64(loadImageBytesFromUrl(imageURL))); |             images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL))); | ||||||
|         } |         } | ||||||
|     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images); |         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); | ||||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); |         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||||
|     return generateSyncForOllamaRequestModel(ollamaRequestModel); |         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Convenience method to call Ollama API without streaming responses. | ||||||
|  |      * <p> | ||||||
|  |      * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)} | ||||||
|  |      */ | ||||||
|  |     public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs, | ||||||
|  |                                               Options options) | ||||||
|  |             throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||||
|  |         return generateWithImageURLs(model, prompt, imageURLs, options, null); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
| @@ -433,14 +472,14 @@ public class OllamaAPI { | |||||||
|      * @throws IOException          in case the responseStream can not be read |      * @throws IOException          in case the responseStream can not be read | ||||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen |      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||||
|      */ |      */ | ||||||
|   public OllamaChatResult chat(String model, List<OllamaChatMessage> messages)  throws OllamaBaseException, IOException, InterruptedException{ |     public OllamaChatResult chat(String model, List<OllamaChatMessage> messages) throws OllamaBaseException, IOException, InterruptedException { | ||||||
|         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model); |         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model); | ||||||
|         return chat(builder.withMessages(messages).build()); |         return chat(builder.withMessages(messages).build()); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
|      * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. |      * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. | ||||||
|    *  |      * <p> | ||||||
|      * Hint: the OllamaChatRequestModel#getStream() property is not implemented. |      * Hint: the OllamaChatRequestModel#getStream() property is not implemented. | ||||||
|      * |      * | ||||||
|      * @param request request object to be sent to the server |      * @param request request object to be sent to the server | ||||||
| @@ -449,13 +488,31 @@ public class OllamaAPI { | |||||||
|      * @throws IOException          in case the responseStream can not be read |      * @throws IOException          in case the responseStream can not be read | ||||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen |      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||||
|      */ |      */ | ||||||
|   public OllamaChatResult chat(OllamaChatRequestModel request)  throws OllamaBaseException, IOException, InterruptedException{ |     public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException { | ||||||
|     OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); |         return chat(request, null); | ||||||
|     //TODO: implement async way |     } | ||||||
|     if(request.isStream()){ |  | ||||||
|       throw new UnsupportedOperationException("Streamed chat responses are not implemented yet"); |     /** | ||||||
|  |      * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. | ||||||
|  |      * <p> | ||||||
|  |      * Hint: the OllamaChatRequestModel#getStream() property is not implemented. | ||||||
|  |      * | ||||||
|  |      * @param request       request object to be sent to the server | ||||||
|  |      * @param streamHandler callback handler to handle the last message from stream (caution: all previous messages from stream will be concatenated) | ||||||
|  |      * @return the chat result containing the model's response and the chat history | ||||||
|  |      * @throws OllamaBaseException  any response code than 200 has been returned | ||||||
|  |      * @throws IOException          in case the responseStream can not be read | ||||||
|  |      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||||
|  |      */ | ||||||
|  |     public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { | ||||||
|  |         OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); | ||||||
|  |         OllamaResult result; | ||||||
|  |         if (streamHandler != null) { | ||||||
|  |             request.setStream(true); | ||||||
|  |             result = requestCaller.call(request, streamHandler); | ||||||
|  |         } else { | ||||||
|  |             result = requestCaller.callSync(request); | ||||||
|         } |         } | ||||||
|     OllamaResult result = requestCaller.generateSync(request); |  | ||||||
|         return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages()); |         return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages()); | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @@ -469,24 +526,19 @@ public class OllamaAPI { | |||||||
|         return Base64.getEncoder().encodeToString(bytes); |         return Base64.getEncoder().encodeToString(bytes); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|   private static byte[] loadImageBytesFromUrl(String imageUrl) |     private OllamaResult generateSyncForOllamaRequestModel( | ||||||
|       throws IOException, URISyntaxException { |             OllamaGenerateRequestModel ollamaRequestModel, OllamaStreamHandler streamHandler) | ||||||
|     URL url = new URI(imageUrl).toURL(); |  | ||||||
|     try (InputStream in = url.openStream(); |  | ||||||
|         ByteArrayOutputStream out = new ByteArrayOutputStream()) { |  | ||||||
|       byte[] buffer = new byte[1024]; |  | ||||||
|       int bytesRead; |  | ||||||
|       while ((bytesRead = in.read(buffer)) != -1) { |  | ||||||
|         out.write(buffer, 0, bytesRead); |  | ||||||
|       } |  | ||||||
|       return out.toByteArray(); |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   private OllamaResult generateSyncForOllamaRequestModel(OllamaRequestModel ollamaRequestModel) |  | ||||||
|             throws OllamaBaseException, IOException, InterruptedException { |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|         OllamaGenerateEndpointCaller requestCaller = new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); |         OllamaGenerateEndpointCaller requestCaller = | ||||||
|         return requestCaller.generateSync(ollamaRequestModel); |                 new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); | ||||||
|  |         OllamaResult result; | ||||||
|  |         if (streamHandler != null) { | ||||||
|  |             ollamaRequestModel.setStream(true); | ||||||
|  |             result = requestCaller.call(ollamaRequestModel, streamHandler); | ||||||
|  |         } else { | ||||||
|  |             result = requestCaller.callSync(ollamaRequestModel); | ||||||
|  |         } | ||||||
|  |         return result; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
|   | |||||||
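
Taken together, the hunks above split the generate and chat entry points into streaming variants (taking an `OllamaStreamHandler`) plus non-streaming convenience overloads, and add an embeddings overload that accepts an `OllamaEmbeddingsRequestModel`. A minimal sketch of calling the new overloads; the import paths and the model name are assumptions, not taken from this diff:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;

import java.util.List;

public class NewOverloadsSketch {

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Streaming chat: passing a handler makes chat() set stream=true and call the
        // handler as the response streams in (each call gets the text received so far).
        OllamaStreamHandler streamHandler = System.out::println;
        OllamaChatRequestModel request = OllamaChatRequestBuilder.getInstance("llama2")
                .withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
                .build();
        OllamaChatResult chatResult = ollamaAPI.chat(request, streamHandler);
        System.out.println("Final answer: " + chatResult.getResponse());

        // Embeddings via the new request-model based overload.
        List<Double> embedding = ollamaAPI.generateEmbeddings(
                new OllamaEmbeddingsRequestModel("llama2", "The sky is blue."));
        System.out.println("Embedding dimensions: " + embedding.size());
    }
}
```
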
| @@ -0,0 +1,7 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core; | ||||||
|  |  | ||||||
|  | import java.util.function.Consumer; | ||||||
|  |  | ||||||
|  | public interface OllamaStreamHandler extends Consumer<String>{ | ||||||
|  |     void accept(String message); | ||||||
|  | } | ||||||
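
Since `OllamaStreamHandler` is simply a `Consumer<String>` with a single `accept(String)` method, lambdas, method references, or small classes can all be plugged into the streaming APIs. A hedged illustration (a hypothetical class, not part of this changeset) that keeps only the latest streamed text, which, judging by the `ConsoleOutputStreamHandler` below, is the response concatenated so far:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;

// Hypothetical handler for illustration: remembers the most recent streamed text
// and counts how many stream callbacks were received.
public class LatestMessageStreamHandler implements OllamaStreamHandler {

    private volatile String latest = "";
    private int callbackCount = 0;

    @Override
    public void accept(String message) {
        latest = message; // appears to be the full response streamed so far
        callbackCount++;
    }

    public String getLatest() {
        return latest;
    }

    public int getCallbackCount() {
        return callbackCount;
    }
}
```
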
| @@ -0,0 +1,14 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.impl; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; | ||||||
|  |  | ||||||
|  | public class ConsoleOutputStreamHandler implements OllamaStreamHandler { | ||||||
|  |     private final StringBuffer response = new StringBuffer(); | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void accept(String message) { | ||||||
|  |         String substr = message.substring(response.length()); | ||||||
|  |         response.append(substr); | ||||||
|  |         System.out.print(substr); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -1,6 +1,8 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models; | package io.github.amithkoujalgi.ollama4j.core.models; | ||||||
|  |  | ||||||
| import com.fasterxml.jackson.annotation.JsonProperty; | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
| import lombok.Data; | import lombok.Data; | ||||||
|  |  | ||||||
| @Data | @Data | ||||||
| @@ -34,4 +36,13 @@ public class Model { | |||||||
|     return name.split(":")[1]; |     return name.split(":")[1]; | ||||||
|   } |   } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |   public String toString() { | ||||||
|  |     try { | ||||||
|  |       return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); | ||||||
|  |     } catch (JsonProcessingException e) { | ||||||
|  |       throw new RuntimeException(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
| } | } | ||||||
|   | |||||||
| @@ -2,7 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models; | |||||||
|  |  | ||||||
| import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | ||||||
| import com.fasterxml.jackson.annotation.JsonProperty; | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
| import java.util.Map; | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
| import lombok.Data; | import lombok.Data; | ||||||
|  |  | ||||||
| @Data | @Data | ||||||
| @@ -16,5 +17,14 @@ public class ModelDetail { | |||||||
|   private String parameters; |   private String parameters; | ||||||
|   private String template; |   private String template; | ||||||
|   private String system; |   private String system; | ||||||
|   private Map<String, String> details; |   private ModelMeta details; | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |   public String toString() { | ||||||
|  |     try { | ||||||
|  |       return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); | ||||||
|  |     } catch (JsonProcessingException e) { | ||||||
|  |       throw new RuntimeException(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -2,6 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models; | |||||||
|  |  | ||||||
| import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | ||||||
| import com.fasterxml.jackson.annotation.JsonProperty; | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
| import lombok.Data; | import lombok.Data; | ||||||
|  |  | ||||||
| @Data | @Data | ||||||
| @@ -21,4 +23,13 @@ public class ModelMeta { | |||||||
|  |  | ||||||
|   @JsonProperty("quantization_level") |   @JsonProperty("quantization_level") | ||||||
|   private String quantizationLevel; |   private String quantizationLevel; | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |   public String toString() { | ||||||
|  |     try { | ||||||
|  |       return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); | ||||||
|  |     } catch (JsonProcessingException e) { | ||||||
|  |       throw new RuntimeException(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,6 +1,8 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models; | package io.github.amithkoujalgi.ollama4j.core.models; | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
| import java.io.BufferedReader; | import java.io.BufferedReader; | ||||||
| import java.io.IOException; | import java.io.IOException; | ||||||
| @@ -22,7 +24,7 @@ import lombok.Getter; | |||||||
| @SuppressWarnings("unused") | @SuppressWarnings("unused") | ||||||
| public class OllamaAsyncResultCallback extends Thread { | public class OllamaAsyncResultCallback extends Thread { | ||||||
|   private final HttpRequest.Builder requestBuilder; |   private final HttpRequest.Builder requestBuilder; | ||||||
|   private final OllamaRequestModel ollamaRequestModel; |   private final OllamaGenerateRequestModel ollamaRequestModel; | ||||||
|   private final Queue<String> queue = new LinkedList<>(); |   private final Queue<String> queue = new LinkedList<>(); | ||||||
|   private String result; |   private String result; | ||||||
|   private boolean isDone; |   private boolean isDone; | ||||||
| @@ -47,7 +49,7 @@ public class OllamaAsyncResultCallback extends Thread { | |||||||
|  |  | ||||||
|   public OllamaAsyncResultCallback( |   public OllamaAsyncResultCallback( | ||||||
|       HttpRequest.Builder requestBuilder, |       HttpRequest.Builder requestBuilder, | ||||||
|       OllamaRequestModel ollamaRequestModel, |       OllamaGenerateRequestModel ollamaRequestModel, | ||||||
|       long requestTimeoutSeconds) { |       long requestTimeoutSeconds) { | ||||||
|     this.requestBuilder = requestBuilder; |     this.requestBuilder = requestBuilder; | ||||||
|     this.ollamaRequestModel = ollamaRequestModel; |     this.ollamaRequestModel = ollamaRequestModel; | ||||||
| @@ -87,8 +89,8 @@ public class OllamaAsyncResultCallback extends Thread { | |||||||
|             queue.add(ollamaResponseModel.getError()); |             queue.add(ollamaResponseModel.getError()); | ||||||
|             responseBuffer.append(ollamaResponseModel.getError()); |             responseBuffer.append(ollamaResponseModel.getError()); | ||||||
|           } else { |           } else { | ||||||
|             OllamaResponseModel ollamaResponseModel = |             OllamaGenerateResponseModel ollamaResponseModel = | ||||||
|                 Utils.getObjectMapper().readValue(line, OllamaResponseModel.class); |                 Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); | ||||||
|             queue.add(ollamaResponseModel.getResponse()); |             queue.add(ollamaResponseModel.getResponse()); | ||||||
|             if (!ollamaResponseModel.isDone()) { |             if (!ollamaResponseModel.isDone()) { | ||||||
|               responseBuffer.append(ollamaResponseModel.getResponse()); |               responseBuffer.append(ollamaResponseModel.getResponse()); | ||||||
|   | |||||||
| @@ -0,0 +1,35 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models; | ||||||
|  |  | ||||||
|  | import java.util.Map; | ||||||
|  | import com.fasterxml.jackson.annotation.JsonInclude; | ||||||
|  | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import com.fasterxml.jackson.databind.annotation.JsonSerialize; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.BooleanToJsonFormatFlagSerializer; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  | import lombok.Data; | ||||||
|  |  | ||||||
|  | @Data | ||||||
|  | @JsonInclude(JsonInclude.Include.NON_NULL) | ||||||
|  | public abstract class OllamaCommonRequestModel { | ||||||
|  |    | ||||||
|  |   protected String model;   | ||||||
|  |   @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class) | ||||||
|  |   @JsonProperty(value = "format") | ||||||
|  |   protected Boolean returnFormatJson; | ||||||
|  |   protected Map<String, Object> options; | ||||||
|  |   protected String template; | ||||||
|  |   protected boolean stream; | ||||||
|  |   @JsonProperty(value = "keep_alive") | ||||||
|  |   protected String keepAlive; | ||||||
|  |  | ||||||
|  |    | ||||||
|  |   public String toString() { | ||||||
|  |     try { | ||||||
|  |       return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); | ||||||
|  |     } catch (JsonProcessingException e) { | ||||||
|  |       throw new RuntimeException(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  | } | ||||||
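The new OllamaCommonRequestModel gathers the fields shared by the chat and generate requests: model, options, template, stream, keep_alive, and a Boolean returnFormatJson that is serialized through BooleanToJsonFormatFlagSerializer into the `format` field. Below is a minimal sketch of what the serialized form looks like; the anonymous subclass is used purely for illustration, since the concrete subclasses only appear further down in this changeset.

```java
import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;

public class CommonRequestSketch {
    public static void main(String[] args) {
        // Anonymous subclass only for demonstration; real code uses the
        // chat/generate request models introduced later in this diff.
        OllamaCommonRequestModel request = new OllamaCommonRequestModel() {};
        request.setModel("llama2");
        request.setReturnFormatJson(true); // serialized as "format": "json"
        request.setKeepAlive("5m");        // serialized as "keep_alive": "5m"

        // toString() pretty-prints the request as JSON via the shared ObjectMapper;
        // fields left null are omitted because of @JsonInclude(Include.NON_NULL).
        System.out.println(request);
    }
}
```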
| @@ -1,39 +0,0 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models; |  | ||||||
|  |  | ||||||
| import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; |  | ||||||
|  |  | ||||||
| import com.fasterxml.jackson.core.JsonProcessingException; |  | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; |  | ||||||
|  |  | ||||||
| import java.util.List; |  | ||||||
| import java.util.Map; |  | ||||||
| import lombok.Data; |  | ||||||
|  |  | ||||||
| @Data |  | ||||||
| public class OllamaRequestModel implements OllamaRequestBody{ |  | ||||||
|  |  | ||||||
|   private String model; |  | ||||||
|   private String prompt; |  | ||||||
|   private List<String> images; |  | ||||||
|   private Map<String, Object> options; |  | ||||||
|  |  | ||||||
|   public OllamaRequestModel(String model, String prompt) { |  | ||||||
|     this.model = model; |  | ||||||
|     this.prompt = prompt; |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   public OllamaRequestModel(String model, String prompt, List<String> images) { |  | ||||||
|     this.model = model; |  | ||||||
|     this.prompt = prompt; |  | ||||||
|     this.images = images; |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   public String toString() { |  | ||||||
|     try { |  | ||||||
|       return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); |  | ||||||
|     } catch (JsonProcessingException e) { |  | ||||||
|       throw new RuntimeException(e); |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
| } |  | ||||||
| @@ -3,7 +3,10 @@ package io.github.amithkoujalgi.ollama4j.core.models.chat; | |||||||
| import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; | import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; | ||||||
|  |  | ||||||
| import com.fasterxml.jackson.core.JsonProcessingException; | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
| import java.io.File; | import com.fasterxml.jackson.databind.annotation.JsonSerialize; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.FileToBase64Serializer; | ||||||
|  |  | ||||||
| import java.util.List; | import java.util.List; | ||||||
| import lombok.AllArgsConstructor; | import lombok.AllArgsConstructor; | ||||||
| import lombok.Data; | import lombok.Data; | ||||||
| @@ -28,7 +31,8 @@ public class OllamaChatMessage { | |||||||
|     @NonNull |     @NonNull | ||||||
|     private String content; |     private String content; | ||||||
|  |  | ||||||
|     private List<File> images; |     @JsonSerialize(using = FileToBase64Serializer.class) | ||||||
|  |     private List<byte[]> images; | ||||||
|      |      | ||||||
|       @Override |       @Override | ||||||
|   public String toString() { |   public String toString() { | ||||||
|   | |||||||
| @@ -1,16 +1,26 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models.chat; | package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||||
|  |  | ||||||
| import java.io.File; | import java.io.File; | ||||||
|  | import java.io.IOException; | ||||||
|  | import java.net.URISyntaxException; | ||||||
|  | import java.nio.file.Files; | ||||||
| import java.util.ArrayList; | import java.util.ArrayList; | ||||||
| import java.util.List; | import java.util.List; | ||||||
|  | import java.util.stream.Collectors; | ||||||
|  |  | ||||||
|  | import org.slf4j.Logger; | ||||||
|  | import org.slf4j.LoggerFactory; | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Options; | import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  |  | ||||||
| /** | /** | ||||||
|  * Helper class for creating {@link OllamaChatRequestModel} objects using the builder-pattern. |  * Helper class for creating {@link OllamaChatRequestModel} objects using the builder-pattern. | ||||||
|  */ |  */ | ||||||
| public class OllamaChatRequestBuilder { | public class OllamaChatRequestBuilder { | ||||||
|  |  | ||||||
|  |     private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class); | ||||||
|  |  | ||||||
|     private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages){ |     private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages){ | ||||||
|         request = new OllamaChatRequestModel(model, messages); |         request = new OllamaChatRequestModel(model, messages); | ||||||
|     } |     } | ||||||
| @@ -29,9 +39,41 @@ public class OllamaChatRequestBuilder { | |||||||
|         request = new OllamaChatRequestModel(request.getModel(), new ArrayList<>()); |         request = new OllamaChatRequestModel(request.getModel(), new ArrayList<>()); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, File... images){ |     public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images){ | ||||||
|         List<OllamaChatMessage> messages = this.request.getMessages(); |         List<OllamaChatMessage> messages = this.request.getMessages(); | ||||||
|         messages.add(new OllamaChatMessage(role,content,List.of(images))); |  | ||||||
|  |         List<byte[]> binaryImages = images.stream().map(file -> { | ||||||
|  |             try { | ||||||
|  |                 return Files.readAllBytes(file.toPath()); | ||||||
|  |             } catch (IOException e) { | ||||||
|  |                 LOG.warn(String.format("File '%s' could not be accessed, will not add to message!",file.toPath()), e); | ||||||
|  |                 return new byte[0]; | ||||||
|  |             } | ||||||
|  |         }).collect(Collectors.toList()); | ||||||
|  |  | ||||||
|  |         messages.add(new OllamaChatMessage(role,content,binaryImages)); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, String... imageUrls){ | ||||||
|  |         List<OllamaChatMessage> messages = this.request.getMessages(); | ||||||
|  |         List<byte[]> binaryImages = null; | ||||||
|  |         if(imageUrls.length>0){ | ||||||
|  |             binaryImages = new ArrayList<>(); | ||||||
|  |             for (String imageUrl : imageUrls) { | ||||||
|  |                 try{ | ||||||
|  |                     binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl)); | ||||||
|  |                 } | ||||||
|  |                     catch (URISyntaxException e){ | ||||||
|  |                         LOG.warn(String.format("URL '%s' could not be accessed, will not add to message!",imageUrl), e); | ||||||
|  |                 } | ||||||
|  |                 catch (IOException e){ | ||||||
|  |                     LOG.warn(String.format("Content of URL '%s' could not be read, will not add to message!",imageUrl), e); | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |          | ||||||
|  |         messages.add(new OllamaChatMessage(role,content,binaryImages)); | ||||||
|         return this; |         return this; | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @@ -41,12 +83,12 @@ public class OllamaChatRequestBuilder { | |||||||
|     } |     } | ||||||
|  |  | ||||||
|     public OllamaChatRequestBuilder withOptions(Options options){ |     public OllamaChatRequestBuilder withOptions(Options options){ | ||||||
|         this.request.setOptions(options); |         this.request.setOptions(options.getOptionsMap()); | ||||||
|         return this; |         return this; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     public OllamaChatRequestBuilder withFormat(String format){ |     public OllamaChatRequestBuilder withGetJsonResponse(){ | ||||||
|         this.request.setFormat(format); |         this.request.setReturnFormatJson(true); | ||||||
|         return this; |         return this; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|   | |||||||
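With the builder now loading images itself (from files or URLs) and exposing withGetJsonResponse() and withOptions(Options), a chat request can be assembled fluently. The sketch below assumes the builder's existing getInstance(String model) factory and build() method (not visible in this hunk, but analogous to the generate and embeddings builders added in this changeset) and the OllamaChatMessageRole.USER constant; the image URL is a hypothetical placeholder.

```java
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class ChatRequestSketch {
    public static void main(String[] args) {
        OllamaChatRequestModel request = OllamaChatRequestBuilder.getInstance("llava")
                // Image URLs are downloaded and Base64-encoded by the builder;
                // unreachable URLs are logged and skipped instead of failing the build.
                .withMessage(OllamaChatMessageRole.USER,
                        "What is in this picture?",
                        "https://example.com/some-image.jpg")
                .withGetJsonResponse()                     // adds "format": "json"
                .withOptions(new OptionsBuilder().build())
                .build();

        System.out.println(request); // pretty-printed JSON body for /api/chat
    }
}
```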
| @@ -1,47 +1,39 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models.chat; | package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||||
|  |  | ||||||
| import java.util.List; | import java.util.List; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel; | ||||||
| import com.fasterxml.jackson.core.JsonProcessingException; |  | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Options; |  | ||||||
|  |  | ||||||
| import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; | import lombok.Getter; | ||||||
|  | import lombok.Setter; | ||||||
| import lombok.AllArgsConstructor; |  | ||||||
| import lombok.Data; |  | ||||||
| import lombok.NonNull; |  | ||||||
| import lombok.RequiredArgsConstructor; |  | ||||||
|  |  | ||||||
| /** | /** | ||||||
|  * Defines a Request to use against the ollama /api/chat endpoint. |  * Defines a Request to use against the ollama /api/chat endpoint. | ||||||
|  * |  * | ||||||
|  * @see <a |  * @see <a href= | ||||||
|  *     href="https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate |  *      "https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate | ||||||
|  *      Chat Completion</a> |  *      Chat Completion</a> | ||||||
|  */ |  */ | ||||||
| @Data | @Getter | ||||||
| @AllArgsConstructor | @Setter | ||||||
| @RequiredArgsConstructor | public class OllamaChatRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody { | ||||||
| public class OllamaChatRequestModel implements OllamaRequestBody { |  | ||||||
|  |  | ||||||
|   @NonNull private String model; |   private List<OllamaChatMessage> messages; | ||||||
|  |  | ||||||
|   @NonNull private List<OllamaChatMessage> messages; |   public OllamaChatRequestModel() {} | ||||||
|  |  | ||||||
|   private String format; |   public OllamaChatRequestModel(String model, List<OllamaChatMessage> messages) { | ||||||
|   private Options options; |     this.model = model; | ||||||
|   private String template; |     this.messages = messages; | ||||||
|   private boolean stream; |   } | ||||||
|   private String keepAlive; |  | ||||||
|  |  | ||||||
|   @Override |   @Override | ||||||
|   public String toString() { |   public boolean equals(Object o) { | ||||||
|     try { |     if (!(o instanceof OllamaChatRequestModel)) { | ||||||
|       return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); |       return false; | ||||||
|     } catch (JsonProcessingException e) { |  | ||||||
|       throw new RuntimeException(e); |  | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     return this.toString().equals(o.toString()); | ||||||
|   } |   } | ||||||
|  |  | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,16 +1,18 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models.chat; | package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||||
|  |  | ||||||
| import com.fasterxml.jackson.annotation.JsonProperty; | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
|  | import lombok.Data; | ||||||
|  |  | ||||||
| import java.util.List; | import java.util.List; | ||||||
| import lombok.Data; |  | ||||||
|  |  | ||||||
| @Data | @Data | ||||||
| public class OllamaChatResponseModel { | public class OllamaChatResponseModel { | ||||||
|     private String model; |     private String model; | ||||||
|     private @JsonProperty("created_at") String createdAt; |     private @JsonProperty("created_at") String createdAt; | ||||||
|  |     private @JsonProperty("done_reason") String doneReason; | ||||||
|     private OllamaChatMessage message; |     private OllamaChatMessage message; | ||||||
|     private boolean done; |     private boolean done; | ||||||
|  |     private String error; | ||||||
|     private List<Integer> context; |     private List<Integer> context; | ||||||
|     private @JsonProperty("total_duration") Long totalDuration; |     private @JsonProperty("total_duration") Long totalDuration; | ||||||
|     private @JsonProperty("load_duration") Long loadDuration; |     private @JsonProperty("load_duration") Long loadDuration; | ||||||
|   | |||||||
| @@ -0,0 +1,31 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||||
|  |  | ||||||
|  | import java.util.ArrayList; | ||||||
|  | import java.util.List; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; | ||||||
|  |  | ||||||
|  | public class OllamaChatStreamObserver { | ||||||
|  |  | ||||||
|  |     private OllamaStreamHandler streamHandler; | ||||||
|  |  | ||||||
|  |     private List<OllamaChatResponseModel> responseParts = new ArrayList<>(); | ||||||
|  |  | ||||||
|  |     private String message = ""; | ||||||
|  |  | ||||||
|  |     public OllamaChatStreamObserver(OllamaStreamHandler streamHandler) { | ||||||
|  |         this.streamHandler = streamHandler; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public void notify(OllamaChatResponseModel currentResponsePart){ | ||||||
|  |         responseParts.add(currentResponsePart); | ||||||
|  |         handleCurrentResponsePart(currentResponsePart); | ||||||
|  |     } | ||||||
|  |      | ||||||
|  |     protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart){ | ||||||
|  |         message = message + currentResponsePart.getMessage().getContent(); | ||||||
|  |         streamHandler.accept(message); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | } | ||||||
| @@ -1,4 +1,4 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models; | package io.github.amithkoujalgi.ollama4j.core.models.embeddings; | ||||||
| 
 | 
 | ||||||
| import com.fasterxml.jackson.annotation.JsonProperty; | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
| 
 | 
 | ||||||
| @@ -7,7 +7,7 @@ import lombok.Data; | |||||||
| 
 | 
 | ||||||
| @SuppressWarnings("unused") | @SuppressWarnings("unused") | ||||||
| @Data | @Data | ||||||
| public class EmbeddingResponse { | public class OllamaEmbeddingResponseModel { | ||||||
|     @JsonProperty("embedding") |     @JsonProperty("embedding") | ||||||
|     private List<Double> embedding; |     private List<Double> embedding; | ||||||
| } | } | ||||||
| @@ -0,0 +1,31 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.embeddings; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||||
|  |  | ||||||
|  | public class OllamaEmbeddingsRequestBuilder { | ||||||
|  |  | ||||||
|  |     private OllamaEmbeddingsRequestBuilder(String model, String prompt){ | ||||||
|  |         request = new OllamaEmbeddingsRequestModel(model, prompt); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private OllamaEmbeddingsRequestModel request; | ||||||
|  |  | ||||||
|  |     public static OllamaEmbeddingsRequestBuilder getInstance(String model, String prompt){ | ||||||
|  |         return new OllamaEmbeddingsRequestBuilder(model, prompt); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaEmbeddingsRequestModel build(){ | ||||||
|  |         return request; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaEmbeddingsRequestBuilder withOptions(Options options){ | ||||||
|  |         this.request.setOptions(options.getOptionsMap()); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaEmbeddingsRequestBuilder withKeepAlive(String keepAlive){ | ||||||
|  |         this.request.setKeepAlive(keepAlive); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | } | ||||||
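A minimal usage sketch of the new embeddings request builder, based only on the methods added here (sending the request to /api/embeddings happens in OllamaAPI, which is outside this excerpt):

```java
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class EmbeddingsRequestSketch {
    public static void main(String[] args) {
        OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder
                .getInstance("nomic-embed-text", "Ollama4j builds typed requests for the Ollama server.")
                .withOptions(new OptionsBuilder().build()) // default (empty) options
                .withKeepAlive("5m")                       // serialized as "keep_alive"
                .build();

        System.out.println(request); // pretty-printed JSON via toString()
    }
}
```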
| @@ -0,0 +1,33 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.embeddings; | ||||||
|  |  | ||||||
|  | import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; | ||||||
|  | import java.util.Map; | ||||||
|  | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import lombok.Data; | ||||||
|  | import lombok.NoArgsConstructor; | ||||||
|  | import lombok.NonNull; | ||||||
|  | import lombok.RequiredArgsConstructor; | ||||||
|  |  | ||||||
|  | @Data | ||||||
|  | @RequiredArgsConstructor | ||||||
|  | @NoArgsConstructor | ||||||
|  | public class OllamaEmbeddingsRequestModel { | ||||||
|  |   @NonNull | ||||||
|  |   private String model; | ||||||
|  |   @NonNull | ||||||
|  |   private String prompt; | ||||||
|  |  | ||||||
|  |   protected Map<String, Object> options; | ||||||
|  |   @JsonProperty(value = "keep_alive") | ||||||
|  |   private String keepAlive; | ||||||
|  |  | ||||||
|  |   @Override | ||||||
|  |   public String toString() { | ||||||
|  |     try { | ||||||
|  |       return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); | ||||||
|  |     } catch (JsonProcessingException e) { | ||||||
|  |       throw new RuntimeException(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -0,0 +1,55 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.generate; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * Helper class for creating {@link io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel}  | ||||||
|  |  * objects using the builder-pattern. | ||||||
|  |  */ | ||||||
|  | public class OllamaGenerateRequestBuilder { | ||||||
|  |  | ||||||
|  |     private OllamaGenerateRequestBuilder(String model, String prompt){ | ||||||
|  |         request = new OllamaGenerateRequestModel(model, prompt); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private OllamaGenerateRequestModel request; | ||||||
|  |  | ||||||
|  |     public static OllamaGenerateRequestBuilder getInstance(String model){ | ||||||
|  |         return new OllamaGenerateRequestBuilder(model,""); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaGenerateRequestModel build(){ | ||||||
|  |         return request; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaGenerateRequestBuilder withPrompt(String prompt){ | ||||||
|  |         request.setPrompt(prompt); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |      | ||||||
|  |     public OllamaGenerateRequestBuilder withGetJsonResponse(){ | ||||||
|  |         this.request.setReturnFormatJson(true); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaGenerateRequestBuilder withOptions(Options options){ | ||||||
|  |         this.request.setOptions(options.getOptionsMap()); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaGenerateRequestBuilder withTemplate(String template){ | ||||||
|  |         this.request.setTemplate(template); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaGenerateRequestBuilder withStreaming(){ | ||||||
|  |         this.request.setStream(true); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaGenerateRequestBuilder withKeepAlive(String keepAlive){ | ||||||
|  |         this.request.setKeepAlive(keepAlive); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | } | ||||||
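Correspondingly, a generate request can be put together with the new builder. This sketch only uses the methods defined above and prints the body that would be posted to /api/generate:

```java
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class GenerateRequestSketch {
    public static void main(String[] args) {
        OllamaGenerateRequestModel request = OllamaGenerateRequestBuilder.getInstance("llama2")
                .withPrompt("Why is the sky blue?")
                .withGetJsonResponse()                     // "format": "json"
                .withOptions(new OptionsBuilder().build())
                .withStreaming()                           // sets stream = true
                .withKeepAlive("5m")
                .build();

        System.out.println(request); // pretty-printed JSON via the shared toString()
    }
}
```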
| @@ -0,0 +1,46 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.generate; | ||||||
|  |  | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||||
|  |  | ||||||
|  | import java.util.List; | ||||||
|  |  | ||||||
|  | import lombok.Getter; | ||||||
|  | import lombok.Setter; | ||||||
|  |  | ||||||
|  | @Getter | ||||||
|  | @Setter | ||||||
|  | public class OllamaGenerateRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody{ | ||||||
|  |  | ||||||
|  |   private String prompt; | ||||||
|  |   private List<String> images; | ||||||
|  |  | ||||||
|  |   private String system; | ||||||
|  |   private String context; | ||||||
|  |   private boolean raw; | ||||||
|  |  | ||||||
|  |   public OllamaGenerateRequestModel() { | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   public OllamaGenerateRequestModel(String model, String prompt) { | ||||||
|  |     this.model = model; | ||||||
|  |     this.prompt = prompt; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   public OllamaGenerateRequestModel(String model, String prompt, List<String> images) { | ||||||
|  |     this.model = model; | ||||||
|  |     this.prompt = prompt; | ||||||
|  |     this.images = images; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |   public boolean equals(Object o) { | ||||||
|  |     if (!(o instanceof OllamaGenerateRequestModel)) { | ||||||
|  |       return false; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     return this.toString().equals(o.toString()); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  | } | ||||||
| @@ -1,4 +1,4 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models; | package io.github.amithkoujalgi.ollama4j.core.models.generate; | ||||||
| 
 | 
 | ||||||
| import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | ||||||
| import com.fasterxml.jackson.annotation.JsonProperty; | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
| @@ -8,7 +8,7 @@ import lombok.Data; | |||||||
| 
 | 
 | ||||||
| @Data | @Data | ||||||
| @JsonIgnoreProperties(ignoreUnknown = true) | @JsonIgnoreProperties(ignoreUnknown = true) | ||||||
| public class OllamaResponseModel { | public class OllamaGenerateResponseModel { | ||||||
|     private String model; |     private String model; | ||||||
|     private @JsonProperty("created_at") String createdAt; |     private @JsonProperty("created_at") String createdAt; | ||||||
|     private String response; |     private String response; | ||||||
| @@ -0,0 +1,31 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.generate; | ||||||
|  |  | ||||||
|  | import java.util.ArrayList; | ||||||
|  | import java.util.List; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; | ||||||
|  |  | ||||||
|  | public class OllamaGenerateStreamObserver { | ||||||
|  |  | ||||||
|  |     private OllamaStreamHandler streamHandler; | ||||||
|  |  | ||||||
|  |     private List<OllamaGenerateResponseModel> responseParts = new ArrayList<>(); | ||||||
|  |  | ||||||
|  |     private String message = ""; | ||||||
|  |  | ||||||
|  |     public OllamaGenerateStreamObserver(OllamaStreamHandler streamHandler) { | ||||||
|  |         this.streamHandler = streamHandler; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public void notify(OllamaGenerateResponseModel currentResponsePart){ | ||||||
|  |         responseParts.add(currentResponsePart); | ||||||
|  |         handleCurrentResponsePart(currentResponsePart); | ||||||
|  |     } | ||||||
|  |      | ||||||
|  |     protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart){ | ||||||
|  |         message = message + currentResponsePart.getResponse(); | ||||||
|  |         streamHandler.accept(message); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | } | ||||||
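Both stream observers accumulate the partial responses and hand the growing text to an OllamaStreamHandler after every chunk. A small sketch of that behaviour, assuming OllamaStreamHandler is a functional interface that accepts the accumulated String (its definition is not part of this excerpt):

```java
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver;

public class StreamObserverSketch {
    public static void main(String[] args) {
        // Prints the accumulated response every time a new part arrives.
        OllamaGenerateStreamObserver observer =
                new OllamaGenerateStreamObserver(System.out::println);

        OllamaGenerateResponseModel first = new OllamaGenerateResponseModel();
        first.setResponse("The sky appears blue ");
        observer.notify(first);  // prints "The sky appears blue "

        OllamaGenerateResponseModel second = new OllamaGenerateResponseModel();
        second.setResponse("because of Rayleigh scattering.");
        observer.notify(second); // prints the whole text accumulated so far
    }
}
```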
| @@ -1,23 +0,0 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models.request; |  | ||||||
|  |  | ||||||
| import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; |  | ||||||
|  |  | ||||||
| import com.fasterxml.jackson.core.JsonProcessingException; |  | ||||||
| import lombok.AllArgsConstructor; |  | ||||||
| import lombok.Data; |  | ||||||
|  |  | ||||||
| @Data |  | ||||||
| @AllArgsConstructor |  | ||||||
| public class ModelEmbeddingsRequest { |  | ||||||
|   private String model; |  | ||||||
|   private String prompt; |  | ||||||
|  |  | ||||||
|   @Override |  | ||||||
|   public String toString() { |  | ||||||
|     try { |  | ||||||
|       return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); |  | ||||||
|     } catch (JsonProcessingException e) { |  | ||||||
|       throw new RuntimeException(e); |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
| } |  | ||||||
| @@ -1,21 +1,28 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models.request; | package io.github.amithkoujalgi.ollama4j.core.models.request; | ||||||
|  |  | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
| import org.slf4j.Logger; | import org.slf4j.Logger; | ||||||
| import org.slf4j.LoggerFactory; | import org.slf4j.LoggerFactory; | ||||||
|  |  | ||||||
| import com.fasterxml.jackson.core.JsonProcessingException; | import java.io.IOException; | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; |  | ||||||
|  |  | ||||||
| /** | /** | ||||||
|  * Specialization class for requests |  * Specialization class for requests | ||||||
|  */ |  */ | ||||||
| public class OllamaChatEndpointCaller extends OllamaEndpointCaller{ | public class OllamaChatEndpointCaller extends OllamaEndpointCaller { | ||||||
|  |  | ||||||
|     private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class); |     private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class); | ||||||
|  |  | ||||||
|  |     private OllamaChatStreamObserver streamObserver; | ||||||
|  |  | ||||||
|     public OllamaChatEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) { |     public OllamaChatEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) { | ||||||
|         super(host, basicAuth, requestTimeoutSeconds, verbose); |         super(host, basicAuth, requestTimeoutSeconds, verbose); | ||||||
|     } |     } | ||||||
| @@ -30,15 +37,19 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{ | |||||||
|         try { |         try { | ||||||
|             OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class); |             OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class); | ||||||
|             responseBuffer.append(ollamaResponseModel.getMessage().getContent()); |             responseBuffer.append(ollamaResponseModel.getMessage().getContent()); | ||||||
|  |             if (streamObserver != null) { | ||||||
|  |                 streamObserver.notify(ollamaResponseModel); | ||||||
|  |             } | ||||||
|             return ollamaResponseModel.isDone(); |             return ollamaResponseModel.isDone(); | ||||||
|         } catch (JsonProcessingException e) { |         } catch (JsonProcessingException e) { | ||||||
|                     LOG.error("Error parsing the Ollama chat response!",e); |             LOG.error("Error parsing the Ollama chat response!", e); | ||||||
|             return true; |             return true; | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler) | ||||||
|  |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|      |         streamObserver = new OllamaChatStreamObserver(streamHandler); | ||||||
|  |         return super.callSync(body); | ||||||
|  |     } | ||||||
| } | } | ||||||
|   | |||||||
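Putting the pieces together, the chat endpoint caller can now stream: the new call(body, streamHandler) wires an OllamaChatStreamObserver in front of the existing synchronous read loop. The sketch below calls it directly; in practice this is driven through OllamaAPI, whose chat methods are outside this excerpt. It assumes the OllamaChatMessageRole.USER constant, that a null BasicAuth means no authentication, and that OllamaStreamHandler can be supplied as a lambda over the accumulated String.

```java
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaChatEndpointCaller;

import java.util.ArrayList;
import java.util.List;

public class StreamingChatSketch {
    public static void main(String[] args) throws Exception {
        List<OllamaChatMessage> messages = new ArrayList<>();
        messages.add(new OllamaChatMessage(OllamaChatMessageRole.USER,
                "Why is the sky blue?", null)); // no images attached

        OllamaChatRequestModel request = new OllamaChatRequestModel("llama2", messages);
        request.setStream(true); // ask the server for a streamed response

        OllamaChatEndpointCaller caller =
                new OllamaChatEndpointCaller("http://localhost:11434", null, 60, false);

        // The handler receives the accumulated assistant message after each chunk.
        OllamaResult result = caller.call(request, partial -> System.out.println(partial));
        System.out.println("Final response: " + result.getResponse());
    }
}
```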
| @@ -1,5 +1,15 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models.request; | package io.github.amithkoujalgi.ollama4j.core.models.request; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  | import org.slf4j.Logger; | ||||||
|  | import org.slf4j.LoggerFactory; | ||||||
|  |  | ||||||
| import java.io.BufferedReader; | import java.io.BufferedReader; | ||||||
| import java.io.IOException; | import java.io.IOException; | ||||||
| import java.io.InputStream; | import java.io.InputStream; | ||||||
| @@ -12,17 +22,6 @@ import java.nio.charset.StandardCharsets; | |||||||
| import java.time.Duration; | import java.time.Duration; | ||||||
| import java.util.Base64; | import java.util.Base64; | ||||||
|  |  | ||||||
| import org.slf4j.Logger; |  | ||||||
| import org.slf4j.LoggerFactory; |  | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; |  | ||||||
|  |  | ||||||
| /** | /** | ||||||
|  * Abstract helper class to call the ollama api server. |  * Abstract helper class to call the ollama api server. | ||||||
|  */ |  */ | ||||||
| @@ -56,8 +55,7 @@ public abstract class OllamaEndpointCaller { | |||||||
|      * @throws IOException          in case the responseStream can not be read |      * @throws IOException          in case the responseStream can not be read | ||||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen |      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||||
|      */ |      */ | ||||||
|     public OllamaResult generateSync(OllamaRequestBody body)  throws OllamaBaseException, IOException, InterruptedException{ |     public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException { | ||||||
|  |  | ||||||
|         // Create Request |         // Create Request | ||||||
|         long startTime = System.currentTimeMillis(); |         long startTime = System.currentTimeMillis(); | ||||||
|         HttpClient httpClient = HttpClient.newHttpClient(); |         HttpClient httpClient = HttpClient.newHttpClient(); | ||||||
| @@ -71,7 +69,6 @@ public abstract class OllamaEndpointCaller { | |||||||
|         HttpResponse<InputStream> response = |         HttpResponse<InputStream> response = | ||||||
|                 httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); |                 httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); | ||||||
|  |  | ||||||
|          |  | ||||||
|         int statusCode = response.statusCode(); |         int statusCode = response.statusCode(); | ||||||
|         InputStream responseBodyStream = response.body(); |         InputStream responseBodyStream = response.body(); | ||||||
|         StringBuilder responseBuffer = new StringBuilder(); |         StringBuilder responseBuffer = new StringBuilder(); | ||||||
| @@ -90,8 +87,13 @@ public abstract class OllamaEndpointCaller { | |||||||
|                             Utils.getObjectMapper() |                             Utils.getObjectMapper() | ||||||
|                                     .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class); |                                     .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class); | ||||||
|                     responseBuffer.append(ollamaResponseModel.getError()); |                     responseBuffer.append(ollamaResponseModel.getError()); | ||||||
|  |                 } else if (statusCode == 400) { | ||||||
|  |                     LOG.warn("Status code: 400 (Bad Request)"); | ||||||
|  |                     OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, | ||||||
|  |                             OllamaErrorResponseModel.class); | ||||||
|  |                     responseBuffer.append(ollamaResponseModel.getError()); | ||||||
|                 } else { |                 } else { | ||||||
|           boolean finished = parseResponseAndAddToBuffer(line,responseBuffer); |                     boolean finished = parseResponseAndAddToBuffer(line, responseBuffer); | ||||||
|                     if (finished) { |                     if (finished) { | ||||||
|                         break; |                         break; | ||||||
|                     } |                     } | ||||||
|   | |||||||
| @@ -1,18 +1,25 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models.request; | package io.github.amithkoujalgi.ollama4j.core.models.request; | ||||||
|  |  | ||||||
|  | import java.io.IOException; | ||||||
| import org.slf4j.Logger; | import org.slf4j.Logger; | ||||||
| import org.slf4j.LoggerFactory; | import org.slf4j.LoggerFactory; | ||||||
|  |  | ||||||
| import com.fasterxml.jackson.core.JsonProcessingException; | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; | import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaResponseModel; | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  |  | ||||||
| public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{ | public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{ | ||||||
|  |  | ||||||
|     private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class); |     private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class); | ||||||
|  |  | ||||||
|  |     private OllamaGenerateStreamObserver streamObserver; | ||||||
|  |  | ||||||
|     public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) { |     public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) { | ||||||
|         super(host, basicAuth, requestTimeoutSeconds, verbose);    |         super(host, basicAuth, requestTimeoutSeconds, verbose);    | ||||||
|     } |     } | ||||||
| @@ -25,8 +32,11 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{ | |||||||
|     @Override |     @Override | ||||||
|     protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) { |     protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) { | ||||||
|                 try { |                 try { | ||||||
|                     OllamaResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaResponseModel.class); |                     OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); | ||||||
|                     responseBuffer.append(ollamaResponseModel.getResponse()); |                     responseBuffer.append(ollamaResponseModel.getResponse()); | ||||||
|  |                     if(streamObserver != null) { | ||||||
|  |                         streamObserver.notify(ollamaResponseModel); | ||||||
|  |                     } | ||||||
|                     return ollamaResponseModel.isDone(); |                     return ollamaResponseModel.isDone(); | ||||||
|                 } catch (JsonProcessingException e) { |                 } catch (JsonProcessingException e) { | ||||||
|                     LOG.error("Error parsing the Ollama chat response!",e); |                     LOG.error("Error parsing the Ollama chat response!",e); | ||||||
| @@ -34,7 +44,11 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{ | |||||||
|                 }          |                 }          | ||||||
|     } |     } | ||||||
|  |  | ||||||
|      |     public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler) | ||||||
|  |         throws OllamaBaseException, IOException, InterruptedException { | ||||||
|  |     streamObserver = new OllamaGenerateStreamObserver(streamHandler); | ||||||
|  |     return super.callSync(body); | ||||||
|  |     } | ||||||
|      |      | ||||||
|      |      | ||||||
| } | } | ||||||
|   | |||||||
| @@ -8,57 +8,75 @@ package io.github.amithkoujalgi.ollama4j.core.types; | |||||||
|  */ |  */ | ||||||
| @SuppressWarnings("ALL") | @SuppressWarnings("ALL") | ||||||
| public class OllamaModelType { | public class OllamaModelType { | ||||||
|  |     public static final String GEMMA = "gemma"; | ||||||
|     public static final String LLAMA2 = "llama2"; |     public static final String LLAMA2 = "llama2"; | ||||||
|  |     public static final String LLAMA3 = "llama3"; | ||||||
|     public static final String MISTRAL = "mistral"; |     public static final String MISTRAL = "mistral"; | ||||||
|   public static final String LLAVA = "llava"; |  | ||||||
|     public static final String MIXTRAL = "mixtral"; |     public static final String MIXTRAL = "mixtral"; | ||||||
|   public static final String STARLING_LM = "starling-lm"; |     public static final String LLAVA = "llava"; | ||||||
|  |     public static final String LLAVA_PHI3 = "llava-phi3"; | ||||||
|     public static final String NEURAL_CHAT = "neural-chat"; |     public static final String NEURAL_CHAT = "neural-chat"; | ||||||
|     public static final String CODELLAMA = "codellama"; |     public static final String CODELLAMA = "codellama"; | ||||||
|   public static final String LLAMA2_UNCENSORED = "llama2-uncensored"; |  | ||||||
|     public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral"; |     public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral"; | ||||||
|  |     public static final String MISTRAL_OPENORCA = "mistral-openorca"; | ||||||
|  |     public static final String LLAMA2_UNCENSORED = "llama2-uncensored"; | ||||||
|  |     public static final String PHI = "phi"; | ||||||
|  |     public static final String PHI3 = "phi3"; | ||||||
|     public static final String ORCA_MINI = "orca-mini"; |     public static final String ORCA_MINI = "orca-mini"; | ||||||
|  |     public static final String DEEPSEEK_CODER = "deepseek-coder"; | ||||||
|  |     public static final String DOLPHIN_MISTRAL = "dolphin-mistral"; | ||||||
|     public static final String VICUNA = "vicuna"; |     public static final String VICUNA = "vicuna"; | ||||||
|     public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored"; |     public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored"; | ||||||
|   public static final String PHIND_CODELLAMA = "phind-codellama"; |  | ||||||
|   public static final String PHI = "phi"; |  | ||||||
|     public static final String ZEPHYR = "zephyr"; |     public static final String ZEPHYR = "zephyr"; | ||||||
|  |     public static final String OPENHERMES = "openhermes"; | ||||||
|  |     public static final String QWEN = "qwen"; | ||||||
|     public static final String WIZARDCODER = "wizardcoder"; |     public static final String WIZARDCODER = "wizardcoder"; | ||||||
|   public static final String MISTRAL_OPENORCA = "mistral-openorca"; |  | ||||||
|   public static final String NOUS_HERMES = "nous-hermes"; |  | ||||||
|   public static final String DEEPSEEK_CODER = "deepseek-coder"; |  | ||||||
|   public static final String WIZARD_MATH = "wizard-math"; |  | ||||||
|     public static final String LLAMA2_CHINESE = "llama2-chinese"; |     public static final String LLAMA2_CHINESE = "llama2-chinese"; | ||||||
|   public static final String FALCON = "falcon"; |     public static final String TINYLLAMA = "tinyllama"; | ||||||
|   public static final String ORCA2 = "orca2"; |     public static final String PHIND_CODELLAMA = "phind-codellama"; | ||||||
|   public static final String STABLE_BELUGA = "stable-beluga"; |  | ||||||
|   public static final String CODEUP = "codeup"; |  | ||||||
|   public static final String EVERYTHINGLM = "everythinglm"; |  | ||||||
|   public static final String MEDLLAMA2 = "medllama2"; |  | ||||||
|   public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored"; |  | ||||||
|   public static final String STARCODER = "starcoder"; |  | ||||||
|   public static final String DOLPHIN22_MISTRAL = "dolphin2.2-mistral"; |  | ||||||
|     public static final String OPENCHAT = "openchat"; |     public static final String OPENCHAT = "openchat"; | ||||||
|   public static final String WIZARD_VICUNA = "wizard-vicuna"; |     public static final String ORCA2 = "orca2"; | ||||||
|   public static final String OPENHERMES25_MISTRAL = "openhermes2.5-mistral"; |     public static final String FALCON = "falcon"; | ||||||
|   public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2"; |     public static final String WIZARD_MATH = "wizard-math"; | ||||||
|  |     public static final String TINYDOLPHIN = "tinydolphin"; | ||||||
|  |     public static final String NOUS_HERMES = "nous-hermes"; | ||||||
|     public static final String YI = "yi"; |     public static final String YI = "yi"; | ||||||
|   public static final String YARN_MISTRAL = "yarn-mistral"; |     public static final String DOLPHIN_PHI = "dolphin-phi"; | ||||||
|   public static final String SAMANTHA_MISTRAL = "samantha-mistral"; |     public static final String STARLING_LM = "starling-lm"; | ||||||
|   public static final String SQLCODER = "sqlcoder"; |     public static final String STARCODER = "starcoder"; | ||||||
|   public static final String YARN_LLAMA2 = "yarn-llama2"; |     public static final String CODEUP = "codeup"; | ||||||
|   public static final String MEDITRON = "meditron"; |     public static final String MEDLLAMA2 = "medllama2"; | ||||||
|   public static final String STABLELM_ZEPHYR = "stablelm-zephyr"; |     public static final String STABLE_CODE = "stable-code"; | ||||||
|   public static final String OPENHERMES2_MISTRAL = "openhermes2-mistral"; |     public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored"; | ||||||
|   public static final String DEEPSEEK_LLM = "deepseek-llm"; |  | ||||||
|   public static final String MISTRALLITE = "mistrallite"; |  | ||||||
|   public static final String DOLPHIN21_MISTRAL = "dolphin2.1-mistral"; |  | ||||||
|   public static final String WIZARDLM = "wizardlm"; |  | ||||||
|   public static final String CODEBOOGA = "codebooga"; |  | ||||||
|   public static final String MAGICODER = "magicoder"; |  | ||||||
|   public static final String GOLIATH = "goliath"; |  | ||||||
|   public static final String NEXUSRAVEN = "nexusraven"; |  | ||||||
|   public static final String ALFRED = "alfred"; |  | ||||||
|   public static final String XWINLM = "xwinlm"; |  | ||||||
|     public static final String BAKLLAVA = "bakllava"; |     public static final String BAKLLAVA = "bakllava"; | ||||||
|  |     public static final String EVERYTHINGLM = "everythinglm"; | ||||||
|  |     public static final String SOLAR = "solar"; | ||||||
|  |     public static final String STABLE_BELUGA = "stable-beluga"; | ||||||
|  |     public static final String SQLCODER = "sqlcoder"; | ||||||
|  |     public static final String YARN_MISTRAL = "yarn-mistral"; | ||||||
|  |     public static final String NOUS_HERMES2_MIXTRAL = "nous-hermes2-mixtral"; | ||||||
|  |     public static final String SAMANTHA_MISTRAL = "samantha-mistral"; | ||||||
|  |     public static final String STABLELM_ZEPHYR = "stablelm-zephyr"; | ||||||
|  |     public static final String MEDITRON = "meditron"; | ||||||
|  |     public static final String WIZARD_VICUNA = "wizard-vicuna"; | ||||||
|  |     public static final String STABLELM2 = "stablelm2"; | ||||||
|  |     public static final String MAGICODER = "magicoder"; | ||||||
|  |     public static final String YARN_LLAMA2 = "yarn-llama2"; | ||||||
|  |     public static final String NOUS_HERMES2 = "nous-hermes2"; | ||||||
|  |     public static final String DEEPSEEK_LLM = "deepseek-llm"; | ||||||
|  |     public static final String LLAMA_PRO = "llama-pro"; | ||||||
|  |     public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2"; | ||||||
|  |     public static final String CODEBOOGA = "codebooga"; | ||||||
|  |     public static final String MISTRALLITE = "mistrallite"; | ||||||
|  |     public static final String NEXUSRAVEN = "nexusraven"; | ||||||
|  |     public static final String GOLIATH = "goliath"; | ||||||
|  |     public static final String NOMIC_EMBED_TEXT = "nomic-embed-text"; | ||||||
|  |     public static final String NOTUX = "notux"; | ||||||
|  |     public static final String ALFRED = "alfred"; | ||||||
|  |     public static final String MEGADOLPHIN = "megadolphin"; | ||||||
|  |     public static final String WIZARDLM = "wizardlm"; | ||||||
|  |     public static final String XWINLM = "xwinlm"; | ||||||
|  |     public static final String NOTUS = "notus"; | ||||||
|  |     public static final String DUCKDB_NSQL = "duckdb-nsql"; | ||||||
|  |     public static final String ALL_MINILM = "all-minilm"; | ||||||
| } | } | ||||||
|   | |||||||
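The model type catalogue is re-sorted and extended (gemma, llama3, phi3, llava-phi3, qwen, tinyllama, nomic-embed-text, all-minilm, and more). The constants are plain model-name strings, so they can be passed wherever a model name is expected, for example to the builders shown earlier:

```java
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;

public class ModelTypeSketch {
    public static void main(String[] args) {
        System.out.println(OllamaModelType.LLAMA3);           // "llama3"
        System.out.println(OllamaModelType.NOMIC_EMBED_TEXT); // "nomic-embed-text"
    }
}
```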
| @@ -0,0 +1,21 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.utils; | ||||||
|  |  | ||||||
|  | import java.io.IOException; | ||||||
|  |  | ||||||
|  | import com.fasterxml.jackson.core.JsonGenerator; | ||||||
|  | import com.fasterxml.jackson.databind.JsonSerializer; | ||||||
|  | import com.fasterxml.jackson.databind.SerializerProvider; | ||||||
|  |  | ||||||
|  | public class BooleanToJsonFormatFlagSerializer extends JsonSerializer<Boolean>{ | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void serialize(Boolean value, JsonGenerator gen, SerializerProvider serializers) throws IOException { | ||||||
|  |             gen.writeString("json"); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public boolean isEmpty(SerializerProvider provider,Boolean value){ | ||||||
|  |         return !value; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | } | ||||||
| @@ -0,0 +1,21 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.utils; | ||||||
|  |  | ||||||
|  | import java.io.IOException; | ||||||
|  | import java.util.Base64; | ||||||
|  | import java.util.Collection; | ||||||
|  |  | ||||||
|  | import com.fasterxml.jackson.core.JsonGenerator; | ||||||
|  | import com.fasterxml.jackson.databind.JsonSerializer; | ||||||
|  | import com.fasterxml.jackson.databind.SerializerProvider; | ||||||
|  |  | ||||||
|  | public class FileToBase64Serializer extends JsonSerializer<Collection<byte[]>> { | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void serialize(Collection<byte[]> value, JsonGenerator jsonGenerator, SerializerProvider serializers) throws IOException { | ||||||
|  |         jsonGenerator.writeStartArray(); | ||||||
|  |         for (byte[] file : value) { | ||||||
|  |             jsonGenerator.writeString(Base64.getEncoder().encodeToString(file)); | ||||||
|  |         } | ||||||
|  |         jsonGenerator.writeEndArray(); | ||||||
|  |     } | ||||||
|  | } | ||||||
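FileToBase64Serializer turns each byte[] in a collection into a Base64 string, which is the shape the Ollama API expects for the images array. A self-contained sketch; the holder class is purely hypothetical and exists only to show the serializer in isolation:

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.github.amithkoujalgi.ollama4j.core.utils.FileToBase64Serializer;

import java.nio.charset.StandardCharsets;
import java.util.List;

public class Base64SerializerSketch {

    // Hypothetical holder type, only to demonstrate the serializer.
    static class ImagesHolder {
        @JsonSerialize(using = FileToBase64Serializer.class)
        public List<byte[]> images;
    }

    public static void main(String[] args) throws Exception {
        ImagesHolder holder = new ImagesHolder();
        holder.images = List.of("fake image bytes".getBytes(StandardCharsets.UTF_8));

        // Prints {"images":["ZmFrZSBpbWFnZSBieXRlcw=="]}
        System.out.println(new ObjectMapper().writeValueAsString(holder));
    }
}
```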
| @@ -1,9 +1,30 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.utils; | package io.github.amithkoujalgi.ollama4j.core.utils; | ||||||
|  |  | ||||||
|  | import java.io.ByteArrayOutputStream; | ||||||
|  | import java.io.IOException; | ||||||
|  | import java.io.InputStream; | ||||||
|  | import java.net.URI; | ||||||
|  | import java.net.URISyntaxException; | ||||||
|  | import java.net.URL; | ||||||
|  |  | ||||||
| import com.fasterxml.jackson.databind.ObjectMapper; | import com.fasterxml.jackson.databind.ObjectMapper; | ||||||
|  |  | ||||||
| public class Utils { | public class Utils { | ||||||
|   public static ObjectMapper getObjectMapper() { |   public static ObjectMapper getObjectMapper() { | ||||||
|     return new ObjectMapper(); |     return new ObjectMapper(); | ||||||
|   } |   } | ||||||
|  |  | ||||||
|  |   public static byte[] loadImageBytesFromUrl(String imageUrl) | ||||||
|  |       throws IOException, URISyntaxException { | ||||||
|  |     URL url = new URI(imageUrl).toURL(); | ||||||
|  |     try (InputStream in = url.openStream(); | ||||||
|  |         ByteArrayOutputStream out = new ByteArrayOutputStream()) { | ||||||
|  |       byte[] buffer = new byte[1024]; | ||||||
|  |       int bytesRead; | ||||||
|  |       while ((bytesRead = in.read(buffer)) != -1) { | ||||||
|  |         out.write(buffer, 0, bytesRead); | ||||||
|  |       } | ||||||
|  |       return out.toByteArray(); | ||||||
|  |     } | ||||||
|  |   } | ||||||
| } | } | ||||||
|   | |||||||
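Utils.loadImageBytesFromUrl is what the chat builder uses to fetch image URLs before they are Base64-encoded. It can also be called directly; the URL below is a hypothetical placeholder:

```java
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

import java.io.IOException;
import java.net.URISyntaxException;

public class LoadImageSketch {
    public static void main(String[] args) {
        try {
            byte[] image = Utils.loadImageBytesFromUrl("https://example.com/cat.png");
            System.out.println("Downloaded " + image.length + " bytes");
        } catch (IOException | URISyntaxException e) {
            // Mirrors the builder's behaviour: log the problem and move on.
            System.err.println("Could not load image: " + e.getMessage());
        }
    }
}
```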
| @@ -4,11 +4,14 @@ import static org.junit.jupiter.api.Assertions.*; | |||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
| import java.io.File; | import java.io.File; | ||||||
| import java.io.IOException; | import java.io.IOException; | ||||||
| @@ -23,8 +26,13 @@ import lombok.Data; | |||||||
| import org.junit.jupiter.api.BeforeEach; | import org.junit.jupiter.api.BeforeEach; | ||||||
| import org.junit.jupiter.api.Order; | import org.junit.jupiter.api.Order; | ||||||
| import org.junit.jupiter.api.Test; | import org.junit.jupiter.api.Test; | ||||||
|  | import org.slf4j.Logger; | ||||||
|  | import org.slf4j.LoggerFactory; | ||||||
|  |  | ||||||
| class TestRealAPIs { | class TestRealAPIs { | ||||||
|  |  | ||||||
|  |   private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class); | ||||||
|  |  | ||||||
|   OllamaAPI ollamaAPI; |   OllamaAPI ollamaAPI; | ||||||
|   Config config; |   Config config; | ||||||
|  |  | ||||||
| @@ -55,7 +63,7 @@ class TestRealAPIs { | |||||||
|     } catch (HttpConnectTimeoutException e) { |     } catch (HttpConnectTimeoutException e) { | ||||||
|       fail(e.getMessage()); |       fail(e.getMessage()); | ||||||
|     } catch (Exception e) { |     } catch (Exception e) { | ||||||
|       throw new RuntimeException(e); |       fail(e); | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -67,7 +75,7 @@ class TestRealAPIs { | |||||||
|       assertNotNull(ollamaAPI.listModels()); |       assertNotNull(ollamaAPI.listModels()); | ||||||
|       ollamaAPI.listModels().forEach(System.out::println); |       ollamaAPI.listModels().forEach(System.out::println); | ||||||
|     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { |     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||||
|       throw new RuntimeException(e); |       fail(e); | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -82,7 +90,20 @@ class TestRealAPIs { | |||||||
|               .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel())); |               .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel())); | ||||||
|       assertTrue(found); |       assertTrue(found); | ||||||
|     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { |     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||||
|       throw new RuntimeException(e); |       fail(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @Test | ||||||
|  |   @Order(3) | ||||||
|  |   void testListDetails() { | ||||||
|  |     testEndpointReachability(); | ||||||
|  |     try { | ||||||
|  |       ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel()); | ||||||
|  |       assertNotNull(modelDetails); | ||||||
|  |       System.out.println(modelDetails); | ||||||
|  |     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||||
|  |       fail(e); | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -100,7 +121,33 @@ class TestRealAPIs { | |||||||
|       assertNotNull(result.getResponse()); |       assertNotNull(result.getResponse()); | ||||||
|       assertFalse(result.getResponse().isEmpty()); |       assertFalse(result.getResponse().isEmpty()); | ||||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { |     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|       throw new RuntimeException(e); |       fail(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @Test | ||||||
|  |   @Order(3) | ||||||
|  |   void testAskModelWithDefaultOptionsStreamed() { | ||||||
|  |     testEndpointReachability(); | ||||||
|  |     try { | ||||||
|  |  | ||||||
|  |       StringBuffer sb = new StringBuffer(""); | ||||||
|  |  | ||||||
|  |       OllamaResult result = ollamaAPI.generate(config.getModel(), | ||||||
|  |           "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||||
|  |           new OptionsBuilder().build(), (s) -> { | ||||||
|  |             LOG.info(s); | ||||||
|  |             String substring = s.substring(sb.toString().length(), s.length()); | ||||||
|  |             LOG.info(substring); | ||||||
|  |             sb.append(substring); | ||||||
|  |           }); | ||||||
|  |  | ||||||
|  |       assertNotNull(result); | ||||||
|  |       assertNotNull(result.getResponse()); | ||||||
|  |       assertFalse(result.getResponse().isEmpty()); | ||||||
|  |       assertEquals(sb.toString().trim(), result.getResponse().trim()); | ||||||
|  |     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|  |       fail(e); | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -118,7 +165,7 @@ class TestRealAPIs { | |||||||
|       assertNotNull(result.getResponse()); |       assertNotNull(result.getResponse()); | ||||||
|       assertFalse(result.getResponse().isEmpty()); |       assertFalse(result.getResponse().isEmpty()); | ||||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { |     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|       throw new RuntimeException(e); |       fail(e); | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -138,7 +185,7 @@ class TestRealAPIs { | |||||||
|       assertFalse(chatResult.getResponse().isBlank()); |       assertFalse(chatResult.getResponse().isBlank()); | ||||||
|       assertEquals(4,chatResult.getChatHistory().size()); |       assertEquals(4,chatResult.getChatHistory().size()); | ||||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { |     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|       throw new RuntimeException(e); |       fail(e); | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -148,17 +195,92 @@ class TestRealAPIs { | |||||||
|     testEndpointReachability(); |     testEndpointReachability(); | ||||||
|     try { |     try { | ||||||
|       OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); |       OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); | ||||||
|       OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") |       OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, | ||||||
|              .withMessage(OllamaChatMessageRole.USER,"What is the capital of France? And what's France's connection with Mona Lisa?") |           "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") | ||||||
|  |           .withMessage(OllamaChatMessageRole.USER, | ||||||
|  |               "What is the capital of France? And what's France's connection with Mona Lisa?") | ||||||
|           .build(); |           .build(); | ||||||
|  |  | ||||||
|       OllamaChatResult chatResult = ollamaAPI.chat(requestModel); |       OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||||
|       assertNotNull(chatResult); |       assertNotNull(chatResult); | ||||||
|       assertFalse(chatResult.getResponse().isBlank()); |       assertFalse(chatResult.getResponse().isBlank()); | ||||||
|       assertTrue(chatResult.getResponse().startsWith("NI")); |       assertTrue(chatResult.getResponse().startsWith("NI")); | ||||||
|       assertEquals(3,chatResult.getChatHistory().size()); |       assertEquals(3, chatResult.getChatHistory().size()); | ||||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { |     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|       throw new RuntimeException(e); |       fail(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @Test | ||||||
|  |   @Order(3) | ||||||
|  |   void testChatWithStream() { | ||||||
|  |     testEndpointReachability(); | ||||||
|  |     try { | ||||||
|  |       OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); | ||||||
|  |       OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, | ||||||
|  |               "What is the capital of France? And what's France's connection with Mona Lisa?") | ||||||
|  |           .build(); | ||||||
|  |  | ||||||
|  |       StringBuffer sb = new StringBuffer(""); | ||||||
|  |  | ||||||
|  |       OllamaChatResult chatResult = ollamaAPI.chat(requestModel,(s) -> { | ||||||
|  |         LOG.info(s); | ||||||
|  |         String substring = s.substring(sb.toString().length(), s.length()); | ||||||
|  |         LOG.info(substring); | ||||||
|  |         sb.append(substring); | ||||||
|  |       }); | ||||||
|  |       assertNotNull(chatResult); | ||||||
|  |       assertEquals(sb.toString().trim(), chatResult.getResponse().trim()); | ||||||
|  |     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|  |       fail(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @Test | ||||||
|  |   @Order(3) | ||||||
|  |   void testChatWithImageFromFileWithHistoryRecognition() { | ||||||
|  |     testEndpointReachability(); | ||||||
|  |     try { | ||||||
|  |       OllamaChatRequestBuilder builder = | ||||||
|  |           OllamaChatRequestBuilder.getInstance(config.getImageModel()); | ||||||
|  |       OllamaChatRequestModel requestModel = | ||||||
|  |           builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", | ||||||
|  |               List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); | ||||||
|  |  | ||||||
|  |       OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |       assertNotNull(chatResult); | ||||||
|  |       assertNotNull(chatResult.getResponse()); | ||||||
|  |  | ||||||
|  |       builder.reset(); | ||||||
|  |  | ||||||
|  |       requestModel = | ||||||
|  |           builder.withMessages(chatResult.getChatHistory()) | ||||||
|  |             .withMessage(OllamaChatMessageRole.USER, "What's the dog's breed?").build(); | ||||||
|  |  | ||||||
|  |       chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |       assertNotNull(chatResult); | ||||||
|  |       assertNotNull(chatResult.getResponse()); | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|  |       fail(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @Test | ||||||
|  |   @Order(3) | ||||||
|  |   void testChatWithImageFromURL() { | ||||||
|  |     testEndpointReachability(); | ||||||
|  |     try { | ||||||
|  |       OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel()); | ||||||
|  |       OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", | ||||||
|  |       "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg") | ||||||
|  |              .build(); | ||||||
|  |  | ||||||
|  |       OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |       assertNotNull(chatResult); | ||||||
|  |     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|  |       fail(e); | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -178,7 +300,31 @@ class TestRealAPIs { | |||||||
|       assertNotNull(result.getResponse()); |       assertNotNull(result.getResponse()); | ||||||
|       assertFalse(result.getResponse().isEmpty()); |       assertFalse(result.getResponse().isEmpty()); | ||||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { |     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|       throw new RuntimeException(e); |       fail(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @Test | ||||||
|  |   @Order(3) | ||||||
|  |   void testAskModelWithOptionsAndImageFilesStreamed() { | ||||||
|  |     testEndpointReachability(); | ||||||
|  |     File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg"); | ||||||
|  |     try { | ||||||
|  |       StringBuffer sb = new StringBuffer(""); | ||||||
|  |  | ||||||
|  |       OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(), | ||||||
|  |           "What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> { | ||||||
|  |             LOG.info(s); | ||||||
|  |             String substring = s.substring(sb.toString().length(), s.length()); | ||||||
|  |             LOG.info(substring); | ||||||
|  |             sb.append(substring); | ||||||
|  |           }); | ||||||
|  |       assertNotNull(result); | ||||||
|  |       assertNotNull(result.getResponse()); | ||||||
|  |       assertFalse(result.getResponse().isEmpty()); | ||||||
|  |       assertEquals(sb.toString().trim(), result.getResponse().trim()); | ||||||
|  |     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|  |       fail(e); | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -198,7 +344,24 @@ class TestRealAPIs { | |||||||
|       assertNotNull(result.getResponse()); |       assertNotNull(result.getResponse()); | ||||||
|       assertFalse(result.getResponse().isEmpty()); |       assertFalse(result.getResponse().isEmpty()); | ||||||
|     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { |     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||||
|       throw new RuntimeException(e); |       fail(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @Test | ||||||
|  |   @Order(3) | ||||||
|  |   public void testEmbedding() { | ||||||
|  |     testEndpointReachability(); | ||||||
|  |     try { | ||||||
|  |       OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder | ||||||
|  |           .getInstance(config.getModel(), "What is the capital of France?").build(); | ||||||
|  |  | ||||||
|  |       List<Double> embeddings = ollamaAPI.generateEmbeddings(request); | ||||||
|  |  | ||||||
|  |       assertNotNull(embeddings); | ||||||
|  |       assertFalse(embeddings.isEmpty()); | ||||||
|  |     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|  |       fail(e); | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
| } | } | ||||||
|   | |||||||
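The streamed tests added above (`testAskModelWithDefaultOptionsStreamed`, `testChatWithStream`, `testAskModelWithOptionsAndImageFilesStreamed`) all rely on the stream handler receiving the cumulative response text on each call, so the lambda extracts only the newly appended suffix before logging it. A minimal sketch of that pattern outside the test class, assuming a local Ollama host and an illustrative model name (neither is taken from this changeset):

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class StreamingGenerateExample {
    public static void main(String[] args) throws Exception {
        // Assumed host and model; adjust to your setup.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
        StringBuilder seen = new StringBuilder();

        OllamaResult result = ollamaAPI.generate("llama2",
                "Why is the sky blue?", new OptionsBuilder().build(), (s) -> {
                    // The handler gets the full response so far; print only the new part.
                    String newText = s.substring(seen.length());
                    System.out.print(newText);
                    seen.append(newText);
                });

        System.out.println();
        System.out.println("Full response length: " + result.getResponse().length());
    }
}
```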
| @@ -0,0 +1,35 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.unittests.jackson; | ||||||
|  |  | ||||||
|  | import static org.junit.jupiter.api.Assertions.assertEquals; | ||||||
|  | import static org.junit.jupiter.api.Assertions.fail; | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import com.fasterxml.jackson.databind.ObjectMapper; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  |  | ||||||
|  | public abstract class AbstractRequestSerializationTest<T> { | ||||||
|  |  | ||||||
|  |     protected ObjectMapper mapper = Utils.getObjectMapper(); | ||||||
|  |  | ||||||
|  |     protected String serializeRequest(T req) { | ||||||
|  |         try { | ||||||
|  |             return mapper.writeValueAsString(req); | ||||||
|  |         } catch (JsonProcessingException e) { | ||||||
|  |             fail("Could not serialize request!", e); | ||||||
|  |             return null; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     protected T deserializeRequest(String jsonRequest, Class<T> requestClass) { | ||||||
|  |         try { | ||||||
|  |             return mapper.readValue(jsonRequest, requestClass); | ||||||
|  |         } catch (JsonProcessingException e) { | ||||||
|  |             fail("Could not deserialize jsonRequest!", e); | ||||||
|  |             return null; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     protected void assertEqualsAfterUnmarshalling(T unmarshalledRequest, | ||||||
|  |         T req) { | ||||||
|  |         assertEquals(req, unmarshalledRequest); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,113 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.unittests.jackson; | ||||||
|  |  | ||||||
|  | import static org.junit.jupiter.api.Assertions.assertEquals; | ||||||
|  |  | ||||||
|  | import java.io.File; | ||||||
|  | import java.util.List; | ||||||
|  |  | ||||||
|  | import org.json.JSONObject; | ||||||
|  | import org.junit.jupiter.api.BeforeEach; | ||||||
|  | import org.junit.jupiter.api.Test; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
|  |  | ||||||
|  | public class TestChatRequestSerialization extends AbstractRequestSerializationTest<OllamaChatRequestModel>{ | ||||||
|  |  | ||||||
|  |     private OllamaChatRequestBuilder builder; | ||||||
|  |  | ||||||
|  |     @BeforeEach | ||||||
|  |     public void init() { | ||||||
|  |         builder = OllamaChatRequestBuilder.getInstance("DummyModel"); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestOnlyMandatoryFields() { | ||||||
|  |         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build(); | ||||||
|  |         String jsonRequest = serializeRequest(req); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestMultipleMessages() { | ||||||
|  |         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt") | ||||||
|  |         .withMessage(OllamaChatMessageRole.USER, "Some prompt") | ||||||
|  |         .build(); | ||||||
|  |         String jsonRequest = serializeRequest(req); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestWithMessageAndImage() { | ||||||
|  |         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt", | ||||||
|  |                 List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build(); | ||||||
|  |         String jsonRequest = serializeRequest(req); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestWithOptions() { | ||||||
|  |         OptionsBuilder b = new OptionsBuilder(); | ||||||
|  |         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") | ||||||
|  |             .withOptions(b.setMirostat(1).build()) | ||||||
|  |             .withOptions(b.setTemperature(1L).build()) | ||||||
|  |             .withOptions(b.setMirostatEta(1L).build()) | ||||||
|  |             .withOptions(b.setMirostatTau(1L).build()) | ||||||
|  |             .withOptions(b.setNumGpu(1).build()) | ||||||
|  |             .withOptions(b.setSeed(1).build()) | ||||||
|  |             .withOptions(b.setTopK(1).build()) | ||||||
|  |             .withOptions(b.setTopP(1).build()) | ||||||
|  |             .build(); | ||||||
|  |  | ||||||
|  |         String jsonRequest = serializeRequest(req); | ||||||
|  |         OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaChatRequestModel.class); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserializeRequest, req); | ||||||
|  |         assertEquals(1, deserializeRequest.getOptions().get("mirostat")); | ||||||
|  |         assertEquals(1.0, deserializeRequest.getOptions().get("temperature")); | ||||||
|  |         assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_eta")); | ||||||
|  |         assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_tau")); | ||||||
|  |         assertEquals(1, deserializeRequest.getOptions().get("num_gpu")); | ||||||
|  |         assertEquals(1, deserializeRequest.getOptions().get("seed")); | ||||||
|  |         assertEquals(1, deserializeRequest.getOptions().get("top_k")); | ||||||
|  |         assertEquals(1.0, deserializeRequest.getOptions().get("top_p")); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testWithJsonFormat() { | ||||||
|  |         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") | ||||||
|  |                 .withGetJsonResponse().build(); | ||||||
|  |  | ||||||
|  |         String jsonRequest = serializeRequest(req); | ||||||
|  |         // No Jackson deserialization here, as the format property is not a boolean; we skip the | ||||||
|  |         // round-trip since deserialization of requests is never used in real code anyway. | ||||||
|  |         JSONObject jsonObject = new JSONObject(jsonRequest); | ||||||
|  |         String requestFormatProperty = jsonObject.getString("format"); | ||||||
|  |         assertEquals("json", requestFormatProperty); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testWithTemplate() { | ||||||
|  |         OllamaChatRequestModel req = builder.withTemplate("System Template") | ||||||
|  |             .build(); | ||||||
|  |         String jsonRequest = serializeRequest(req); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaChatRequestModel.class), req); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testWithStreaming() { | ||||||
|  |         OllamaChatRequestModel req = builder.withStreaming().build(); | ||||||
|  |         String jsonRequest = serializeRequest(req); | ||||||
|  |         assertEquals(deserializeRequest(jsonRequest, OllamaChatRequestModel.class).isStream(), true); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testWithKeepAlive() { | ||||||
|  |         String expectedKeepAlive = "5m"; | ||||||
|  |         OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive) | ||||||
|  |             .build(); | ||||||
|  |         String jsonRequest = serializeRequest(req); | ||||||
|  |         assertEquals(deserializeRequest(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive); | ||||||
|  |     } | ||||||
|  | } | ||||||
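The chat request builder exercised above is the same one the new `testChatWithImageFromFileWithHistoryRecognition` test reuses for a multi-turn exchange via `reset()` and `withMessages(...)`. A hedged sketch of that conversational pattern against a live endpoint, assuming a local Ollama host, an illustrative vision model name, and a placeholder image path (none of which come from this changeset):

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
import java.io.File;
import java.util.List;

public class MultiTurnImageChatExample {
    public static void main(String[] args) throws Exception {
        // Assumed host, model, and image path; adjust to your setup.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("llava");

        OllamaChatRequestModel first = builder
                .withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
                        List.of(new File("/path/to/dog-on-a-boat.jpg")))
                .build();
        OllamaChatResult firstResult = ollamaAPI.chat(first);
        System.out.println(firstResult.getResponse());

        // Reuse the builder for the follow-up turn, carrying the history forward.
        builder.reset();
        OllamaChatRequestModel followUp = builder
                .withMessages(firstResult.getChatHistory())
                .withMessage(OllamaChatMessageRole.USER, "What's the dog's breed?")
                .build();
        System.out.println(ollamaAPI.chat(followUp).getResponse());
    }
}
```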
| @@ -0,0 +1,37 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.unittests.jackson; | ||||||
|  |  | ||||||
|  | import static org.junit.jupiter.api.Assertions.assertEquals; | ||||||
|  | import org.junit.jupiter.api.BeforeEach; | ||||||
|  | import org.junit.jupiter.api.Test; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
|  |  | ||||||
|  | public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializationTest<OllamaEmbeddingsRequestModel>{ | ||||||
|  |  | ||||||
|  |     private OllamaEmbeddingsRequestBuilder builder; | ||||||
|  |  | ||||||
|  |     @BeforeEach | ||||||
|  |     public void init() { | ||||||
|  |         builder = OllamaEmbeddingsRequestBuilder.getInstance("DummyModel","DummyPrompt"); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestOnlyMandatoryFields() { | ||||||
|  |         OllamaEmbeddingsRequestModel req = builder.build(); | ||||||
|  |         String jsonRequest = serializeRequest(req); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class), req); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestWithOptions() { | ||||||
|  |         OptionsBuilder b = new OptionsBuilder(); | ||||||
|  |         OllamaEmbeddingsRequestModel req = builder | ||||||
|  |                 .withOptions(b.setMirostat(1).build()).build(); | ||||||
|  |  | ||||||
|  |         String jsonRequest = serializeRequest(req); | ||||||
|  |         OllamaEmbeddingsRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserializeRequest, req); | ||||||
|  |         assertEquals(1, deserializeRequest.getOptions().get("mirostat")); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,56 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.unittests.jackson; | ||||||
|  |  | ||||||
|  | import static org.junit.jupiter.api.Assertions.assertEquals; | ||||||
|  |  | ||||||
|  | import org.json.JSONObject; | ||||||
|  | import org.junit.jupiter.api.BeforeEach; | ||||||
|  | import org.junit.jupiter.api.Test; | ||||||
|  |  | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestBuilder; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
|  |  | ||||||
|  | public class TestGenerateRequestSerialization extends AbstractRequestSerializationTest<OllamaGenerateRequestModel>{ | ||||||
|  |  | ||||||
|  |     private OllamaGenerateRequestBuilder builder; | ||||||
|  |  | ||||||
|  |     @BeforeEach | ||||||
|  |     public void init() { | ||||||
|  |         builder = OllamaGenerateRequestBuilder.getInstance("DummyModel"); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestOnlyMandatoryFields() { | ||||||
|  |         OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build(); | ||||||
|  |  | ||||||
|  |         String jsonRequest = serializeRequest(req); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class), req); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestWithOptions() { | ||||||
|  |         OptionsBuilder b = new OptionsBuilder(); | ||||||
|  |         OllamaGenerateRequestModel req = | ||||||
|  |                 builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build(); | ||||||
|  |  | ||||||
|  |         String jsonRequest = serializeRequest(req); | ||||||
|  |         OllamaGenerateRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserializeRequest, req); | ||||||
|  |         assertEquals(1, deserializeRequest.getOptions().get("mirostat")); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testWithJsonFormat() { | ||||||
|  |         OllamaGenerateRequestModel req = | ||||||
|  |                 builder.withPrompt("Some prompt").withGetJsonResponse().build(); | ||||||
|  |  | ||||||
|  |         String jsonRequest = serializeRequest(req); | ||||||
|  |         // No Jackson deserialization here, as the format property is not a boolean; we skip the | ||||||
|  |         // round-trip since deserialization of requests is never used in real code anyway. | ||||||
|  |         JSONObject jsonObject = new JSONObject(jsonRequest); | ||||||
|  |         String requestFormatProperty = jsonObject.getString("format"); | ||||||
|  |         assertEquals("json", requestFormatProperty); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | } | ||||||