mirror of
				https://github.com/amithkoujalgi/ollama4j.git
				synced 2025-10-31 16:40:41 +01:00 
			
		
		
		
	Compare commits
	
		
			25 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|   | 4a91918e84 | ||
|   | ff3344616c | ||
|   | 726fea5b74 | ||
|   | a09f1362e9 | ||
|   | 4ef0821932 | ||
|   | 2d3cf228cb | ||
|   | 5b3713c69e | ||
|   | e9486cbb8e | ||
|   | 057f0babeb | ||
|   | da146640ca | ||
|   | 82be761b86 | ||
|   | 9c3fc49df1 | ||
|   | 5f19eb17ac | ||
|   | ecb04d6d82 | ||
|   | 3fc7e9423c | ||
|   | 405a08b330 | ||
|   | 921f745435 | ||
|   | bedfec6bf9 | ||
|   | afa09e87a5 | ||
|   | baf2320ea6 | ||
|   | 948a7444fb | ||
|   | ec0eb8b469 | ||
|   | 8f33de7e59 | ||
|   | 8c59e6511b | ||
|   | b93fc7623a | 
							
								
								
									
										4
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										4
									
								
								Makefile
									
									
									
									
									
								
							| @@ -11,9 +11,9 @@ doxygen: | ||||
| 	doxygen Doxyfile | ||||
|  | ||||
| list-releases: | ||||
| 	curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=asc&page=0&size=12&filter=namespace%3Aio.github.amithkoujalgi%2Cname%3Aollama4j' \ | ||||
| 	curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=desc&page=0&size=20&filter=namespace%3Aio.github.ollama4j%2Cname%3Aollama4j' \ | ||||
|       --compressed \ | ||||
|       --silent | jq '.components[].version' | ||||
|       --silent | jq -r '.components[].version' | ||||
|  | ||||
| build-docs: | ||||
| 	npm i --prefix docs && npm run build --prefix docs | ||||
|   | ||||
							
								
								
									
										34
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										34
									
								
								README.md
									
									
									
									
									
								
							| @@ -9,7 +9,6 @@ A Java library (wrapper/binding) for Ollama server. | ||||
|  | ||||
| Find more details on the [website](https://ollama4j.github.io/ollama4j/). | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
| @@ -154,7 +153,7 @@ In your Maven project, add this dependency: | ||||
| <dependency> | ||||
|     <groupId>io.github.ollama4j</groupId> | ||||
|     <artifactId>ollama4j</artifactId> | ||||
|     <version>1.0.79</version> | ||||
|     <version>1.0.89</version> | ||||
| </dependency> | ||||
| ``` | ||||
|  | ||||
| @@ -210,7 +209,7 @@ In your Maven project, add this dependency: | ||||
| <dependency> | ||||
|     <groupId>io.github.ollama4j</groupId> | ||||
|     <artifactId>ollama4j</artifactId> | ||||
|     <version>1.0.79</version> | ||||
|     <version>1.0.89</version> | ||||
| </dependency> | ||||
| ``` | ||||
|  | ||||
| @@ -268,27 +267,22 @@ make integration-tests | ||||
| Newer artifacts are published via GitHub Actions CI workflow when a new release is created from `main` branch. | ||||
|  | ||||
| ## ⭐ Give us a Star! | ||||
|  | ||||
| If you like or are using this project to build your own, please give us a star. It's a free way to show your support. | ||||
|  | ||||
| ## Who's using Ollama4j? | ||||
|  | ||||
| - `Datafaker`: a library to generate fake data | ||||
|     - https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api | ||||
| - `Vaadin Web UI`: UI-Tester for Interactions with Ollama via ollama4j | ||||
|     - https://github.com/TEAMPB/ollama4j-vaadin-ui | ||||
| - `ollama-translator`: Minecraft 1.20.6 spigot plugin allows to easily break language barriers by using ollama on the | ||||
|   server to translate all messages into a specfic target language. | ||||
|     - https://github.com/liebki/ollama-translator | ||||
| - `Another Minecraft Mod`: https://www.reddit.com/r/fabricmc/comments/1e65x5s/comment/ldr2vcf/ | ||||
| - `Ollama4j Web UI`: A web UI for Ollama written in Java using Spring Boot and Vaadin framework and | ||||
|   Ollama4j. | ||||
|     - https://github.com/ollama4j/ollama4j-web-ui | ||||
| - `JnsCLI`: A command-line tool for Jenkins that manages jobs, builds, and configurations directly from the terminal while offering AI-powered error analysis for quick troubleshooting. | ||||
|     -  https://github.com/mirum8/jnscli | ||||
| - `Katie Backend`: An Open Source AI-based question-answering platform that helps companies and organizations make their private domain knowledge accessible and useful to their employees and customers. | ||||
|     - https://github.com/wyona/katie-backend | ||||
| - `TeleLlama3 Bot`: A Question-Answering Telegram Bot. | ||||
|     - https://git.hiast.edu.sy/mohamadbashar.disoki/telellama3-bot | ||||
| | # | Project Name      | Description                                                                                                   | Link                                                                                                                                           | | ||||
| |---|-------------------|---------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------| | ||||
| | 1 | Datafaker         | A library to generate fake data                                                                               | [GitHub](https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api)                                                         | | ||||
| | 2 | Vaadin Web UI     | UI-Tester for interactions with Ollama via ollama4j                                                           | [GitHub](https://github.com/TEAMPB/ollama4j-vaadin-ui)                                                                                         | | ||||
| | 3 | ollama-translator | A Minecraft 1.20.6 Spigot plugin that translates all messages into a specific target language via Ollama      | [GitHub](https://github.com/liebki/ollama-translator)                                                                                          | | ||||
| | 4 | AI Player         | A Minecraft mod that adds an intelligent "second player" to the game                                          | [GitHub](https://github.com/shasankp000/AI-Player), <br/> [Reddit Thread](https://www.reddit.com/r/fabricmc/comments/1e65x5s/comment/ldr2vcf/) | | ||||
| | 5 | Ollama4j Web UI   | A web UI for Ollama written in Java using Spring Boot, Vaadin, and Ollama4j                                   | [GitHub](https://github.com/ollama4j/ollama4j-web-ui)                                                                                          | | ||||
| | 6 | JnsCLI            | A command-line tool for Jenkins that manages jobs, builds, and configurations, with AI-powered error analysis | [GitHub](https://github.com/mirum8/jnscli)                                                                                                     | | ||||
| | 7 | Katie Backend     | An open-source AI-based question-answering platform for accessing private domain knowledge                    | [GitHub](https://github.com/wyona/katie-backend)                                                                                               | | ||||
| | 8 | TeleLlama3 Bot    | A question-answering Telegram bot                                                                             | [Repo](https://git.hiast.edu.sy/mohamadbashar.disoki/telellama3-bot)                                                                           | | ||||
| | 9 | moqui-wechat      | A moqui-wechat component                                                                                      | [GitHub](https://github.com/heguangyong/moqui-wechat)                                                                                          | | ||||
|  | ||||
| ## Traction | ||||
|  | ||||
|   | ||||
							
								
								
									
										65
									
								
								docs/docs/apis-generate/custom-roles.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										65
									
								
								docs/docs/apis-generate/custom-roles.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,65 @@ | ||||
| --- | ||||
| sidebar_position: 8 | ||||
| --- | ||||
|  | ||||
| # Custom Roles | ||||
|  | ||||
| Allows you to manage custom roles (apart from the base roles) for chat interactions with the models. | ||||
|  | ||||
| _Particularly helpful when you would need to use different roles that the newer models support other than the base | ||||
| roles._ | ||||
|  | ||||
| _Base roles are `SYSTEM`, `USER`, `ASSISTANT`, `TOOL`._ | ||||
|  | ||||
| ### Usage | ||||
|  | ||||
| #### Add new role | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.chat.OllamaChatMessageRole; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|         String host = "http://localhost:11434/"; | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         OllamaChatMessageRole customRole = ollamaAPI.addCustomRole("custom-role"); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| #### List roles | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.chat.OllamaChatMessageRole; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|         String host = "http://localhost:11434/"; | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         List<OllamaChatMessageRole> roles = ollamaAPI.listRoles(); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| #### Get role | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.chat.OllamaChatMessageRole; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|         String host = "http://localhost:11434/"; | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         List<OllamaChatMessageRole> roles = ollamaAPI.getRole("custom-role"); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
| @@ -35,7 +35,7 @@ public class Main { | ||||
| } | ||||
| ``` | ||||
|  | ||||
| Or, using the `OllamaEmbedResponseModel`: | ||||
| Or, using the `OllamaEmbedRequestModel`: | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| --- | ||||
| sidebar_position: 4 | ||||
| sidebar_position: 5 | ||||
| --- | ||||
|  | ||||
| # Create Model | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| --- | ||||
| sidebar_position: 5 | ||||
| sidebar_position: 6 | ||||
| --- | ||||
|  | ||||
| # Delete Model | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| --- | ||||
| sidebar_position: 3 | ||||
| sidebar_position: 4 | ||||
| --- | ||||
|  | ||||
| # Get Model Details | ||||
|   | ||||
							
								
								
									
										133
									
								
								docs/docs/apis-model-management/list-library-models.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										133
									
								
								docs/docs/apis-model-management/list-library-models.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,133 @@ | ||||
| --- | ||||
| sidebar_position: 1 | ||||
| --- | ||||
|  | ||||
| # Models from Ollama Library | ||||
|  | ||||
| This API retrieves a list of models directly from the Ollama library. | ||||
|  | ||||
| ### List Models from Ollama Library | ||||
|  | ||||
| This API fetches available models from the Ollama library page, including details such as the model's name, pull count, | ||||
| popular tags, tag count, and the last update time. | ||||
|  | ||||
| ```java title="ListLibraryModels.java" | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.LibraryModel; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|  | ||||
|         String host = "http://localhost:11434/"; | ||||
|  | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         List<LibraryModel> libraryModels = ollamaAPI.listModelsFromLibrary(); | ||||
|  | ||||
|         System.out.println(libraryModels); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| The following is the sample output: | ||||
|  | ||||
| ``` | ||||
| [ | ||||
|     LibraryModel(name=llama3.2-vision, description=Llama 3.2 Vision is a collection of instruction-tuned image reasoning generative models in 11B and 90B sizes., pullCount=21.1K, totalTags=9, popularTags=[vision, 11b, 90b], lastUpdated=yesterday),  | ||||
|     LibraryModel(name=llama3.2, description=Meta's Llama 3.2 goes small with 1B and 3B models., pullCount=2.4M, totalTags=63, popularTags=[tools, 1b, 3b], lastUpdated=6 weeks ago) | ||||
| ] | ||||
| ``` | ||||
|  | ||||
| ### Get Tags of a Library Model | ||||
|  | ||||
| This API fetches the tags associated with a specific model from the Ollama library. | ||||
|  | ||||
| ```java title="GetLibraryModelTags.java" | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.LibraryModel; | ||||
| import io.github.ollama4j.models.response.LibraryModelDetail; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|  | ||||
|         String host = "http://localhost:11434/"; | ||||
|  | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         List<LibraryModel> libraryModels = ollamaAPI.listModelsFromLibrary(); | ||||
|  | ||||
|         LibraryModelDetail libraryModelDetail = ollamaAPI.getLibraryModelDetails(libraryModels.get(0)); | ||||
|  | ||||
|         System.out.println(libraryModelDetail); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| The following is the sample output: | ||||
|  | ||||
| ``` | ||||
| LibraryModelDetail( | ||||
|   model=LibraryModel(name=llama3.2-vision, description=Llama 3.2 Vision is a collection of instruction-tuned image reasoning generative models in 11B and 90B sizes., pullCount=21.1K, totalTags=9, popularTags=[vision, 11b, 90b], lastUpdated=yesterday),  | ||||
|   tags=[ | ||||
|         LibraryModelTag(name=llama3.2-vision, tag=latest, size=7.9GB, lastUpdated=yesterday),  | ||||
|         LibraryModelTag(name=llama3.2-vision, tag=11b, size=7.9GB, lastUpdated=yesterday),  | ||||
|         LibraryModelTag(name=llama3.2-vision, tag=90b, size=55GB, lastUpdated=yesterday) | ||||
|     ] | ||||
| ) | ||||
| ``` | ||||
|  | ||||
| ### Find a model from Ollama library | ||||
|  | ||||
| This API finds a specific model using the model `name` and `tag` from the Ollama library. | ||||
|  | ||||
| ```java title="FindLibraryModel.java" | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.LibraryModelTag; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|  | ||||
|         String host = "http://localhost:11434/"; | ||||
|  | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         LibraryModelTag libraryModelTag = ollamaAPI.findModelTagFromLibrary("qwen2.5", "7b"); | ||||
|  | ||||
|         System.out.println(libraryModelTag); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| The following is the sample output: | ||||
|  | ||||
| ``` | ||||
| LibraryModelTag(name=qwen2.5, tag=7b, size=4.7GB, lastUpdated=7 weeks ago) | ||||
| ``` | ||||
|  | ||||
| ### Pull model using `LibraryModelTag` | ||||
|  | ||||
| You can use `LibraryModelTag` to pull models into Ollama server. | ||||
|  | ||||
| ```java title="PullLibraryModelTags.java" | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.LibraryModelTag; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|  | ||||
|         String host = "http://localhost:11434/"; | ||||
|  | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         LibraryModelTag libraryModelTag = ollamaAPI.findModelTagFromLibrary("qwen2.5", "7b"); | ||||
|  | ||||
|         ollamaAPI.pullModel(libraryModelTag); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
| @@ -1,10 +1,10 @@ | ||||
| --- | ||||
| sidebar_position: 1 | ||||
| sidebar_position: 2 | ||||
| --- | ||||
|  | ||||
| # List Models | ||||
| # List Local Models | ||||
|  | ||||
| This API lets you list available models on the Ollama server. | ||||
| This API lets you list downloaded/available models on the Ollama server. | ||||
|  | ||||
| ```java title="ListModels.java" | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| --- | ||||
| sidebar_position: 2 | ||||
| sidebar_position: 3 | ||||
| --- | ||||
|  | ||||
| # Pull Model | ||||
| @@ -24,3 +24,11 @@ public class Main { | ||||
| ``` | ||||
|  | ||||
| Once downloaded, you can see them when you use [list models](./list-models) API. | ||||
|  | ||||
| :::info | ||||
|  | ||||
| You can even pull models using Ollama model library APIs. This looks up the models directly on the Ollama model library page. Refer | ||||
| to [this](./list-library-models#pull-model-using-librarymodeltag). | ||||
|  | ||||
| ::: | ||||
|  | ||||
|   | ||||
							
								
								
									
										9
									
								
								pom.xml
									
									
									
									
									
								
							
							
						
						
									
										9
									
								
								pom.xml
									
									
									
									
									
								
							| @@ -63,6 +63,10 @@ | ||||
|                 <groupId>org.apache.maven.plugins</groupId> | ||||
|                 <artifactId>maven-javadoc-plugin</artifactId> | ||||
|                 <version>3.5.0</version> | ||||
|                 <configuration> | ||||
|                     <!-- to disable the "missing" warnings. Remove the doclint to enable warnings--> | ||||
|                     <doclint>all,-missing</doclint> | ||||
|                 </configuration> | ||||
|                 <executions> | ||||
|                     <execution> | ||||
|                         <id>attach-javadocs</id> | ||||
| @@ -136,6 +140,11 @@ | ||||
|             <version>${lombok.version}</version> | ||||
|             <scope>provided</scope> | ||||
|         </dependency> | ||||
|         <dependency> | ||||
|             <groupId>org.jsoup</groupId> | ||||
|             <artifactId>jsoup</artifactId> | ||||
|             <version>1.18.1</version> | ||||
|         </dependency> | ||||
|         <dependency> | ||||
|             <groupId>com.fasterxml.jackson.core</groupId> | ||||
|             <artifactId>jackson-databind</artifactId> | ||||
|   | ||||
| @@ -1,12 +1,10 @@ | ||||
| package io.github.ollama4j; | ||||
|  | ||||
| import io.github.ollama4j.exceptions.OllamaBaseException; | ||||
| import io.github.ollama4j.exceptions.RoleNotFoundException; | ||||
| import io.github.ollama4j.exceptions.ToolInvocationException; | ||||
| import io.github.ollama4j.exceptions.ToolNotFoundException; | ||||
| import io.github.ollama4j.models.chat.OllamaChatMessage; | ||||
| import io.github.ollama4j.models.chat.OllamaChatRequest; | ||||
| import io.github.ollama4j.models.chat.OllamaChatRequestBuilder; | ||||
| import io.github.ollama4j.models.chat.OllamaChatResult; | ||||
| import io.github.ollama4j.models.chat.*; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbeddingResponseModel; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel; | ||||
| @@ -20,8 +18,6 @@ import io.github.ollama4j.tools.*; | ||||
| import io.github.ollama4j.utils.Options; | ||||
| import io.github.ollama4j.utils.Utils; | ||||
| import lombok.Setter; | ||||
| import org.slf4j.Logger; | ||||
| import org.slf4j.LoggerFactory; | ||||
|  | ||||
| import java.io.*; | ||||
| import java.net.URI; | ||||
| @@ -34,11 +30,19 @@ import java.nio.charset.StandardCharsets; | ||||
| import java.nio.file.Files; | ||||
| import java.time.Duration; | ||||
| import java.util.*; | ||||
| import java.util.stream.Collectors; | ||||
|  | ||||
| import org.slf4j.Logger; | ||||
| import org.slf4j.LoggerFactory; | ||||
| import org.jsoup.Jsoup; | ||||
| import org.jsoup.nodes.Document; | ||||
| import org.jsoup.nodes.Element; | ||||
| import org.jsoup.select.Elements; | ||||
|  | ||||
| /** | ||||
|  * The base Ollama API class. | ||||
|  */ | ||||
| @SuppressWarnings("DuplicatedCode") | ||||
| @SuppressWarnings({"DuplicatedCode", "resource"}) | ||||
| public class OllamaAPI { | ||||
|  | ||||
|     private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class); | ||||
| @@ -99,12 +103,7 @@ public class OllamaAPI { | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|         HttpRequest httpRequest = null; | ||||
|         try { | ||||
|             httpRequest = | ||||
|                     getRequestBuilderDefault(new URI(url)) | ||||
|                             .header("Accept", "application/json") | ||||
|                             .header("Content-type", "application/json") | ||||
|                             .GET() | ||||
|                             .build(); | ||||
|             httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build(); | ||||
|         } catch (URISyntaxException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
| @@ -123,19 +122,17 @@ public class OllamaAPI { | ||||
|     /** | ||||
|      * Provides a list of running models and details about each model currently loaded into memory. | ||||
|      * | ||||
|      * @return ModelsProcessResponse | ||||
|      * @return ModelsProcessResponse containing details about the running models | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      */ | ||||
|     public ModelsProcessResponse ps() throws IOException, InterruptedException, OllamaBaseException { | ||||
|         String url = this.host + "/api/ps"; | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|         HttpRequest httpRequest = null; | ||||
|         try { | ||||
|             httpRequest = | ||||
|                     getRequestBuilderDefault(new URI(url)) | ||||
|                             .header("Accept", "application/json") | ||||
|                             .header("Content-type", "application/json") | ||||
|                             .GET() | ||||
|                             .build(); | ||||
|             httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build(); | ||||
|         } catch (URISyntaxException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
| @@ -144,69 +141,182 @@ public class OllamaAPI { | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseString = response.body(); | ||||
|         if (statusCode == 200) { | ||||
|             return Utils.getObjectMapper() | ||||
|                     .readValue(responseString, ModelsProcessResponse.class); | ||||
|             return Utils.getObjectMapper().readValue(responseString, ModelsProcessResponse.class); | ||||
|         } else { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseString); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * List available models from Ollama server. | ||||
|      * Lists available models from the Ollama server. | ||||
|      * | ||||
|      * @return the list | ||||
|      * @return a list of models available on the server | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public List<Model> listModels() | ||||
|             throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||
|     public List<Model> listModels() throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||
|         String url = this.host + "/api/tags"; | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|         HttpRequest httpRequest = | ||||
|                 getRequestBuilderDefault(new URI(url)) | ||||
|                         .header("Accept", "application/json") | ||||
|                         .header("Content-type", "application/json") | ||||
|                         .GET() | ||||
|                         .build(); | ||||
|         HttpResponse<String> response = | ||||
|                 httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); | ||||
|         HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build(); | ||||
|         HttpResponse<String> response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseString = response.body(); | ||||
|         if (statusCode == 200) { | ||||
|             return Utils.getObjectMapper() | ||||
|                     .readValue(responseString, ListModelsResponse.class) | ||||
|                     .getModels(); | ||||
|             return Utils.getObjectMapper().readValue(responseString, ListModelsResponse.class).getModels(); | ||||
|         } else { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseString); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Retrieves a list of models from the Ollama library. This method fetches the available models directly from Ollama | ||||
|      * library page, including model details such as the name, pull count, popular tags, tag count, and the time when model was updated. | ||||
|      * | ||||
|      * @return A list of {@link LibraryModel} objects representing the models available in the Ollama library. | ||||
|      * @throws OllamaBaseException  If the HTTP request fails or the response is not successful (non-200 status code). | ||||
|      * @throws IOException          If an I/O error occurs during the HTTP request or response processing. | ||||
|      * @throws InterruptedException If the thread executing the request is interrupted. | ||||
|      * @throws URISyntaxException   If there is an error creating the URI for the HTTP request. | ||||
|      */ | ||||
|     public List<LibraryModel> listModelsFromLibrary() throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||
|         String url = "https://ollama.com/library"; | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|         HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build(); | ||||
|         HttpResponse<String> response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseString = response.body(); | ||||
|         List<LibraryModel> models = new ArrayList<>(); | ||||
|         if (statusCode == 200) { | ||||
|             Document doc = Jsoup.parse(responseString); | ||||
|             Elements modelSections = doc.selectXpath("//*[@id='repo']/ul/li/a"); | ||||
|             for (Element e : modelSections) { | ||||
|                 LibraryModel model = new LibraryModel(); | ||||
|                 Elements names = e.select("div > h2 > div > span"); | ||||
|                 Elements desc = e.select("div > p"); | ||||
|                 Elements pullCounts = e.select("div:nth-of-type(2) > p > span:first-of-type > span:first-of-type"); | ||||
|                 Elements popularTags = e.select("div > div > span"); | ||||
|                 Elements totalTags = e.select("div:nth-of-type(2) > p > span:nth-of-type(2) > span:first-of-type"); | ||||
|                 Elements lastUpdatedTime = e.select("div:nth-of-type(2) > p > span:nth-of-type(3) > span:nth-of-type(2)"); | ||||
|  | ||||
|                 if (names.first() == null || names.isEmpty()) { | ||||
|                     // if name cannot be extracted, skip. | ||||
|                     continue; | ||||
|                 } | ||||
|                 Optional.ofNullable(names.first()).map(Element::text).ifPresent(model::setName); | ||||
|                 model.setDescription(Optional.ofNullable(desc.first()).map(Element::text).orElse("")); | ||||
|                 model.setPopularTags(Optional.of(popularTags).map(tags -> tags.stream().map(Element::text).collect(Collectors.toList())).orElse(new ArrayList<>())); | ||||
|                 model.setPullCount(Optional.ofNullable(pullCounts.first()).map(Element::text).orElse("")); | ||||
|                 model.setTotalTags(Optional.ofNullable(totalTags.first()).map(Element::text).map(Integer::parseInt).orElse(0)); | ||||
|                 model.setLastUpdated(Optional.ofNullable(lastUpdatedTime.first()).map(Element::text).orElse("")); | ||||
|  | ||||
|                 models.add(model); | ||||
|             } | ||||
|             return models; | ||||
|         } else { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseString); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Fetches the tags associated with a specific model from Ollama library. | ||||
|      * This method fetches the available model tags directly from Ollama library model page, including model tag name, size and time when model was last updated | ||||
|      * into a list of {@link LibraryModelTag} objects. | ||||
|      * | ||||
|      * @param libraryModel the {@link LibraryModel} object which contains the name of the library model | ||||
|      *                     for which the tags need to be fetched. | ||||
|      * @return a list of {@link LibraryModelTag} objects containing the extracted tags and their associated metadata. | ||||
|      * @throws OllamaBaseException  if the HTTP response status code indicates an error (i.e., not 200 OK), | ||||
|      *                              or if there is any other issue during the request or response processing. | ||||
|      * @throws IOException          if an input/output exception occurs during the HTTP request or response handling. | ||||
|      * @throws InterruptedException if the thread is interrupted while waiting for the HTTP response. | ||||
|      * @throws URISyntaxException   if the URI format is incorrect or invalid. | ||||
|      */ | ||||
|     public LibraryModelDetail getLibraryModelDetails(LibraryModel libraryModel) throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||
|         String url = String.format("https://ollama.com/library/%s/tags", libraryModel.getName()); | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|         HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build(); | ||||
|         HttpResponse<String> response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseString = response.body(); | ||||
|  | ||||
|         List<LibraryModelTag> libraryModelTags = new ArrayList<>(); | ||||
|         if (statusCode == 200) { | ||||
|             Document doc = Jsoup.parse(responseString); | ||||
|             Elements tagSections = doc.select("html > body > main > div > section > div > div > div:nth-child(n+2) > div"); | ||||
|             for (Element e : tagSections) { | ||||
|                 Elements tags = e.select("div > a > div"); | ||||
|                 Elements tagsMetas = e.select("div > span"); | ||||
|  | ||||
|                 LibraryModelTag libraryModelTag = new LibraryModelTag(); | ||||
|  | ||||
|                 if (tags.first() == null || tags.isEmpty()) { | ||||
|                     // if tag cannot be extracted, skip. | ||||
|                     continue; | ||||
|                 } | ||||
|                 libraryModelTag.setName(libraryModel.getName()); | ||||
|                 Optional.ofNullable(tags.first()).map(Element::text).ifPresent(libraryModelTag::setTag); | ||||
|                 libraryModelTag.setSize(Optional.ofNullable(tagsMetas.first()).map(element -> element.text().split("•")).filter(parts -> parts.length > 1).map(parts -> parts[1].trim()).orElse("")); | ||||
|                 libraryModelTag.setLastUpdated(Optional.ofNullable(tagsMetas.first()).map(element -> element.text().split("•")).filter(parts -> parts.length > 1).map(parts -> parts[2].trim()).orElse("")); | ||||
|                 libraryModelTags.add(libraryModelTag); | ||||
|             } | ||||
|             LibraryModelDetail libraryModelDetail = new LibraryModelDetail(); | ||||
|             libraryModelDetail.setModel(libraryModel); | ||||
|             libraryModelDetail.setTags(libraryModelTags); | ||||
|             return libraryModelDetail; | ||||
|         } else { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseString); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Finds a specific model using model name and tag from Ollama library. | ||||
|      * <p> | ||||
|      * This method retrieves the model from the Ollama library by its name, then fetches its tags. | ||||
|      * It searches through the tags of the model to find one that matches the specified tag name. | ||||
|      * If the model or the tag is not found, it throws a {@link NoSuchElementException}. | ||||
|      * | ||||
|      * @param modelName The name of the model to search for in the library. | ||||
|      * @param tag       The tag name to search for within the specified model. | ||||
|      * @return The {@link LibraryModelTag} associated with the specified model and tag. | ||||
|      * @throws OllamaBaseException    If there is a problem with the Ollama library operations. | ||||
|      * @throws IOException            If an I/O error occurs during the operation. | ||||
|      * @throws URISyntaxException     If there is an error with the URI syntax. | ||||
|      * @throws InterruptedException   If the operation is interrupted. | ||||
|      * @throws NoSuchElementException If the model or the tag is not found. | ||||
|      */ | ||||
|     public LibraryModelTag findModelTagFromLibrary(String modelName, String tag) throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { | ||||
|         List<LibraryModel> libraryModels = this.listModelsFromLibrary(); | ||||
|         LibraryModel libraryModel = libraryModels.stream().filter(model -> model.getName().equals(modelName)).findFirst().orElseThrow(() -> new NoSuchElementException(String.format("Model by name '%s' not found", modelName))); | ||||
|         LibraryModelDetail libraryModelDetail = this.getLibraryModelDetails(libraryModel); | ||||
|         LibraryModelTag libraryModelTag = libraryModelDetail.getTags().stream().filter(tagName -> tagName.getTag().equals(tag)).findFirst().orElseThrow(() -> new NoSuchElementException(String.format("Tag '%s' for model '%s' not found", tag, modelName))); | ||||
|         return libraryModelTag; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Pull a model on the Ollama server from the list of <a | ||||
|      * href="https://ollama.ai/library">available models</a>. | ||||
|      * | ||||
|      * @param modelName the name of the model | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public void pullModel(String modelName) | ||||
|             throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { | ||||
|     public void pullModel(String modelName) throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { | ||||
|         String url = this.host + "/api/pull"; | ||||
|         String jsonData = new ModelRequest(modelName).toString(); | ||||
|         HttpRequest request = | ||||
|                 getRequestBuilderDefault(new URI(url)) | ||||
|                         .POST(HttpRequest.BodyPublishers.ofString(jsonData)) | ||||
|                         .header("Accept", "application/json") | ||||
|                         .header("Content-type", "application/json") | ||||
|                         .build(); | ||||
|         HttpRequest request = getRequestBuilderDefault(new URI(url)).POST(HttpRequest.BodyPublishers.ofString(jsonData)).header("Accept", "application/json").header("Content-type", "application/json").build(); | ||||
|         HttpClient client = HttpClient.newHttpClient(); | ||||
|         HttpResponse<InputStream> response = | ||||
|                 client.send(request, HttpResponse.BodyHandlers.ofInputStream()); | ||||
|         HttpResponse<InputStream> response = client.send(request, HttpResponse.BodyHandlers.ofInputStream()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         InputStream responseBodyStream = response.body(); | ||||
|         String responseString = ""; | ||||
|         try (BufferedReader reader = | ||||
|                      new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { | ||||
|         try (BufferedReader reader = new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { | ||||
|             String line; | ||||
|             while ((line = reader.readLine()) != null) { | ||||
|                 ModelPullResponse modelPullResponse = | ||||
|                         Utils.getObjectMapper().readValue(line, ModelPullResponse.class); | ||||
|                 ModelPullResponse modelPullResponse = Utils.getObjectMapper().readValue(line, ModelPullResponse.class); | ||||
|                 if (verbose) { | ||||
|                     logger.info(modelPullResponse.getStatus()); | ||||
|                 } | ||||
| @@ -217,22 +327,37 @@ public class OllamaAPI { | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Pulls a model using the specified Ollama library model tag. | ||||
|      * The model is identified by a name and a tag, which are combined into a single identifier | ||||
|      * in the format "name:tag" to pull the corresponding model. | ||||
|      * | ||||
|      * @param libraryModelTag the {@link LibraryModelTag} object containing the name and tag | ||||
|      *                        of the model to be pulled. | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public void pullModel(LibraryModelTag libraryModelTag) throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { | ||||
|         String tagToPull = String.format("%s:%s", libraryModelTag.getName(), libraryModelTag.getTag()); | ||||
|         pullModel(tagToPull); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Gets model details from the Ollama server. | ||||
|      * | ||||
|      * @param modelName the model | ||||
|      * @return the model details | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public ModelDetail getModelDetails(String modelName) | ||||
|             throws IOException, OllamaBaseException, InterruptedException, URISyntaxException { | ||||
|     public ModelDetail getModelDetails(String modelName) throws IOException, OllamaBaseException, InterruptedException, URISyntaxException { | ||||
|         String url = this.host + "/api/show"; | ||||
|         String jsonData = new ModelRequest(modelName).toString(); | ||||
|         HttpRequest request = | ||||
|                 getRequestBuilderDefault(new URI(url)) | ||||
|                         .header("Accept", "application/json") | ||||
|                         .header("Content-type", "application/json") | ||||
|                         .POST(HttpRequest.BodyPublishers.ofString(jsonData)) | ||||
|                         .build(); | ||||
|         HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData)).build(); | ||||
|         HttpClient client = HttpClient.newHttpClient(); | ||||
|         HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
| @@ -250,17 +375,15 @@ public class OllamaAPI { | ||||
|      * | ||||
|      * @param modelName     the name of the custom model to be created. | ||||
|      * @param modelFilePath the path to model file that exists on the Ollama server. | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public void createModelWithFilePath(String modelName, String modelFilePath) | ||||
|             throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { | ||||
|     public void createModelWithFilePath(String modelName, String modelFilePath) throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { | ||||
|         String url = this.host + "/api/create"; | ||||
|         String jsonData = new CustomModelFilePathRequest(modelName, modelFilePath).toString(); | ||||
|         HttpRequest request = | ||||
|                 getRequestBuilderDefault(new URI(url)) | ||||
|                         .header("Accept", "application/json") | ||||
|                         .header("Content-Type", "application/json") | ||||
|                         .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)) | ||||
|                         .build(); | ||||
|         HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-Type", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build(); | ||||
|         HttpClient client = HttpClient.newHttpClient(); | ||||
|         HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
| @@ -284,17 +407,15 @@ public class OllamaAPI { | ||||
|      * | ||||
|      * @param modelName         the name of the custom model to be created. | ||||
|      * @param modelFileContents the path to model file that exists on the Ollama server. | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public void createModelWithModelFileContents(String modelName, String modelFileContents) | ||||
|             throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { | ||||
|     public void createModelWithModelFileContents(String modelName, String modelFileContents) throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { | ||||
|         String url = this.host + "/api/create"; | ||||
|         String jsonData = new CustomModelFileContentsRequest(modelName, modelFileContents).toString(); | ||||
|         HttpRequest request = | ||||
|                 getRequestBuilderDefault(new URI(url)) | ||||
|                         .header("Accept", "application/json") | ||||
|                         .header("Content-Type", "application/json") | ||||
|                         .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)) | ||||
|                         .build(); | ||||
|         HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-Type", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build(); | ||||
|         HttpClient client = HttpClient.newHttpClient(); | ||||
|         HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
| @@ -315,17 +436,15 @@ public class OllamaAPI { | ||||
|      * | ||||
|      * @param modelName          the name of the model to be deleted. | ||||
|      * @param ignoreIfNotPresent ignore errors if the specified model is not present on Ollama server. | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public void deleteModel(String modelName, boolean ignoreIfNotPresent) | ||||
|             throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { | ||||
|     public void deleteModel(String modelName, boolean ignoreIfNotPresent) throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { | ||||
|         String url = this.host + "/api/delete"; | ||||
|         String jsonData = new ModelRequest(modelName).toString(); | ||||
|         HttpRequest request = | ||||
|                 getRequestBuilderDefault(new URI(url)) | ||||
|                         .method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)) | ||||
|                         .header("Accept", "application/json") | ||||
|                         .header("Content-type", "application/json") | ||||
|                         .build(); | ||||
|         HttpRequest request = getRequestBuilderDefault(new URI(url)).method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).header("Accept", "application/json").header("Content-type", "application/json").build(); | ||||
|         HttpClient client = HttpClient.newHttpClient(); | ||||
|         HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
| @@ -344,11 +463,13 @@ public class OllamaAPI { | ||||
|      * @param model  name of model to generate embeddings from | ||||
|      * @param prompt text to generate embeddings for | ||||
|      * @return embeddings | ||||
|      * @deprecated Use {@link #embed(String, List<String>)} instead. | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @deprecated Use {@link #embed(String, List)} instead. | ||||
|      */ | ||||
|     @Deprecated | ||||
|     public List<Double> generateEmbeddings(String model, String prompt) | ||||
|             throws IOException, InterruptedException, OllamaBaseException { | ||||
|     public List<Double> generateEmbeddings(String model, String prompt) throws IOException, InterruptedException, OllamaBaseException { | ||||
|         return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt)); | ||||
|     } | ||||
|  | ||||
| @@ -357,6 +478,9 @@ public class OllamaAPI { | ||||
|      * | ||||
|      * @param modelRequest request for '/api/embeddings' endpoint | ||||
|      * @return embeddings | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @deprecated Use {@link #embed(OllamaEmbedRequestModel)} instead. | ||||
|      */ | ||||
|     @Deprecated | ||||
| @@ -364,17 +488,13 @@ public class OllamaAPI { | ||||
|         URI uri = URI.create(this.host + "/api/embeddings"); | ||||
|         String jsonData = modelRequest.toString(); | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|         HttpRequest.Builder requestBuilder = | ||||
|                 getRequestBuilderDefault(uri) | ||||
|                         .header("Accept", "application/json") | ||||
|                         .POST(HttpRequest.BodyPublishers.ofString(jsonData)); | ||||
|         HttpRequest.Builder requestBuilder = getRequestBuilderDefault(uri).header("Accept", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData)); | ||||
|         HttpRequest request = requestBuilder.build(); | ||||
|         HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseBody = response.body(); | ||||
|         if (statusCode == 200) { | ||||
|             OllamaEmbeddingResponseModel embeddingResponse = | ||||
|                     Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class); | ||||
|             OllamaEmbeddingResponseModel embeddingResponse = Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class); | ||||
|             return embeddingResponse.getEmbedding(); | ||||
|         } else { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseBody); | ||||
| @@ -387,9 +507,11 @@ public class OllamaAPI { | ||||
|      * @param model  name of model to generate embeddings from | ||||
|      * @param inputs text/s to generate embeddings for | ||||
|      * @return embeddings | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaEmbedResponseModel embed(String model, List<String> inputs) | ||||
|             throws IOException, InterruptedException, OllamaBaseException { | ||||
|     public OllamaEmbedResponseModel embed(String model, List<String> inputs) throws IOException, InterruptedException, OllamaBaseException { | ||||
|         return embed(new OllamaEmbedRequestModel(model, inputs)); | ||||
|     } | ||||
|  | ||||
| @@ -398,26 +520,23 @@ public class OllamaAPI { | ||||
|      * | ||||
|      * @param modelRequest request for '/api/embed' endpoint | ||||
|      * @return embeddings | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaEmbedResponseModel embed(OllamaEmbedRequestModel modelRequest) | ||||
|             throws IOException, InterruptedException, OllamaBaseException { | ||||
|     public OllamaEmbedResponseModel embed(OllamaEmbedRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException { | ||||
|         URI uri = URI.create(this.host + "/api/embed"); | ||||
|         String jsonData = Utils.getObjectMapper().writeValueAsString(modelRequest); | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|  | ||||
|         HttpRequest request = HttpRequest.newBuilder(uri) | ||||
|                 .header("Accept", "application/json") | ||||
|                 .POST(HttpRequest.BodyPublishers.ofString(jsonData)) | ||||
|                 .build(); | ||||
|         HttpRequest request = HttpRequest.newBuilder(uri).header("Accept", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData)).build(); | ||||
|  | ||||
|         HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseBody = response.body(); | ||||
|  | ||||
|         if (statusCode == 200) { | ||||
|             OllamaEmbedResponseModel embeddingResponse = | ||||
|                     Utils.getObjectMapper().readValue(responseBody, OllamaEmbedResponseModel.class); | ||||
|             return embeddingResponse; | ||||
|             return Utils.getObjectMapper().readValue(responseBody, OllamaEmbedResponseModel.class); | ||||
|         } else { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseBody); | ||||
|         } | ||||
| @@ -434,9 +553,11 @@ public class OllamaAPI { | ||||
|      *                      details on the options</a> | ||||
|      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||
|      * @return OllamaResult that includes response text and time taken for response | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler) | ||||
|             throws OllamaBaseException, IOException, InterruptedException { | ||||
|     public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt); | ||||
|         ollamaRequestModel.setRaw(raw); | ||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||
| @@ -453,13 +574,14 @@ public class OllamaAPI { | ||||
|      * @param raw     In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the raw parameter to disable templating. Also note that raw mode will not return a context. | ||||
|      * @param options Additional options or configurations to use when generating the response. | ||||
|      * @return {@link OllamaResult} | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaResult generate(String model, String prompt, boolean raw, Options options) | ||||
|             throws OllamaBaseException, IOException, InterruptedException { | ||||
|     public OllamaResult generate(String model, String prompt, boolean raw, Options options) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         return generate(model, prompt, raw, options, null); | ||||
|     } | ||||
|  | ||||
|  | ||||
|     /** | ||||
|      * Generates response using the specified AI model and prompt (in blocking mode), and then invokes a set of tools | ||||
|      * on the generated response. | ||||
| @@ -468,13 +590,11 @@ public class OllamaAPI { | ||||
|      * @param prompt  The input text or prompt to provide to the AI model. | ||||
|      * @param options Additional options or configurations to use when generating the response. | ||||
|      * @return {@link OllamaToolsResult} An OllamaToolsResult object containing the response from the AI model and the results of invoking the tools on that output. | ||||
|      * @throws OllamaBaseException  If there is an error related to the Ollama API or service. | ||||
|      * @throws IOException          If there is an error related to input/output operations. | ||||
|      * @throws InterruptedException If the method is interrupted while waiting for the AI model | ||||
|      *                              to generate the response or for the tools to be invoked. | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaToolsResult generateWithTools(String model, String prompt, Options options) | ||||
|             throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException { | ||||
|     public OllamaToolsResult generateWithTools(String model, String prompt, Options options) throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException { | ||||
|         boolean raw = true; | ||||
|         OllamaToolsResult toolResult = new OllamaToolsResult(); | ||||
|         Map<ToolFunctionCallSpec, Object> toolResults = new HashMap<>(); | ||||
| @@ -495,7 +615,6 @@ public class OllamaAPI { | ||||
|         return toolResult; | ||||
|     } | ||||
|  | ||||
|  | ||||
|     /** | ||||
|      * Generate response for a question to a model running on Ollama server and get a callback handle | ||||
|      * that can be used to check for status and get the response from the model later. This would be | ||||
| @@ -509,9 +628,7 @@ public class OllamaAPI { | ||||
|         OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt); | ||||
|         ollamaRequestModel.setRaw(raw); | ||||
|         URI uri = URI.create(this.host + "/api/generate"); | ||||
|         OllamaAsyncResultStreamer ollamaAsyncResultStreamer = | ||||
|                 new OllamaAsyncResultStreamer( | ||||
|                         getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds); | ||||
|         OllamaAsyncResultStreamer ollamaAsyncResultStreamer = new OllamaAsyncResultStreamer(getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds); | ||||
|         ollamaAsyncResultStreamer.start(); | ||||
|         return ollamaAsyncResultStreamer; | ||||
|     } | ||||
| @@ -528,10 +645,11 @@ public class OllamaAPI { | ||||
|      *                      details on the options</a> | ||||
|      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||
|      * @return OllamaResult that includes response text and time taken for response | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaResult generateWithImageFiles( | ||||
|             String model, String prompt, List<File> imageFiles, Options options, OllamaStreamHandler streamHandler) | ||||
|             throws OllamaBaseException, IOException, InterruptedException { | ||||
|     public OllamaResult generateWithImageFiles(String model, String prompt, List<File> imageFiles, Options options, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         List<String> images = new ArrayList<>(); | ||||
|         for (File imageFile : imageFiles) { | ||||
|             images.add(encodeFileToBase64(imageFile)); | ||||
| @@ -545,10 +663,12 @@ public class OllamaAPI { | ||||
|      * Convenience method to call Ollama API without streaming responses. | ||||
|      * <p> | ||||
|      * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)} | ||||
|      * | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaResult generateWithImageFiles( | ||||
|             String model, String prompt, List<File> imageFiles, Options options) | ||||
|             throws OllamaBaseException, IOException, InterruptedException { | ||||
|     public OllamaResult generateWithImageFiles(String model, String prompt, List<File> imageFiles, Options options) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         return generateWithImageFiles(model, prompt, imageFiles, options, null); | ||||
|     } | ||||
|  | ||||
| @@ -564,10 +684,12 @@ public class OllamaAPI { | ||||
|      *                      details on the options</a> | ||||
|      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||
|      * @return OllamaResult that includes response text and time taken for response | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public OllamaResult generateWithImageURLs( | ||||
|             String model, String prompt, List<String> imageURLs, Options options, OllamaStreamHandler streamHandler) | ||||
|             throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||
|     public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs, Options options, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||
|         List<String> images = new ArrayList<>(); | ||||
|         for (String imageURL : imageURLs) { | ||||
|             images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL))); | ||||
| @@ -581,14 +703,16 @@ public class OllamaAPI { | ||||
|      * Convenience method to call Ollama API without streaming responses. | ||||
|      * <p> | ||||
|      * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)} | ||||
|      * | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs, | ||||
|                                               Options options) | ||||
|             throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||
|     public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs, Options options) throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||
|         return generateWithImageURLs(model, prompt, imageURLs, options, null); | ||||
|     } | ||||
|  | ||||
|  | ||||
|     /** | ||||
|      * Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the api | ||||
|      * 'api/chat'. | ||||
| @@ -599,6 +723,9 @@ public class OllamaAPI { | ||||
|      * @throws OllamaBaseException  any response code than 200 has been returned | ||||
|      * @throws IOException          in case the responseStream can not be read | ||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaChatResult chat(String model, List<OllamaChatMessage> messages) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model); | ||||
| @@ -615,6 +742,9 @@ public class OllamaAPI { | ||||
|      * @throws OllamaBaseException  any response code than 200 has been returned | ||||
|      * @throws IOException          in case the responseStream can not be read | ||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaChatResult chat(OllamaChatRequest request) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         return chat(request, null); | ||||
| @@ -631,6 +761,9 @@ public class OllamaAPI { | ||||
|      * @throws OllamaBaseException  any response code than 200 has been returned | ||||
|      * @throws IOException          in case the responseStream can not be read | ||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaChatResult chat(OllamaChatRequest request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); | ||||
| @@ -648,6 +781,37 @@ public class OllamaAPI { | ||||
|         toolRegistry.addFunction(toolSpecification.getFunctionName(), toolSpecification.getToolDefinition()); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Adds a custom role. | ||||
|      * | ||||
|      * @param roleName the name of the custom role to be added | ||||
|      * @return the newly created OllamaChatMessageRole | ||||
|      */ | ||||
|     public OllamaChatMessageRole addCustomRole(String roleName) { | ||||
|         return OllamaChatMessageRole.newCustomRole(roleName); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Lists all available roles. | ||||
|      * | ||||
|      * @return a list of available OllamaChatMessageRole objects | ||||
|      */ | ||||
|     public List<OllamaChatMessageRole> listRoles() { | ||||
|         return OllamaChatMessageRole.getRoles(); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Retrieves a specific role by name. | ||||
|      * | ||||
|      * @param roleName the name of the role to retrieve | ||||
|      * @return the OllamaChatMessageRole associated with the given name | ||||
|      * @throws RoleNotFoundException if the role with the specified name does not exist | ||||
|      */ | ||||
|     public OllamaChatMessageRole getRole(String roleName) throws RoleNotFoundException { | ||||
|         return OllamaChatMessageRole.getRole(roleName); | ||||
|     } | ||||
|  | ||||
|  | ||||
|     // technical private methods // | ||||
|  | ||||
|     private static String encodeFileToBase64(File file) throws IOException { | ||||
| @@ -658,11 +822,8 @@ public class OllamaAPI { | ||||
|         return Base64.getEncoder().encodeToString(bytes); | ||||
|     } | ||||
|  | ||||
|     private OllamaResult generateSyncForOllamaRequestModel( | ||||
|             OllamaGenerateRequest ollamaRequestModel, OllamaStreamHandler streamHandler) | ||||
|             throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaGenerateEndpointCaller requestCaller = | ||||
|                 new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); | ||||
|     private OllamaResult generateSyncForOllamaRequestModel(OllamaGenerateRequest ollamaRequestModel, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaGenerateEndpointCaller requestCaller = new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); | ||||
|         OllamaResult result; | ||||
|         if (streamHandler != null) { | ||||
|             ollamaRequestModel.setStream(true); | ||||
| @@ -680,10 +841,7 @@ public class OllamaAPI { | ||||
|      * @return HttpRequest.Builder | ||||
|      */ | ||||
|     private HttpRequest.Builder getRequestBuilderDefault(URI uri) { | ||||
|         HttpRequest.Builder requestBuilder = | ||||
|                 HttpRequest.newBuilder(uri) | ||||
|                         .header("Content-Type", "application/json") | ||||
|                         .timeout(Duration.ofSeconds(requestTimeoutSeconds)); | ||||
|         HttpRequest.Builder requestBuilder = HttpRequest.newBuilder(uri).header("Content-Type", "application/json").timeout(Duration.ofSeconds(requestTimeoutSeconds)); | ||||
|         if (isBasicAuthCredentialsSet()) { | ||||
|             requestBuilder.header("Authorization", getBasicAuthHeaderValue()); | ||||
|         } | ||||
| @@ -709,7 +867,6 @@ public class OllamaAPI { | ||||
|         return basicAuth != null; | ||||
|     } | ||||
|  | ||||
|  | ||||
|     private Object invokeTool(ToolFunctionCallSpec toolFunctionCallSpec) throws ToolInvocationException { | ||||
|         try { | ||||
|             String methodName = toolFunctionCallSpec.getName(); | ||||
|   | ||||
| @@ -0,0 +1,8 @@ | ||||
| package io.github.ollama4j.exceptions; | ||||
|  | ||||
| public class RoleNotFoundException extends Exception { | ||||
|  | ||||
|     public RoleNotFoundException(String s) { | ||||
|         super(s); | ||||
|     } | ||||
| } | ||||
| @@ -8,6 +8,7 @@ import com.fasterxml.jackson.databind.annotation.JsonSerialize; | ||||
| import io.github.ollama4j.utils.FileToBase64Serializer; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
| import lombok.AllArgsConstructor; | ||||
| import lombok.Data; | ||||
| import lombok.NoArgsConstructor; | ||||
|   | ||||
| @@ -1,20 +1,53 @@ | ||||
| package io.github.ollama4j.models.chat; | ||||
|  | ||||
| import com.fasterxml.jackson.annotation.JsonValue; | ||||
| import io.github.ollama4j.exceptions.RoleNotFoundException; | ||||
| import lombok.Getter; | ||||
|  | ||||
| import java.util.ArrayList; | ||||
| import java.util.List; | ||||
|  | ||||
| /** | ||||
|  * Defines the possible Chat Message roles. | ||||
|  */ | ||||
| public enum OllamaChatMessageRole { | ||||
|     SYSTEM("system"), | ||||
|     USER("user"), | ||||
|     ASSISTANT("assistant"), | ||||
|     TOOL("tool"); | ||||
| @Getter | ||||
| public class OllamaChatMessageRole { | ||||
|     private static final List<OllamaChatMessageRole> roles = new ArrayList<>(); | ||||
|  | ||||
|     public static final OllamaChatMessageRole SYSTEM = new OllamaChatMessageRole("system"); | ||||
|     public static final OllamaChatMessageRole USER = new OllamaChatMessageRole("user"); | ||||
|     public static final OllamaChatMessageRole ASSISTANT = new OllamaChatMessageRole("assistant"); | ||||
|     public static final OllamaChatMessageRole TOOL = new OllamaChatMessageRole("tool"); | ||||
|  | ||||
|     @JsonValue | ||||
|     private String roleName; | ||||
|     private final String roleName; | ||||
|  | ||||
|     private OllamaChatMessageRole(String roleName) { | ||||
|         this.roleName = roleName; | ||||
|         roles.add(this); | ||||
|     } | ||||
|  | ||||
|     public static OllamaChatMessageRole newCustomRole(String roleName) { | ||||
|         OllamaChatMessageRole customRole = new OllamaChatMessageRole(roleName); | ||||
|         roles.add(customRole); | ||||
|         return customRole; | ||||
|     } | ||||
|  | ||||
|     public static List<OllamaChatMessageRole> getRoles() { | ||||
|         return new ArrayList<>(roles); | ||||
|     } | ||||
|  | ||||
|     public static OllamaChatMessageRole getRole(String roleName) throws RoleNotFoundException { | ||||
|         for (OllamaChatMessageRole role : roles) { | ||||
|             if (role.roleName.equals(roleName)) { | ||||
|                 return role; | ||||
|             } | ||||
|         } | ||||
|         throw new RoleNotFoundException("Invalid role name: " + roleName); | ||||
|     } | ||||
|  | ||||
|     @Override | ||||
|     public String toString() { | ||||
|         return roleName; | ||||
|     } | ||||
| } | ||||
|   | ||||
| @@ -45,7 +45,7 @@ public class OllamaChatRequestBuilder { | ||||
|             try { | ||||
|                 return Files.readAllBytes(file.toPath()); | ||||
|             } catch (IOException e) { | ||||
|                 LOG.warn(String.format("File '%s' could not be accessed, will not add to message!", file.toPath()), e); | ||||
|                 LOG.warn("File '{}' could not be accessed, will not add to message!", file.toPath(), e); | ||||
|                 return new byte[0]; | ||||
|             } | ||||
|         }).collect(Collectors.toList()); | ||||
| @@ -63,9 +63,9 @@ public class OllamaChatRequestBuilder { | ||||
|                 try { | ||||
|                     binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl)); | ||||
|                 } catch (URISyntaxException e) { | ||||
|                     LOG.warn(String.format("URL '%s' could not be accessed, will not add to message!", imageUrl), e); | ||||
|                     LOG.warn("URL '{}' could not be accessed, will not add to message!", imageUrl, e); | ||||
|                 } catch (IOException e) { | ||||
|                     LOG.warn(String.format("Content of URL '%s' could not be read, will not add to message!", imageUrl), e); | ||||
|                     LOG.warn("Content of URL '{}' could not be read, will not add to message!", imageUrl, e); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|   | ||||
| @@ -12,8 +12,7 @@ public class OllamaChatResult extends OllamaResult{ | ||||
|  | ||||
|     private List<OllamaChatMessage> chatHistory; | ||||
|  | ||||
|     public OllamaChatResult(String response, long responseTime, int httpStatusCode, | ||||
|             List<OllamaChatMessage> chatHistory) { | ||||
|     public OllamaChatResult(String response, long responseTime, int httpStatusCode, List<OllamaChatMessage> chatHistory) { | ||||
|         super(response, responseTime, httpStatusCode); | ||||
|         this.chatHistory = chatHistory; | ||||
|         appendAnswerToChatHistory(response); | ||||
| @@ -27,6 +26,4 @@ public class OllamaChatResult extends OllamaResult{ | ||||
|         OllamaChatMessage assistantMessage = new OllamaChatMessage(OllamaChatMessageRole.ASSISTANT, answer); | ||||
|         this.chatHistory.add(assistantMessage); | ||||
|     } | ||||
|      | ||||
|      | ||||
| } | ||||
|   | ||||
| @@ -0,0 +1,40 @@ | ||||
| package io.github.ollama4j.models.embeddings; | ||||
|  | ||||
| import io.github.ollama4j.utils.Options; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
| /** | ||||
|  * Builderclass to easily create Requests for Embedding models using ollama. | ||||
|  */ | ||||
| public class OllamaEmbedRequestBuilder { | ||||
|  | ||||
|     private final OllamaEmbedRequestModel request; | ||||
|  | ||||
|     private OllamaEmbedRequestBuilder(String model, List<String> input) { | ||||
|         this.request = new OllamaEmbedRequestModel(model,input); | ||||
|     } | ||||
|  | ||||
|     public static OllamaEmbedRequestBuilder getInstance(String model, String... input){ | ||||
|         return new OllamaEmbedRequestBuilder(model, List.of(input)); | ||||
|     } | ||||
|  | ||||
|     public OllamaEmbedRequestBuilder withOptions(Options options){ | ||||
|         this.request.setOptions(options.getOptionsMap()); | ||||
|         return this; | ||||
|     } | ||||
|  | ||||
|     public OllamaEmbedRequestBuilder withKeepAlive(String keepAlive){ | ||||
|         this.request.setKeepAlive(keepAlive); | ||||
|         return this; | ||||
|     } | ||||
|  | ||||
|     public OllamaEmbedRequestBuilder withoutTruncate(){ | ||||
|         this.request.setTruncate(false); | ||||
|         return this; | ||||
|     } | ||||
|  | ||||
|     public OllamaEmbedRequestModel build() { | ||||
|         return this.request; | ||||
|     } | ||||
| } | ||||
| @@ -7,6 +7,7 @@ import lombok.Data; | ||||
|  | ||||
| @SuppressWarnings("unused") | ||||
| @Data | ||||
| @Deprecated(since="1.0.90") | ||||
| public class OllamaEmbeddingResponseModel { | ||||
|     @JsonProperty("embedding") | ||||
|     private List<Double> embedding; | ||||
|   | ||||
| @@ -2,6 +2,7 @@ package io.github.ollama4j.models.embeddings; | ||||
|  | ||||
| import io.github.ollama4j.utils.Options; | ||||
|  | ||||
| @Deprecated(since="1.0.90") | ||||
| public class OllamaEmbeddingsRequestBuilder { | ||||
|  | ||||
|     private OllamaEmbeddingsRequestBuilder(String model, String prompt){ | ||||
|   | ||||
| @@ -12,6 +12,7 @@ import lombok.RequiredArgsConstructor; | ||||
| @Data | ||||
| @RequiredArgsConstructor | ||||
| @NoArgsConstructor | ||||
| @Deprecated(since="1.0.90") | ||||
| public class OllamaEmbeddingsRequestModel { | ||||
|   @NonNull | ||||
|   private String model; | ||||
|   | ||||
| @@ -1,7 +1,9 @@ | ||||
| package io.github.ollama4j.models.request; | ||||
|  | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import com.fasterxml.jackson.core.type.TypeReference; | ||||
| import io.github.ollama4j.exceptions.OllamaBaseException; | ||||
| import io.github.ollama4j.models.chat.OllamaChatMessage; | ||||
| import io.github.ollama4j.models.response.OllamaResult; | ||||
| import io.github.ollama4j.models.chat.OllamaChatResponseModel; | ||||
| import io.github.ollama4j.models.chat.OllamaChatStreamObserver; | ||||
| @@ -31,14 +33,30 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller { | ||||
|         return "/api/chat"; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Parses streamed Response line from ollama chat. | ||||
|      * Using {@link com.fasterxml.jackson.databind.ObjectMapper#readValue(String, TypeReference)} should throw | ||||
|      * {@link IllegalArgumentException} in case of null line or {@link com.fasterxml.jackson.core.JsonParseException} | ||||
|      * in case the JSON Object cannot be parsed to a {@link OllamaChatResponseModel}. Thus, the ResponseModel should | ||||
|      * never be null. | ||||
|      * | ||||
|      * @param line streamed line of ollama stream response | ||||
|      * @param responseBuffer Stringbuffer to add latest response message part to | ||||
|      * @return TRUE, if ollama-Response has 'done' state | ||||
|      */ | ||||
|     @Override | ||||
|     protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) { | ||||
|         try { | ||||
|             OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class); | ||||
|             responseBuffer.append(ollamaResponseModel.getMessage().getContent()); | ||||
|             // it seems that under heavy load ollama responds with an empty chat message part in the streamed response | ||||
|             // thus, we null check the message and hope that the next streamed response has some message content again | ||||
|             OllamaChatMessage message = ollamaResponseModel.getMessage(); | ||||
|             if(message != null) { | ||||
|                 responseBuffer.append(message.getContent()); | ||||
|                 if (streamObserver != null) { | ||||
|                     streamObserver.notify(ollamaResponseModel); | ||||
|                 } | ||||
|             } | ||||
|             return ollamaResponseModel.isDone(); | ||||
|         } catch (JsonProcessingException e) { | ||||
|             LOG.error("Error parsing the Ollama chat response!", e); | ||||
|   | ||||
| @@ -0,0 +1,16 @@ | ||||
| package io.github.ollama4j.models.response; | ||||
| import java.util.ArrayList; | ||||
| import java.util.List; | ||||
|  | ||||
| import lombok.Data; | ||||
|  | ||||
| @Data | ||||
| public class LibraryModel { | ||||
|  | ||||
|     private String name; | ||||
|     private String description; | ||||
|     private String pullCount; | ||||
|     private int totalTags; | ||||
|     private List<String> popularTags = new ArrayList<>(); | ||||
|     private String lastUpdated; | ||||
| } | ||||
| @@ -0,0 +1,12 @@ | ||||
| package io.github.ollama4j.models.response; | ||||
|  | ||||
| import lombok.Data; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
| @Data | ||||
| public class LibraryModelDetail { | ||||
|  | ||||
|     private LibraryModel model; | ||||
|     private List<LibraryModelTag> tags; | ||||
| } | ||||
| @@ -0,0 +1,13 @@ | ||||
| package io.github.ollama4j.models.response; | ||||
|  | ||||
| import lombok.Data; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
| @Data | ||||
| public class LibraryModelTag { | ||||
|     private String name; | ||||
|     private String tag; | ||||
|     private String size; | ||||
|     private String lastUpdated; | ||||
| } | ||||
| @@ -80,6 +80,18 @@ class TestRealAPIs { | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     @Test | ||||
|     @Order(2) | ||||
|     void testListModelsFromLibrary() { | ||||
|         testEndpointReachability(); | ||||
|         try { | ||||
|             assertNotNull(ollamaAPI.listModelsFromLibrary()); | ||||
|             ollamaAPI.listModelsFromLibrary().forEach(System.out::println); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     @Test | ||||
|     @Order(2) | ||||
|     void testPullModel() { | ||||
|   | ||||
| @@ -2,6 +2,10 @@ package io.github.ollama4j.unittests; | ||||
|  | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.exceptions.OllamaBaseException; | ||||
| import io.github.ollama4j.exceptions.RoleNotFoundException; | ||||
| import io.github.ollama4j.models.chat.OllamaChatMessageRole; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel; | ||||
| import io.github.ollama4j.models.response.ModelDetail; | ||||
| import io.github.ollama4j.models.response.OllamaAsyncResultStreamer; | ||||
| import io.github.ollama4j.models.response.OllamaResult; | ||||
| @@ -14,7 +18,9 @@ import java.io.IOException; | ||||
| import java.net.URISyntaxException; | ||||
| import java.util.ArrayList; | ||||
| import java.util.Collections; | ||||
| import java.util.List; | ||||
|  | ||||
| import static org.junit.jupiter.api.Assertions.*; | ||||
| import static org.mockito.Mockito.*; | ||||
|  | ||||
| class TestMockedAPIs { | ||||
| @@ -97,6 +103,34 @@ class TestMockedAPIs { | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     @Test | ||||
|     void testEmbed() { | ||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|         String model = OllamaModelType.LLAMA2; | ||||
|         List<String> inputs = List.of("some prompt text"); | ||||
|         try { | ||||
|             when(ollamaAPI.embed(model, inputs)).thenReturn(new OllamaEmbedResponseModel()); | ||||
|             ollamaAPI.embed(model, inputs); | ||||
|             verify(ollamaAPI, times(1)).embed(model, inputs); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     @Test | ||||
|     void testEmbedWithEmbedRequestModel() { | ||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|         String model = OllamaModelType.LLAMA2; | ||||
|         List<String> inputs = List.of("some prompt text"); | ||||
|         try { | ||||
|             when(ollamaAPI.embed(new OllamaEmbedRequestModel(model, inputs))).thenReturn(new OllamaEmbedResponseModel()); | ||||
|             ollamaAPI.embed(new OllamaEmbedRequestModel(model, inputs)); | ||||
|             verify(ollamaAPI, times(1)).embed(new OllamaEmbedRequestModel(model, inputs)); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     @Test | ||||
|     void testAsk() { | ||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
| @@ -161,4 +195,68 @@ class TestMockedAPIs { | ||||
|         ollamaAPI.generateAsync(model, prompt, false); | ||||
|         verify(ollamaAPI, times(1)).generateAsync(model, prompt, false); | ||||
|     } | ||||
|  | ||||
|     @Test | ||||
|     void testAddCustomRole() { | ||||
|         OllamaAPI ollamaAPI = mock(OllamaAPI.class); | ||||
|         String roleName = "custom-role"; | ||||
|         OllamaChatMessageRole expectedRole = OllamaChatMessageRole.newCustomRole(roleName); | ||||
|         when(ollamaAPI.addCustomRole(roleName)).thenReturn(expectedRole); | ||||
|         OllamaChatMessageRole customRole = ollamaAPI.addCustomRole(roleName); | ||||
|         assertEquals(expectedRole, customRole); | ||||
|         verify(ollamaAPI, times(1)).addCustomRole(roleName); | ||||
|     } | ||||
|  | ||||
|     @Test | ||||
|     void testListRoles() { | ||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|         OllamaChatMessageRole role1 = OllamaChatMessageRole.newCustomRole("role1"); | ||||
|         OllamaChatMessageRole role2 = OllamaChatMessageRole.newCustomRole("role2"); | ||||
|         List<OllamaChatMessageRole> expectedRoles = List.of(role1, role2); | ||||
|         when(ollamaAPI.listRoles()).thenReturn(expectedRoles); | ||||
|         List<OllamaChatMessageRole> actualRoles = ollamaAPI.listRoles(); | ||||
|         assertEquals(expectedRoles, actualRoles); | ||||
|         verify(ollamaAPI, times(1)).listRoles(); | ||||
|     } | ||||
|  | ||||
|     @Test | ||||
|     void testGetRoleNotFound() { | ||||
|         OllamaAPI ollamaAPI = mock(OllamaAPI.class); | ||||
|         String roleName = "non-existing-role"; | ||||
|         try { | ||||
|             when(ollamaAPI.getRole(roleName)).thenThrow(new RoleNotFoundException("Role not found")); | ||||
|         } catch (RoleNotFoundException exception) { | ||||
|             throw new RuntimeException("Failed to run test: testGetRoleNotFound"); | ||||
|         } | ||||
|         try { | ||||
|             ollamaAPI.getRole(roleName); | ||||
|             fail("Expected RoleNotFoundException not thrown"); | ||||
|         } catch (RoleNotFoundException exception) { | ||||
|             assertEquals("Role not found", exception.getMessage()); | ||||
|         } | ||||
|         try { | ||||
|             verify(ollamaAPI, times(1)).getRole(roleName); | ||||
|         } catch (RoleNotFoundException exception) { | ||||
|             throw new RuntimeException("Failed to run test: testGetRoleNotFound"); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     @Test | ||||
|     void testGetRoleFound() { | ||||
|         OllamaAPI ollamaAPI = mock(OllamaAPI.class); | ||||
|         String roleName = "existing-role"; | ||||
|         OllamaChatMessageRole expectedRole = OllamaChatMessageRole.newCustomRole(roleName); | ||||
|         try { | ||||
|             when(ollamaAPI.getRole(roleName)).thenReturn(expectedRole); | ||||
|         } catch (RoleNotFoundException exception) { | ||||
|             throw new RuntimeException("Failed to run test: testGetRoleFound"); | ||||
|         } | ||||
|         try { | ||||
|             OllamaChatMessageRole actualRole = ollamaAPI.getRole(roleName); | ||||
|             assertEquals(expectedRole, actualRole); | ||||
|             verify(ollamaAPI, times(1)).getRole(roleName); | ||||
|         } catch (RoleNotFoundException exception) { | ||||
|             throw new RuntimeException("Failed to run test: testGetRoleFound"); | ||||
|         } | ||||
|     } | ||||
| } | ||||
|   | ||||
| @@ -1,36 +1,37 @@ | ||||
| package io.github.ollama4j.unittests.jackson; | ||||
| 
 | ||||
| import static org.junit.jupiter.api.Assertions.assertEquals; | ||||
| 
 | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbedRequestBuilder; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel; | ||||
| import org.junit.jupiter.api.BeforeEach; | ||||
| import org.junit.jupiter.api.Test; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestBuilder; | ||||
| import io.github.ollama4j.utils.OptionsBuilder; | ||||
| 
 | ||||
| public class TestEmbeddingsRequestSerialization extends AbstractSerializationTest<OllamaEmbeddingsRequestModel> { | ||||
| public class TestEmbedRequestSerialization extends AbstractSerializationTest<OllamaEmbedRequestModel> { | ||||
| 
 | ||||
|         private OllamaEmbeddingsRequestBuilder builder; | ||||
|         private OllamaEmbedRequestBuilder builder; | ||||
| 
 | ||||
|         @BeforeEach | ||||
|         public void init() { | ||||
|             builder = OllamaEmbeddingsRequestBuilder.getInstance("DummyModel","DummyPrompt"); | ||||
|             builder = OllamaEmbedRequestBuilder.getInstance("DummyModel","DummyPrompt"); | ||||
|         } | ||||
| 
 | ||||
|             @Test | ||||
|     public void testRequestOnlyMandatoryFields() { | ||||
|         OllamaEmbeddingsRequestModel req = builder.build(); | ||||
|         OllamaEmbedRequestModel req = builder.build(); | ||||
|         String jsonRequest = serialize(req); | ||||
|         assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class), req); | ||||
|         assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaEmbedRequestModel.class), req); | ||||
|     } | ||||
| 
 | ||||
|         @Test | ||||
|         public void testRequestWithOptions() { | ||||
|             OptionsBuilder b = new OptionsBuilder(); | ||||
|             OllamaEmbeddingsRequestModel req = builder | ||||
|             OllamaEmbedRequestModel req = builder | ||||
|                     .withOptions(b.setMirostat(1).build()).build(); | ||||
| 
 | ||||
|             String jsonRequest = serialize(req); | ||||
|             OllamaEmbeddingsRequestModel deserializeRequest = deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class); | ||||
|             OllamaEmbedRequestModel deserializeRequest = deserialize(jsonRequest,OllamaEmbedRequestModel.class); | ||||
|             assertEqualsAfterUnmarshalling(deserializeRequest, req); | ||||
|             assertEquals(1, deserializeRequest.getOptions().get("mirostat")); | ||||
|         } | ||||
		Reference in New Issue
	
	Block a user