Mirror of https://github.com/amithkoujalgi/ollama4j.git, synced 2025-10-31 00:20:40 +01:00
Compare commits: 1.0.74 ... 62f3c3efa5 (2 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 62f3c3efa5 |  |
|  | 613e152e7d |  |
							
								
								
									
.github/workflows/maven-publish.yml (vendored): 69 lines changed
							| @@ -1,41 +1,68 @@ | ||||
| # This workflow will build a package using Maven and then publish it to GitHub packages when a release is created | ||||
| # For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path | ||||
|  | ||||
| name: Release Artifacts | ||||
| name: Test and Publish Package | ||||
|  | ||||
| #on: | ||||
| #  release: | ||||
| #    types: [ "created" ] | ||||
|  | ||||
| on: | ||||
|   release: | ||||
|     types: [ created ] | ||||
|   push: | ||||
|     branches: [ "main" ] | ||||
|   workflow_dispatch: | ||||
|  | ||||
| jobs: | ||||
|   build: | ||||
|  | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       contents: read | ||||
|       contents: write | ||||
|       packages: write | ||||
|  | ||||
|     steps: | ||||
|       - uses: actions/checkout@v3 | ||||
|       - name: Set up JDK 17 | ||||
|       - name: Set up JDK 11 | ||||
|         uses: actions/setup-java@v3 | ||||
|         with: | ||||
|           java-version: '17' | ||||
|           distribution: 'temurin' | ||||
|           java-version: '11' | ||||
|           distribution: 'adopt-hotspot' | ||||
|           server-id: github # Value of the distributionManagement/repository/id field of the pom.xml | ||||
|           settings-path: ${{ github.workspace }} # location for the settings.xml file | ||||
|  | ||||
|       - name: Find and Replace | ||||
|         uses: jacobtomlinson/gha-find-replace@v3 | ||||
|         with: | ||||
|           find: "ollama4j-revision" | ||||
|           replace: ${{ github.ref_name }} | ||||
|           regex: false | ||||
|  | ||||
|       - name: Build with Maven | ||||
|         run: mvn --file pom.xml -U clean package -Punit-tests | ||||
|  | ||||
|       - name: Publish to GitHub Packages Apache Maven | ||||
|         run: mvn deploy -s $GITHUB_WORKSPACE/settings.xml --file pom.xml | ||||
|       - name: Set up Apache Maven Central (Overwrite settings.xml) | ||||
|         uses: actions/setup-java@v3 | ||||
|         with: # running setup-java again overwrites the settings.xml | ||||
|           java-version: '11' | ||||
|           distribution: 'adopt-hotspot' | ||||
|           cache: 'maven' | ||||
|           server-id: ossrh | ||||
|           server-username: MAVEN_USERNAME | ||||
|           server-password: MAVEN_PASSWORD | ||||
|           gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }} | ||||
|           gpg-passphrase: MAVEN_GPG_PASSPHRASE | ||||
|       - name: Set up Maven cache | ||||
|         uses: actions/cache@v3 | ||||
|         with: | ||||
|           path: ~/.m2/repository | ||||
|           key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} | ||||
|           restore-keys: | | ||||
|             ${{ runner.os }}-maven- | ||||
|       - name: Build | ||||
|         run: mvn -B -ntp clean install | ||||
|       - name: Upload coverage reports to Codecov | ||||
|         uses: codecov/codecov-action@v3 | ||||
|         env: | ||||
|           GITHUB_TOKEN: ${{ github.token }} | ||||
|           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} | ||||
|       - name: Publish to GitHub Packages Apache Maven | ||||
|         #        if: > | ||||
|         #          github.event_name != 'pull_request' && | ||||
|         #          github.ref_name == 'main' && | ||||
|         #          contains(github.event.head_commit.message, 'release') | ||||
|         run: | | ||||
|           git config --global user.email "koujalgi.amith@gmail.com" | ||||
|           git config --global user.name "amithkoujalgi" | ||||
|           mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform | ||||
|         env: | ||||
|           MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }} | ||||
|           MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }} | ||||
|           MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} | ||||
							
								
								
									
.github/workflows/publish-docs.yml (vendored): 5 lines changed
							| @@ -2,8 +2,9 @@ | ||||
| name: Deploy Docs to GH Pages | ||||
|  | ||||
| on: | ||||
|   release: | ||||
|     types: [ created ] | ||||
|   # Runs on pushes targeting the default branch | ||||
|   push: | ||||
|     branches: [ "main" ] | ||||
|  | ||||
|   # Allows you to run this workflow manually from the Actions tab | ||||
|   workflow_dispatch: | ||||
|   | ||||
							
								
								
									
.github/workflows/publish-javadoc.yml (vendored, new file): 52 lines
							| @@ -0,0 +1,52 @@ | ||||
| # Simple workflow for deploying static content to GitHub Pages | ||||
| name: Deploy Javadoc content to Pages | ||||
|  | ||||
| on: | ||||
|   # Runs on pushes targeting the default branch | ||||
|   push: | ||||
|     branches: [ "none" ] | ||||
|  | ||||
|   # Allows you to run this workflow manually from the Actions tab | ||||
|   workflow_dispatch: | ||||
|  | ||||
| # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages | ||||
| permissions: | ||||
|   contents: read | ||||
|   pages: write | ||||
|   id-token: write | ||||
|   packages: write | ||||
| # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. | ||||
| # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. | ||||
| concurrency: | ||||
|   group: "pages" | ||||
|   cancel-in-progress: false | ||||
|  | ||||
| jobs: | ||||
|   # Single deploy job since we're just deploying | ||||
|   deploy: | ||||
|     runs-on: ubuntu-latest | ||||
|  | ||||
|     environment: | ||||
|       name: github-pages | ||||
|       url: ${{ steps.deployment.outputs.page_url }} | ||||
|     steps: | ||||
|       - uses: actions/checkout@v3 | ||||
|       - name: Set up JDK 11 | ||||
|         uses: actions/setup-java@v3 | ||||
|         with: | ||||
|           java-version: '11' | ||||
|           distribution: 'adopt-hotspot' | ||||
|           server-id: github # Value of the distributionManagement/repository/id field of the pom.xml | ||||
|           settings-path: ${{ github.workspace }} # location for the settings.xml file | ||||
|       - name: Build with Maven | ||||
|         run: mvn --file pom.xml -U clean package | ||||
|       - name: Setup Pages | ||||
|         uses: actions/configure-pages@v3 | ||||
|       - name: Upload artifact | ||||
|         uses: actions/upload-pages-artifact@v2 | ||||
|         with: | ||||
|           # Upload entire repository | ||||
|           path: './target/apidocs/.' | ||||
|       - name: Deploy to GitHub Pages | ||||
|         id: deployment | ||||
|         uses: actions/deploy-pages@v2 | ||||
							
								
								
									
README.md: 112 lines changed
							| @@ -1,5 +1,3 @@ | ||||
| <div style="text-align: center"> | ||||
|  | ||||
| ### Ollama4j | ||||
|  | ||||
| <img src='https://raw.githubusercontent.com/amithkoujalgi/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon"> | ||||
| @@ -11,36 +9,23 @@ Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/). | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
| [](https://codecov.io/gh/amithkoujalgi/ollama4j) | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
| </div> | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
| ## Table of Contents | ||||
|  | ||||
| - [How does it work?](#how-does-it-work) | ||||
| - [Requirements](#requirements) | ||||
| - [Installation](#installation) | ||||
| - [API Spec](https://amithkoujalgi.github.io/ollama4j/docs/category/apis---model-management) | ||||
| - [Javadoc Spec](https://amithkoujalgi.github.io/ollama4j/apidocs/) | ||||
| - [API Spec](#api-spec) | ||||
| - [Demo APIs](#try-out-the-apis-with-ollama-server) | ||||
| - [Development](#development) | ||||
| - [Contributions](#get-involved) | ||||
| - [References](#references) | ||||
| @@ -75,68 +60,25 @@ Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/). | ||||
|  | ||||
| #### Installation | ||||
|  | ||||
| Check the releases [here](https://github.com/amithkoujalgi/ollama4j/releases). | ||||
|  | ||||
|  | ||||
|  | ||||
| ##### For Maven | ||||
|  | ||||
| 1. In your Maven project, add this dependency: | ||||
| In your Maven project, add this dependency: | ||||
|  | ||||
| ```xml | ||||
|  | ||||
| <dependency> | ||||
|     <groupId>io.github.amithkoujalgi</groupId> | ||||
|     <artifactId>ollama4j</artifactId> | ||||
|     <version>v1.0.74</version> | ||||
|     <version>1.0.70</version> | ||||
| </dependency> | ||||
| ``` | ||||
|  | ||||
| 2. Add repository to your project's pom.xml: | ||||
|  | ||||
| ```xml | ||||
|  | ||||
| <repositories> | ||||
|     <repository> | ||||
|         <id>github</id> | ||||
|         <name>GitHub Apache Maven Packages</name> | ||||
|         <url>https://maven.pkg.github.com/amithkoujalgi/ollama4j</url> | ||||
|         <releases> | ||||
|             <enabled>true</enabled> | ||||
|         </releases> | ||||
|         <snapshots> | ||||
|             <enabled>true</enabled> | ||||
|         </snapshots> | ||||
|     </repository> | ||||
| </repositories> | ||||
| ``` | ||||
|  | ||||
| 3. Add GitHub server to settings.xml. (Usually available at ~/.m2/settings.xml) | ||||
|  | ||||
| ```xml | ||||
|  | ||||
| <settings xmlns="http://maven.apache.org/SETTINGS/1.0.0" | ||||
|           xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" | ||||
|           xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 | ||||
|                       http://maven.apache.org/xsd/settings-1.0.0.xsd"> | ||||
|     <servers> | ||||
|         <server> | ||||
|             <id>github</id> | ||||
|             <username>YOUR-USERNAME</username> | ||||
|             <password>YOUR-TOKEN</password> | ||||
|         </server> | ||||
|     </servers> | ||||
| </settings> | ||||
| ``` | ||||
|  | ||||
| ##### For Gradle | ||||
| or | ||||
|  | ||||
| In your Gradle project, add the dependency using the Kotlin DSL or the Groovy DSL: | ||||
|  | ||||
| ```kotlin | ||||
| dependencies { | ||||
|  | ||||
|     val ollama4jVersion = "1.0.74" | ||||
|     val ollama4jVersion = "1.0.70" | ||||
|  | ||||
|     implementation("io.github.amithkoujalgi:ollama4j:$ollama4jVersion") | ||||
| } | ||||
| @@ -144,19 +86,15 @@ dependencies { | ||||
|  | ||||
| ```groovy | ||||
| dependencies { | ||||
|     implementation("io.github.amithkoujalgi:ollama4j:1.0.74") | ||||
|     implementation("io.github.amithkoujalgi:ollama4j:1.0.70") | ||||
| } | ||||
| ``` | ||||
|  | ||||
| [//]: # (Latest release:) | ||||
| Latest release: | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
| [//]: # ([![][lib-shield]][lib]) | ||||
| [![][lib-shield]][lib] | ||||
|  | ||||
| [lib]: https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j | ||||
|  | ||||
| @@ -215,9 +153,6 @@ Actions CI workflow. | ||||
| - [x] Use lombok | ||||
| - [x] Update request body creation with Java objects | ||||
| - [ ] Async APIs for images | ||||
| - [ ] Support for function calling with models like Mistral | ||||
|     - [x] generate in sync mode | ||||
|     - [ ] generate in async mode | ||||
| - [ ] Add custom headers to requests | ||||
| - [x] Add additional params for `ask` APIs such as: | ||||
|     - [x] `options`: additional model parameters for the Modelfile such as `temperature` - | ||||
| @@ -237,28 +172,11 @@ Contributions are most welcome! Whether it's reporting a bug, proposing an enhan | ||||
| with code - any sort | ||||
| of contribution is much appreciated. | ||||
|  | ||||
| ### References | ||||
|  | ||||
| - [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md) | ||||
|  | ||||
| ### Credits | ||||
|  | ||||
| The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/) | ||||
| project. | ||||
|  | ||||
| ### References | ||||
|  | ||||
| <div style="text-align: center"> | ||||
|  | ||||
| **Thanks to the amazing contributors** | ||||
|  | ||||
| <a href="https://github.com/amithkoujalgi/ollama4j/graphs/contributors"> | ||||
|   <img src="https://contrib.rocks/image?repo=amithkoujalgi/ollama4j" /> | ||||
| </a> | ||||
|  | ||||
| ### Appreciate my work? | ||||
|  | ||||
| <a href="https://www.buymeacoffee.com/amithkoujalgi" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a> | ||||
|  | ||||
|  | ||||
| </div> | ||||
|  | ||||
| - [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md) | ||||
|   | ||||
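For readers skimming the README hunks above, here is a minimal quick-start sketch assembled only from calls that appear elsewhere in this compare (the `OllamaAPI` constructor, `setRequestTimeoutSeconds`, `OptionsBuilder`, and the three-argument `generate` overload from the side of the compare that drops the `raw` flag). Host and model name are placeholders, and the exact overload available depends on which side of the compare you are on.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class QuickStart {
    public static void main(String[] args) throws Exception {
        // Placeholder host and model; any locally available Ollama model works.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        ollamaAPI.setRequestTimeoutSeconds(60);

        // Blocking call without a stream handler; on the other side of this
        // compare the overload also takes a boolean `raw` flag before options.
        System.out.println(
                ollamaAPI.generate("mistral", "Why is the sky blue?",
                        new OptionsBuilder().build()).getResponse());
    }
}
```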
| @@ -1,5 +1,5 @@ | ||||
| --- | ||||
| sidebar_position: 3 | ||||
| sidebar_position: 2 | ||||
| --- | ||||
|  | ||||
| # Generate - Async | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| --- | ||||
| sidebar_position: 4 | ||||
| sidebar_position: 3 | ||||
| --- | ||||
|  | ||||
| # Generate - With Image Files | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| --- | ||||
| sidebar_position: 5 | ||||
| sidebar_position: 4 | ||||
| --- | ||||
|  | ||||
| # Generate - With Image URLs | ||||
|   | ||||
| @@ -1,271 +0,0 @@ | ||||
| --- | ||||
| sidebar_position: 2 | ||||
| --- | ||||
|  | ||||
| # Generate - With Tools | ||||
|  | ||||
| This API lets you perform [function calling](https://docs.mistral.ai/capabilities/function_calling/) using LLMs in a | ||||
| synchronous way. | ||||
| This API correlates to | ||||
| the [generate](https://github.com/ollama/ollama/blob/main/docs/api.md#request-raw-mode) API with `raw` mode. | ||||
|  | ||||
| :::note | ||||
|  | ||||
| This is an only an experimental implementation and has a very basic design. | ||||
|  | ||||
| Currently, built and tested for [Mistral's latest model](https://ollama.com/library/mistral) only. We could redesign | ||||
| this | ||||
| in the future if tooling is supported for more models with a generic interaction standard from Ollama. | ||||
|  | ||||
| ::: | ||||
|  | ||||
| ### Function Calling/Tools | ||||
|  | ||||
| Assume you want to call a method in your code based on the response generated from the model. | ||||
| For instance, let's say that based on a user's question, you'd want to identify a transaction and get the details of the | ||||
| transaction from your database and respond to the user with the transaction details. | ||||
|  | ||||
| You could do that with ease with the `function calling` capabilities of the models by registering your `tools`. | ||||
|  | ||||
| ### Create Functions | ||||
|  | ||||
| This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns a | ||||
| value. | ||||
|  | ||||
| ```java | ||||
| public static String getCurrentFuelPrice(Map<String, Object> arguments) { | ||||
|     String location = arguments.get("location").toString(); | ||||
|     String fuelType = arguments.get("fuelType").toString(); | ||||
|     return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; | ||||
| } | ||||
| ``` | ||||
|  | ||||
| This function takes the argument `city` and performs an operation with the argument and returns a | ||||
| value. | ||||
|  | ||||
| ```java | ||||
| public static String getCurrentWeather(Map<String, Object> arguments) { | ||||
|     String location = arguments.get("city").toString(); | ||||
|     return "Currently " + location + "'s weather is nice."; | ||||
| } | ||||
| ``` | ||||
|  | ||||
| ### Define Tool Specifications | ||||
|  | ||||
| Lets define a sample tool specification called **Fuel Price Tool** for getting the current fuel price. | ||||
|  | ||||
| - Specify the function `name`, `description`, and `required` properties (`location` and `fuelType`). | ||||
| - Associate the `getCurrentFuelPrice` function you defined earlier with `SampleTools::getCurrentFuelPrice`. | ||||
|  | ||||
| ```java | ||||
| MistralTools.ToolSpecification fuelPriceToolSpecification = MistralTools.ToolSpecification.builder() | ||||
|         .functionName("current-fuel-price") | ||||
|         .functionDesc("Get current fuel price") | ||||
|         .props( | ||||
|                 new MistralTools.PropsBuilder() | ||||
|                         .withProperty("location", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||
|                         .withProperty("fuelType", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build()) | ||||
|                         .build() | ||||
|         ) | ||||
|         .toolDefinition(SampleTools::getCurrentFuelPrice) | ||||
|         .build(); | ||||
| ``` | ||||
|  | ||||
| Lets also define a sample tool specification called **Weather Tool** for getting the current weather. | ||||
|  | ||||
| - Specify the function `name`, `description`, and `required` property (`city`). | ||||
| - Associate the `getCurrentWeather` function you defined earlier with `SampleTools::getCurrentWeather`. | ||||
|  | ||||
| ```java | ||||
| MistralTools.ToolSpecification weatherToolSpecification = MistralTools.ToolSpecification.builder() | ||||
|         .functionName("current-weather") | ||||
|         .functionDesc("Get current weather") | ||||
|         .props( | ||||
|                 new MistralTools.PropsBuilder() | ||||
|                         .withProperty("city", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||
|                         .build() | ||||
|         ) | ||||
|         .toolDefinition(SampleTools::getCurrentWeather) | ||||
|         .build(); | ||||
| ``` | ||||
|  | ||||
| ### Register the Tools | ||||
|  | ||||
| Register the defined tools (`fuel price` and `weather`) with the OllamaAPI. | ||||
|  | ||||
| ```shell | ||||
| ollamaAPI.registerTool(fuelPriceToolSpecification); | ||||
| ollamaAPI.registerTool(weatherToolSpecification); | ||||
| ``` | ||||
|  | ||||
| ### Create prompt with Tools | ||||
|  | ||||
| `Prompt 1`: Create a prompt asking for the petrol price in Bengaluru using the defined fuel price and weather tools. | ||||
|  | ||||
| ```shell | ||||
| String prompt1 = new MistralTools.PromptBuilder() | ||||
|         .withToolSpecification(fuelPriceToolSpecification) | ||||
|         .withToolSpecification(weatherToolSpecification) | ||||
|         .withPrompt("What is the petrol price in Bengaluru?") | ||||
|         .build(); | ||||
| OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, false, new OptionsBuilder().build()); | ||||
| for (Map.Entry<ToolDef, Object> r : toolsResult.getToolResults().entrySet()) { | ||||
|   System.out.printf("[Response from tool '%s']: %s%n", r.getKey().getName(), r.getValue().toString()); | ||||
| } | ||||
| ``` | ||||
|  | ||||
| Now, fire away your question to the model. | ||||
|  | ||||
| You will get a response similar to: | ||||
|  | ||||
| ::::tip[LLM Response] | ||||
|  | ||||
| [Response from tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L | ||||
|  | ||||
| :::: | ||||
|  | ||||
| `Prompt 2`: Create a prompt asking for the current weather in Bengaluru using the same tools. | ||||
|  | ||||
| ```shell | ||||
| String prompt2 = new MistralTools.PromptBuilder() | ||||
|         .withToolSpecification(fuelPriceToolSpecification) | ||||
|         .withToolSpecification(weatherToolSpecification) | ||||
|         .withPrompt("What is the current weather in Bengaluru?") | ||||
|         .build(); | ||||
| OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, false, new OptionsBuilder().build()); | ||||
| for (Map.Entry<ToolDef, Object> r : toolsResult.getToolResults().entrySet()) { | ||||
|   System.out.printf("[Response from tool '%s']: %s%n", r.getKey().getName(), r.getValue().toString()); | ||||
| } | ||||
| ``` | ||||
|  | ||||
| Again, fire away your question to the model. | ||||
|  | ||||
| You will get a response similar to: | ||||
|  | ||||
| ::::tip[LLM Response] | ||||
|  | ||||
| [Response from tool 'current-weather']: Currently Bengaluru's weather is nice | ||||
| :::: | ||||
|  | ||||
| ### Full Example | ||||
|  | ||||
| ```java | ||||
|  | ||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||
| import io.github.amithkoujalgi.ollama4j.core.tools.ToolDef; | ||||
| import io.github.amithkoujalgi.ollama4j.core.tools.MistralTools; | ||||
| import io.github.amithkoujalgi.ollama4j.core.tools.OllamaToolsResult; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||
|  | ||||
| import java.io.IOException; | ||||
| import java.util.Arrays; | ||||
| import java.util.Map; | ||||
|  | ||||
| public class FunctionCallingWithMistral { | ||||
|     public static void main(String[] args) throws Exception { | ||||
|         String host = "http://localhost:11434/"; | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|         ollamaAPI.setRequestTimeoutSeconds(60); | ||||
|  | ||||
|         String model = "mistral"; | ||||
|  | ||||
|  | ||||
|         MistralTools.ToolSpecification fuelPriceToolSpecification = MistralTools.ToolSpecification.builder() | ||||
|                 .functionName("current-fuel-price") | ||||
|                 .functionDesc("Get current fuel price") | ||||
|                 .props( | ||||
|                         new MistralTools.PropsBuilder() | ||||
|                                 .withProperty("location", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||
|                                 .withProperty("fuelType", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build()) | ||||
|                                 .build() | ||||
|                 ) | ||||
|                 .toolDefinition(SampleTools::getCurrentFuelPrice) | ||||
|                 .build(); | ||||
|  | ||||
|         MistralTools.ToolSpecification weatherToolSpecification = MistralTools.ToolSpecification.builder() | ||||
|                 .functionName("current-weather") | ||||
|                 .functionDesc("Get current weather") | ||||
|                 .props( | ||||
|                         new MistralTools.PropsBuilder() | ||||
|                                 .withProperty("city", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||
|                                 .build() | ||||
|                 ) | ||||
|                 .toolDefinition(SampleTools::getCurrentWeather) | ||||
|                 .build(); | ||||
|  | ||||
|         ollamaAPI.registerTool(fuelPriceToolSpecification); | ||||
|         ollamaAPI.registerTool(weatherToolSpecification); | ||||
|  | ||||
|         String prompt1 = new MistralTools.PromptBuilder() | ||||
|                 .withToolSpecification(fuelPriceToolSpecification) | ||||
|                 .withToolSpecification(weatherToolSpecification) | ||||
|                 .withPrompt("What is the petrol price in Bengaluru?") | ||||
|                 .build(); | ||||
|         String prompt2 = new MistralTools.PromptBuilder() | ||||
|                 .withToolSpecification(fuelPriceToolSpecification) | ||||
|                 .withToolSpecification(weatherToolSpecification) | ||||
|                 .withPrompt("What is the current weather in Bengaluru?") | ||||
|                 .build(); | ||||
|  | ||||
|         ask(ollamaAPI, model, prompt1); | ||||
|         ask(ollamaAPI, model, prompt2); | ||||
|     } | ||||
|  | ||||
|     public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, false, new OptionsBuilder().build()); | ||||
|         for (Map.Entry<ToolDef, Object> r : toolsResult.getToolResults().entrySet()) { | ||||
|             System.out.printf("[Response from tool '%s']: %s%n", r.getKey().getName(), r.getValue().toString()); | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| class SampleTools { | ||||
|     public static String getCurrentFuelPrice(Map<String, Object> arguments) { | ||||
|         String location = arguments.get("location").toString(); | ||||
|         String fuelType = arguments.get("fuelType").toString(); | ||||
|         return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; | ||||
|     } | ||||
|  | ||||
|     public static String getCurrentWeather(Map<String, Object> arguments) { | ||||
|         String location = arguments.get("city").toString(); | ||||
|         return "Currently " + location + "'s weather is nice."; | ||||
|     } | ||||
| } | ||||
|  | ||||
| ``` | ||||
|  | ||||
| Run this full example and you will get a response similar to: | ||||
|  | ||||
| ::::tip[LLM Response] | ||||
|  | ||||
| [Response from tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L | ||||
|  | ||||
| [Response from tool 'current-weather']: Currently Bengaluru's weather is nice | ||||
| :::: | ||||
|  | ||||
| ### Room for improvement | ||||
|  | ||||
| Instead of explicitly registering `ollamaAPI.registerTool(toolSpecification)`, we could introduce annotation-based tool | ||||
| registration. For example: | ||||
|  | ||||
| ```java | ||||
|  | ||||
| @ToolSpec(name = "current-fuel-price", desc = "Get current fuel price") | ||||
| public String getCurrentFuelPrice(Map<String, Object> arguments) { | ||||
|     String location = arguments.get("location").toString(); | ||||
|     String fuelType = arguments.get("fuelType").toString(); | ||||
|     return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; | ||||
| } | ||||
| ``` | ||||
|  | ||||
| Instead of passing a map of args `Map<String, Object> arguments` to the tool functions, we could support passing | ||||
| specific args separately with their data types. For example: | ||||
|  | ||||
| ```shell | ||||
| public String getCurrentFuelPrice(String location, String fuelType) { | ||||
|     return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; | ||||
| } | ||||
| ``` | ||||
|  | ||||
| Updating async/chat APIs with support for tool-based generation.  | ||||
| @@ -11,7 +11,7 @@ the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#gener | ||||
| Use the `OptionBuilder` to build the `Options` object | ||||
| with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values). | ||||
| Refer | ||||
| to [this](/apis-extras/options-builder). | ||||
| to [this](/docs/apis-extras/options-builder). | ||||
|  | ||||
| ## Try asking a question about the model. | ||||
|  | ||||
| @@ -53,26 +53,25 @@ public class Main { | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|         // define a stream handler (Consumer<String>) | ||||
|         OllamaStreamHandler streamHandler = (s) -> { | ||||
|             System.out.println(s); | ||||
|            System.out.println(s); | ||||
|         }; | ||||
|  | ||||
|         // Should be called using seperate thread to gain non blocking streaming effect. | ||||
|         OllamaResult result = ollamaAPI.generate(config.getModel(), | ||||
|                 "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||
|                 new OptionsBuilder().build(), streamHandler); | ||||
|  | ||||
|         System.out.println("Full response: " + result.getResponse()); | ||||
|           "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||
|           new OptionsBuilder().build(), streamHandler); | ||||
|          | ||||
|         System.out.println("Full response: " +result.getResponse()); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| You will get a response similar to: | ||||
|  | ||||
| > The | ||||
| > The capital | ||||
| > The capital of | ||||
| > The capital of France | ||||
| > The capital of France is | ||||
| > The capital of France is  | ||||
| > The capital of France is Paris | ||||
| > The capital of France is Paris. | ||||
| > Full response: The capital of France is Paris. | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| --- | ||||
| sidebar_position: 6 | ||||
| sidebar_position: 5 | ||||
| --- | ||||
|  | ||||
| # Prompt Builder | ||||
|   | ||||
| @@ -40,8 +40,6 @@ const config = { | ||||
|             /** @type {import('@docusaurus/preset-classic').Options} */ | ||||
|             ({ | ||||
|                 docs: { | ||||
|                     path: 'docs', | ||||
|                     routeBasePath: '', // change this to any URL route you'd want. For example: `home` - if you want /home/intro. | ||||
|                     sidebarPath: './sidebars.js', | ||||
|                     // Please change this to your repo. | ||||
|                     // Remove this to remove the "edit this page" links. | ||||
| @@ -98,7 +96,7 @@ const config = { | ||||
|                         items: [ | ||||
|                             { | ||||
|                                 label: 'Tutorial', | ||||
|                                 to: '/intro', | ||||
|                                 to: '/docs/intro', | ||||
|                             }, | ||||
|                         ], | ||||
|                     }, | ||||
|   | ||||
							
								
								
									
docs/package-lock.json (generated): 1951 lines changed (diff suppressed because it is too large)
							| @@ -14,9 +14,9 @@ | ||||
|     "write-heading-ids": "docusaurus write-heading-ids" | ||||
|   }, | ||||
|   "dependencies": { | ||||
|     "@docusaurus/core": "^3.4.0", | ||||
|     "@docusaurus/preset-classic": "^3.4.0", | ||||
|     "@docusaurus/theme-mermaid": "^3.4.0", | ||||
|     "@docusaurus/core": "3.0.1", | ||||
|     "@docusaurus/preset-classic": "3.0.1", | ||||
|     "@docusaurus/theme-mermaid": "^3.0.1", | ||||
|     "@mdx-js/react": "^3.0.0", | ||||
|     "clsx": "^2.0.0", | ||||
|     "prism-react-renderer": "^2.3.0", | ||||
| @@ -24,8 +24,8 @@ | ||||
|     "react-dom": "^18.0.0" | ||||
|   }, | ||||
|   "devDependencies": { | ||||
|     "@docusaurus/module-type-aliases": "^3.4.0", | ||||
|     "@docusaurus/types": "^3.4.0" | ||||
|     "@docusaurus/module-type-aliases": "3.0.1", | ||||
|     "@docusaurus/types": "3.0.1" | ||||
|   }, | ||||
|   "browserslist": { | ||||
|     "production": [ | ||||
|   | ||||
| @@ -19,7 +19,7 @@ function HomepageHeader() { | ||||
|             <div className={styles.buttons}> | ||||
|                 <Link | ||||
|                     className="button button--secondary button--lg" | ||||
|                     to="/intro"> | ||||
|                     to="/docs/intro"> | ||||
|                     Getting Started | ||||
|                 </Link> | ||||
|             </div> | ||||
|   | ||||
| @@ -1,68 +0,0 @@ | ||||
| ## This workflow will build a package using Maven and then publish it to GitHub packages when a release is created | ||||
| ## For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path | ||||
| # | ||||
| #name: Test and Publish Package | ||||
| # | ||||
| ##on: | ||||
| ##  release: | ||||
| ##    types: [ "created" ] | ||||
| # | ||||
| #on: | ||||
| #  push: | ||||
| #    branches: [ "main" ] | ||||
| #  workflow_dispatch: | ||||
| # | ||||
| #jobs: | ||||
| #  build: | ||||
| #    runs-on: ubuntu-latest | ||||
| #    permissions: | ||||
| #      contents: write | ||||
| #      packages: write | ||||
| #    steps: | ||||
| #      - uses: actions/checkout@v3 | ||||
| #      - name: Set up JDK 11 | ||||
| #        uses: actions/setup-java@v3 | ||||
| #        with: | ||||
| #          java-version: '11' | ||||
| #          distribution: 'adopt-hotspot' | ||||
| #          server-id: github # Value of the distributionManagement/repository/id field of the pom.xml | ||||
| #          settings-path: ${{ github.workspace }} # location for the settings.xml file | ||||
| #      - name: Build with Maven | ||||
| #        run: mvn --file pom.xml -U clean package -Punit-tests | ||||
| #      - name: Set up Apache Maven Central (Overwrite settings.xml) | ||||
| #        uses: actions/setup-java@v3 | ||||
| #        with: # running setup-java again overwrites the settings.xml | ||||
| #          java-version: '11' | ||||
| #          distribution: 'adopt-hotspot' | ||||
| #          cache: 'maven' | ||||
| #          server-id: ossrh | ||||
| #          server-username: MAVEN_USERNAME | ||||
| #          server-password: MAVEN_PASSWORD | ||||
| #          gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }} | ||||
| #          gpg-passphrase: MAVEN_GPG_PASSPHRASE | ||||
| #      - name: Set up Maven cache | ||||
| #        uses: actions/cache@v3 | ||||
| #        with: | ||||
| #          path: ~/.m2/repository | ||||
| #          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} | ||||
| #          restore-keys: | | ||||
| #            ${{ runner.os }}-maven- | ||||
| #      - name: Build | ||||
| #        run: mvn -B -ntp clean install | ||||
| #      - name: Upload coverage reports to Codecov | ||||
| #        uses: codecov/codecov-action@v3 | ||||
| #        env: | ||||
| #          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} | ||||
| #      - name: Publish to GitHub Packages Apache Maven | ||||
| #        #        if: > | ||||
| #        #          github.event_name != 'pull_request' && | ||||
| #        #          github.ref_name == 'main' && | ||||
| #        #          contains(github.event.head_commit.message, 'release') | ||||
| #        run: | | ||||
| #          git config --global user.email "koujalgi.amith@gmail.com" | ||||
| #          git config --global user.name "amithkoujalgi" | ||||
| #          mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform | ||||
| #        env: | ||||
| #          MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }} | ||||
| #          MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }} | ||||
| #          MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} | ||||
							
								
								
									
pom.xml: 113 lines changed
							| @@ -1,16 +1,14 @@ | ||||
| <?xml version="1.0" encoding="UTF-8"?> | ||||
| <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" | ||||
|          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> | ||||
| <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> | ||||
|     <modelVersion>4.0.0</modelVersion> | ||||
|  | ||||
|     <groupId>io.github.amithkoujalgi</groupId> | ||||
|     <artifactId>ollama4j</artifactId> | ||||
|     <version>ollama4j-revision</version> | ||||
|     <version>1.0.73-SNAPSHOT</version> | ||||
|  | ||||
|     <name>Ollama4j</name> | ||||
|     <description>Java library for interacting with Ollama API.</description> | ||||
|     <url>https://github.com/amithkoujalgi/ollama4j</url> | ||||
|     <packaging>jar</packaging> | ||||
|  | ||||
|     <properties> | ||||
|         <maven.compiler.source>11</maven.compiler.source> | ||||
| @@ -129,15 +127,15 @@ | ||||
|                     </execution> | ||||
|                 </executions> | ||||
|             </plugin> | ||||
|             <!--            <plugin>--> | ||||
|             <!--                <groupId>org.apache.maven.plugins</groupId>--> | ||||
|             <!--                <artifactId>maven-release-plugin</artifactId>--> | ||||
|             <!--                <version>3.0.1</version>--> | ||||
|             <!--                <configuration>--> | ||||
|             <!--                    <!–                    <goals>install</goals>–>--> | ||||
|             <!--                    <tagNameFormat>v@{project.version}</tagNameFormat>--> | ||||
|             <!--                </configuration>--> | ||||
|             <!--            </plugin>--> | ||||
|             <plugin> | ||||
|                 <groupId>org.apache.maven.plugins</groupId> | ||||
|                 <artifactId>maven-release-plugin</artifactId> | ||||
|                 <version>3.0.1</version> | ||||
|                 <configuration> | ||||
|                     <!--                    <goals>install</goals>--> | ||||
|                     <tagNameFormat>v@{project.version}</tagNameFormat> | ||||
|                 </configuration> | ||||
|             </plugin> | ||||
|         </plugins> | ||||
|     </build> | ||||
|  | ||||
| @@ -161,7 +159,7 @@ | ||||
|         <dependency> | ||||
|             <groupId>ch.qos.logback</groupId> | ||||
|             <artifactId>logback-classic</artifactId> | ||||
|             <version>1.5.6</version> | ||||
|             <version>1.4.12</version> | ||||
|             <scope>test</scope> | ||||
|         </dependency> | ||||
|         <dependency> | ||||
| @@ -189,23 +187,14 @@ | ||||
|         </dependency> | ||||
|     </dependencies> | ||||
|  | ||||
|     <!--    <distributionManagement>--> | ||||
|     <!--        <snapshotRepository>--> | ||||
|     <!--            <id>ossrh</id>--> | ||||
|     <!--            <url>https://s01.oss.sonatype.org/content/repositories/snapshots</url>--> | ||||
|     <!--        </snapshotRepository>--> | ||||
|     <!--        <repository>--> | ||||
|     <!--            <id>ossrh</id>--> | ||||
|     <!--            <url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2</url>--> | ||||
|     <!--        </repository>--> | ||||
|     <!--    </distributionManagement>--> | ||||
|  | ||||
|     <!--    Replaced publishing packages to GitHub Packages instead of Maven central --> | ||||
|     <distributionManagement> | ||||
|         <snapshotRepository> | ||||
|             <id>ossrh</id> | ||||
|             <url>https://s01.oss.sonatype.org/content/repositories/snapshots</url> | ||||
|         </snapshotRepository> | ||||
|         <repository> | ||||
|             <id>github</id> | ||||
|             <name>GitHub Packages</name> | ||||
|             <url>https://maven.pkg.github.com/amithkoujalgi/ollama4j</url> | ||||
|             <id>ossrh</id> | ||||
|             <url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2</url> | ||||
|         </repository> | ||||
|     </distributionManagement> | ||||
|  | ||||
| @@ -261,39 +250,39 @@ | ||||
|             </properties> | ||||
|             <build> | ||||
|                 <plugins> | ||||
|                     <!--                    <plugin>--> | ||||
|                     <!--                        <groupId>org.apache.maven.plugins</groupId>--> | ||||
|                     <!--                        <artifactId>maven-gpg-plugin</artifactId>--> | ||||
|                     <!--                        <version>3.1.0</version>--> | ||||
|                     <!--                        <executions>--> | ||||
|                     <!--                            <execution>--> | ||||
|                     <!--                                <id>sign-artifacts</id>--> | ||||
|                     <!--                                <phase>verify</phase>--> | ||||
|                     <!--                                <goals>--> | ||||
|                     <!--                                    <goal>sign</goal>--> | ||||
|                     <!--                                </goals>--> | ||||
|                     <!--                                <configuration>--> | ||||
|                     <!--                                    <!– Prevent gpg from using pinentry programs. Fixes:--> | ||||
|                     <!--                                         gpg: signing failed: Inappropriate ioctl for device –>--> | ||||
|                     <!--                                    <gpgArguments>--> | ||||
|                     <!--                                        <arg>--pinentry-mode</arg>--> | ||||
|                     <!--                                        <arg>loopback</arg>--> | ||||
|                     <!--                                    </gpgArguments>--> | ||||
|                     <!--                                </configuration>--> | ||||
|                     <!--                            </execution>--> | ||||
|                     <!--                        </executions>--> | ||||
|                     <!--                    </plugin>--> | ||||
|                     <!--                    <plugin>--> | ||||
|                     <!--                        <groupId>org.sonatype.plugins</groupId>--> | ||||
|                     <!--                        <artifactId>nexus-staging-maven-plugin</artifactId>--> | ||||
|                     <!--                        <version>1.6.13</version>--> | ||||
|                     <!--                        <extensions>true</extensions>--> | ||||
|                     <!--                        <configuration>--> | ||||
|                     <!--                            <serverId>ossrh</serverId>--> | ||||
|                     <!--                            <nexusUrl>https://s01.oss.sonatype.org/</nexusUrl>--> | ||||
|                     <!--                            <autoReleaseAfterClose>true</autoReleaseAfterClose>--> | ||||
|                     <!--                        </configuration>--> | ||||
|                     <!--                    </plugin>--> | ||||
|                     <plugin> | ||||
|                         <groupId>org.apache.maven.plugins</groupId> | ||||
|                         <artifactId>maven-gpg-plugin</artifactId> | ||||
|                         <version>3.1.0</version> | ||||
|                         <executions> | ||||
|                             <execution> | ||||
|                                 <id>sign-artifacts</id> | ||||
|                                 <phase>verify</phase> | ||||
|                                 <goals> | ||||
|                                     <goal>sign</goal> | ||||
|                                 </goals> | ||||
|                                 <configuration> | ||||
|                                     <!-- Prevent gpg from using pinentry programs. Fixes: | ||||
|                                          gpg: signing failed: Inappropriate ioctl for device --> | ||||
|                                     <gpgArguments> | ||||
|                                         <arg>--pinentry-mode</arg> | ||||
|                                         <arg>loopback</arg> | ||||
|                                     </gpgArguments> | ||||
|                                 </configuration> | ||||
|                             </execution> | ||||
|                         </executions> | ||||
|                     </plugin> | ||||
|                     <plugin> | ||||
|                         <groupId>org.sonatype.plugins</groupId> | ||||
|                         <artifactId>nexus-staging-maven-plugin</artifactId> | ||||
|                         <version>1.6.13</version> | ||||
|                         <extensions>true</extensions> | ||||
|                         <configuration> | ||||
|                             <serverId>ossrh</serverId> | ||||
|                             <nexusUrl>https://s01.oss.sonatype.org/</nexusUrl> | ||||
|                             <autoReleaseAfterClose>true</autoReleaseAfterClose> | ||||
|                         </configuration> | ||||
|                     </plugin> | ||||
|  | ||||
|                     <plugin> | ||||
|                         <groupId>org.jacoco</groupId> | ||||
|   | ||||
| @@ -10,7 +10,6 @@ import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingRe | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.*; | ||||
| import io.github.amithkoujalgi.ollama4j.core.tools.*; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||
| import org.slf4j.Logger; | ||||
| @@ -26,7 +25,9 @@ import java.net.http.HttpResponse; | ||||
| import java.nio.charset.StandardCharsets; | ||||
| import java.nio.file.Files; | ||||
| import java.time.Duration; | ||||
| import java.util.*; | ||||
| import java.util.ArrayList; | ||||
| import java.util.Base64; | ||||
| import java.util.List; | ||||
|  | ||||
| /** | ||||
|  * The base Ollama API class. | ||||
| @@ -338,7 +339,6 @@ public class OllamaAPI { | ||||
|         } | ||||
|     } | ||||
|  | ||||
|  | ||||
|     /** | ||||
|      * Generate response for a question to a model running on Ollama server. This is a sync/blocking | ||||
|      * call. | ||||
| @@ -351,10 +351,9 @@ public class OllamaAPI { | ||||
|      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||
|      * @return OllamaResult that includes response text and time taken for response | ||||
|      */ | ||||
|     public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler) | ||||
|     public OllamaResult generate(String model, String prompt, Options options, OllamaStreamHandler streamHandler) | ||||
|             throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); | ||||
|         ollamaRequestModel.setRaw(raw); | ||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||
|         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||
|     } | ||||
| @@ -362,37 +361,13 @@ public class OllamaAPI { | ||||
|     /** | ||||
|      * Convenience method to call Ollama API without streaming responses. | ||||
|      * <p> | ||||
|      * Uses {@link #generate(String, String, boolean, Options, OllamaStreamHandler)} | ||||
|      * | ||||
|      * @param model   Model to use | ||||
|      * @param prompt  Prompt text | ||||
|      * @param raw     In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the raw parameter to disable templating. Also note that raw mode will not return a context. | ||||
|      * @param options Additional Options | ||||
|      * @return OllamaResult | ||||
|      * Uses {@link #generate(String, String, Options, OllamaStreamHandler)} | ||||
|      */ | ||||
|     public OllamaResult generate(String model, String prompt, boolean raw, Options options) | ||||
|     public OllamaResult generate(String model, String prompt, Options options) | ||||
|             throws OllamaBaseException, IOException, InterruptedException { | ||||
|         return generate(model, prompt, raw, options, null); | ||||
|         return generate(model, prompt, options, null); | ||||
|     } | ||||
|  | ||||
|  | ||||
|     public OllamaToolsResult generateWithTools(String model, String prompt, boolean raw, Options options) | ||||
|             throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaToolsResult toolResult = new OllamaToolsResult(); | ||||
|         Map<ToolDef, Object> toolResults = new HashMap<>(); | ||||
|  | ||||
|         OllamaResult result = generate(model, prompt, raw, options, null); | ||||
|         toolResult.setModelResult(result); | ||||
|  | ||||
|         List<ToolDef> toolDefs = Utils.getObjectMapper().readValue(result.getResponse(), Utils.getObjectMapper().getTypeFactory().constructCollectionType(List.class, ToolDef.class)); | ||||
|         for (ToolDef toolDef : toolDefs) { | ||||
|             toolResults.put(toolDef, invokeTool(toolDef)); | ||||
|         } | ||||
|         toolResult.setToolResults(toolResults); | ||||
|         return toolResult; | ||||
|     } | ||||
|  | ||||
|  | ||||
|     /** | ||||
|      * Generate response for a question to a model running on Ollama server and get a callback handle | ||||
|      * that can be used to check for status and get the response from the model later. This would be | ||||
| @@ -402,9 +377,9 @@ public class OllamaAPI { | ||||
|      * @param prompt the prompt/question text | ||||
|      * @return the ollama async result callback handle | ||||
|      */ | ||||
|     public OllamaAsyncResultCallback generateAsync(String model, String prompt, boolean raw) { | ||||
|     public OllamaAsyncResultCallback generateAsync(String model, String prompt) { | ||||
|         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); | ||||
|         ollamaRequestModel.setRaw(raw); | ||||
|  | ||||
|         URI uri = URI.create(this.host + "/api/generate"); | ||||
|         OllamaAsyncResultCallback ollamaAsyncResultCallback = | ||||
|                 new OllamaAsyncResultCallback( | ||||
| @@ -601,24 +576,4 @@ public class OllamaAPI { | ||||
|     private boolean isBasicAuthCredentialsSet() { | ||||
|         return basicAuth != null; | ||||
|     } | ||||
|  | ||||
|  | ||||
|     public void registerTool(MistralTools.ToolSpecification toolSpecification) { | ||||
|         ToolRegistry.addFunction(toolSpecification.getFunctionName(), toolSpecification.getToolDefinition()); | ||||
|     } | ||||
|  | ||||
|     private Object invokeTool(ToolDef toolDef) { | ||||
|         try { | ||||
|             String methodName = toolDef.getName(); | ||||
|             Map<String, Object> arguments = toolDef.getArguments(); | ||||
|             DynamicFunction function = ToolRegistry.getFunction(methodName); | ||||
|             if (function == null) { | ||||
|                 throw new IllegalArgumentException("No such tool: " + methodName); | ||||
|             } | ||||
|             return function.apply(arguments); | ||||
|         } catch (Exception e) { | ||||
|             e.printStackTrace(); | ||||
|             return "Error calling tool: " + e.getMessage(); | ||||
|         } | ||||
|     } | ||||
| } | ||||
|   | ||||
| @@ -16,16 +16,10 @@ public class OllamaChatResult extends OllamaResult{ | ||||
|             List<OllamaChatMessage> chatHistory) { | ||||
|         super(response, responseTime, httpStatusCode); | ||||
|         this.chatHistory = chatHistory; | ||||
|         appendAnswerToChatHistory(response); | ||||
|     } | ||||
|  | ||||
|     public List<OllamaChatMessage> getChatHistory() { | ||||
|         return chatHistory; | ||||
|     }  | ||||
|  | ||||
|     private void appendAnswerToChatHistory(String answer){ | ||||
|         OllamaChatMessage assistantMessage = new OllamaChatMessage(OllamaChatMessageRole.ASSISTANT, answer); | ||||
|         this.chatHistory.add(assistantMessage); | ||||
|     } | ||||
|      | ||||
|      | ||||
|   | ||||
| @@ -11,8 +11,6 @@ public class OllamaChatStreamObserver { | ||||
|  | ||||
|     private List<OllamaChatResponseModel> responseParts = new ArrayList<>(); | ||||
|  | ||||
|     private String message = ""; | ||||
|  | ||||
|     public OllamaChatStreamObserver(OllamaStreamHandler streamHandler) { | ||||
|         this.streamHandler = streamHandler; | ||||
|     } | ||||
| @@ -23,8 +21,7 @@ public class OllamaChatStreamObserver { | ||||
|     } | ||||
|      | ||||
|     protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart){ | ||||
|         message = message + currentResponsePart.getMessage().getContent(); | ||||
|         streamHandler.accept(message); | ||||
|         streamHandler.accept(currentResponsePart.getMessage().getContent()); | ||||
|     } | ||||
|  | ||||
|  | ||||
|   | ||||
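The observer hunk above switches between handing the stream handler the accumulated message and handing it only the latest chunk. A small sketch of a handler compatible with either behavior, assuming `OllamaStreamHandler` is the `Consumer<String>`-style callback described in the generate docs earlier in this compare:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;

public class PrintingStreamHandler {
    // On the accumulating side each callback receives the full response so far;
    // on the other side it receives only the newest chunk. A handler that simply
    // prints its argument therefore shows growing lines on one side and
    // individual fragments on the other.
    public static OllamaStreamHandler create() {
        return (s) -> System.out.println(s);
    }
}
```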
| @@ -1,5 +1,9 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.request; | ||||
|  | ||||
| import java.io.IOException; | ||||
| import org.slf4j.Logger; | ||||
| import org.slf4j.LoggerFactory; | ||||
|  | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; | ||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||
| @@ -9,19 +13,15 @@ import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRespo | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||
| import org.slf4j.Logger; | ||||
| import org.slf4j.LoggerFactory; | ||||
|  | ||||
| import java.io.IOException; | ||||
|  | ||||
| public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller { | ||||
| public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{ | ||||
|  | ||||
|     private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class); | ||||
|  | ||||
|     private OllamaGenerateStreamObserver streamObserver; | ||||
|  | ||||
|     public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) { | ||||
|         super(host, basicAuth, requestTimeoutSeconds, verbose); | ||||
|         super(host, basicAuth, requestTimeoutSeconds, verbose);    | ||||
|     } | ||||
|  | ||||
|     @Override | ||||
| @@ -31,22 +31,24 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller { | ||||
|  | ||||
|     @Override | ||||
|     protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) { | ||||
|         try { | ||||
|             OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); | ||||
|             responseBuffer.append(ollamaResponseModel.getResponse()); | ||||
|             if (streamObserver != null) { | ||||
|                 streamObserver.notify(ollamaResponseModel); | ||||
|             } | ||||
|             return ollamaResponseModel.isDone(); | ||||
|         } catch (JsonProcessingException e) { | ||||
|             LOG.error("Error parsing the Ollama chat response!", e); | ||||
|             return true; | ||||
|         } | ||||
|                 try { | ||||
|                     OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); | ||||
|                     responseBuffer.append(ollamaResponseModel.getResponse()); | ||||
|                     if(streamObserver != null) { | ||||
|                         streamObserver.notify(ollamaResponseModel); | ||||
|                     } | ||||
|                     return ollamaResponseModel.isDone(); | ||||
|                 } catch (JsonProcessingException e) { | ||||
|                     LOG.error("Error parsing the Ollama chat response!",e); | ||||
|                     return true; | ||||
|                 }          | ||||
|     } | ||||
|  | ||||
|     public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler) | ||||
|             throws OllamaBaseException, IOException, InterruptedException { | ||||
|         streamObserver = new OllamaGenerateStreamObserver(streamHandler); | ||||
|         return super.callSync(body); | ||||
|         throws OllamaBaseException, IOException, InterruptedException { | ||||
|     streamObserver = new OllamaGenerateStreamObserver(streamHandler); | ||||
|     return super.callSync(body); | ||||
|     } | ||||
|      | ||||
|      | ||||
| } | ||||
|   | ||||
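For context (not part of the commit): the call(...) method above backs the streaming generate path; a sketch using the generate overload that takes model, prompt, options, and a stream handler, as it appears in the tests in this compare. Host, model, and prompt are placeholders.

    import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
    import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
    import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

    public class StreamingGenerateSketch {
        public static void main(String[] args) throws Exception {
            OllamaAPI api = new OllamaAPI("http://localhost:11434");   // placeholder host
            OllamaResult result = api.generate(
                    "llama2",                                           // placeholder model
                    "What is the capital of France?",
                    new OptionsBuilder().build(),
                    s -> System.out.println(s));                        // invoked for every streamed update
            System.out.println("done: " + result.getResponse());
        }
    }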
| @@ -1,8 +0,0 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.tools; | ||||
|  | ||||
| import java.util.Map; | ||||
|  | ||||
| @FunctionalInterface | ||||
| public interface DynamicFunction { | ||||
|     Object apply(Map<String, Object> arguments); | ||||
| } | ||||
| @@ -1,139 +0,0 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.tools; | ||||
|  | ||||
| import com.fasterxml.jackson.annotation.JsonIgnore; | ||||
| import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | ||||
| import com.fasterxml.jackson.annotation.JsonInclude; | ||||
| import com.fasterxml.jackson.annotation.JsonProperty; | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||
| import lombok.Builder; | ||||
| import lombok.Data; | ||||
|  | ||||
| import java.util.ArrayList; | ||||
| import java.util.HashMap; | ||||
| import java.util.List; | ||||
| import java.util.Map; | ||||
|  | ||||
| public class MistralTools { | ||||
|     @Data | ||||
|     @Builder | ||||
|     public static class ToolSpecification { | ||||
|         private String functionName; | ||||
|         private String functionDesc; | ||||
|         private Map<String, PromptFuncDefinition.Property> props; | ||||
|         private DynamicFunction toolDefinition; | ||||
|     } | ||||
|  | ||||
|     @Data | ||||
|     @JsonIgnoreProperties(ignoreUnknown = true) | ||||
|     public static class PromptFuncDefinition { | ||||
|         private String type; | ||||
|         private PromptFuncSpec function; | ||||
|  | ||||
|         @Data | ||||
|         public static class PromptFuncSpec { | ||||
|             private String name; | ||||
|             private String description; | ||||
|             private Parameters parameters; | ||||
|         } | ||||
|  | ||||
|         @Data | ||||
|         public static class Parameters { | ||||
|             private String type; | ||||
|             private Map<String, Property> properties; | ||||
|             private List<String> required; | ||||
|         } | ||||
|  | ||||
|         @Data | ||||
|         @Builder | ||||
|         public static class Property { | ||||
|             private String type; | ||||
|             private String description; | ||||
|             @JsonProperty("enum") | ||||
|             @JsonInclude(JsonInclude.Include.NON_NULL) | ||||
|             private List<String> enumValues; | ||||
|             @JsonIgnore | ||||
|             private boolean required; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     public static class PropsBuilder { | ||||
|         private final Map<String, PromptFuncDefinition.Property> props = new HashMap<>(); | ||||
|  | ||||
|         public PropsBuilder withProperty(String key, PromptFuncDefinition.Property property) { | ||||
|             props.put(key, property); | ||||
|             return this; | ||||
|         } | ||||
|  | ||||
|         public Map<String, PromptFuncDefinition.Property> build() { | ||||
|             return props; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     public static class PromptBuilder { | ||||
|         private final List<PromptFuncDefinition> tools = new ArrayList<>(); | ||||
|  | ||||
|         private String promptText; | ||||
|  | ||||
|         public String build() throws JsonProcessingException { | ||||
|             return "[AVAILABLE_TOOLS] " + Utils.getObjectMapper().writeValueAsString(tools) + "[/AVAILABLE_TOOLS][INST] " + promptText + " [/INST]"; | ||||
|         } | ||||
|  | ||||
|         public PromptBuilder withPrompt(String prompt) throws JsonProcessingException { | ||||
|             promptText = prompt; | ||||
|             return this; | ||||
|         } | ||||
|  | ||||
|         public PromptBuilder withToolSpecification(ToolSpecification spec) { | ||||
|             PromptFuncDefinition def = new PromptFuncDefinition(); | ||||
|             def.setType("function"); | ||||
|  | ||||
|             PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec(); | ||||
|             functionDetail.setName(spec.getFunctionName()); | ||||
|             functionDetail.setDescription(spec.getFunctionDesc()); | ||||
|  | ||||
|             PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters(); | ||||
|             parameters.setType("object"); | ||||
|             parameters.setProperties(spec.getProps()); | ||||
|  | ||||
|             List<String> requiredValues = new ArrayList<>(); | ||||
|             for (Map.Entry<String, PromptFuncDefinition.Property> p : spec.getProps().entrySet()) { | ||||
|                 if (p.getValue().isRequired()) { | ||||
|                     requiredValues.add(p.getKey()); | ||||
|                 } | ||||
|             } | ||||
|             parameters.setRequired(requiredValues); | ||||
|             functionDetail.setParameters(parameters); | ||||
|             def.setFunction(functionDetail); | ||||
|  | ||||
|             tools.add(def); | ||||
|             return this; | ||||
|         } | ||||
| // | ||||
| //        public PromptBuilder withToolSpecification(String functionName, String functionDesc, Map<String, PromptFuncDefinition.Property> props) { | ||||
| //            PromptFuncDefinition def = new PromptFuncDefinition(); | ||||
| //            def.setType("function"); | ||||
| // | ||||
| //            PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec(); | ||||
| //            functionDetail.setName(functionName); | ||||
| //            functionDetail.setDescription(functionDesc); | ||||
| // | ||||
| //            PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters(); | ||||
| //            parameters.setType("object"); | ||||
| //            parameters.setProperties(props); | ||||
| // | ||||
| //            List<String> requiredValues = new ArrayList<>(); | ||||
| //            for (Map.Entry<String, PromptFuncDefinition.Property> p : props.entrySet()) { | ||||
| //                if (p.getValue().isRequired()) { | ||||
| //                    requiredValues.add(p.getKey()); | ||||
| //                } | ||||
| //            } | ||||
| //            parameters.setRequired(requiredValues); | ||||
| //            functionDetail.setParameters(parameters); | ||||
| //            def.setFunction(functionDetail); | ||||
| // | ||||
| //            tools.add(def); | ||||
| //            return this; | ||||
| //        } | ||||
|     } | ||||
| } | ||||
| @@ -1,16 +0,0 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.tools; | ||||
|  | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||
| import lombok.AllArgsConstructor; | ||||
| import lombok.Data; | ||||
| import lombok.NoArgsConstructor; | ||||
|  | ||||
| import java.util.Map; | ||||
|  | ||||
| @Data | ||||
| @NoArgsConstructor | ||||
| @AllArgsConstructor | ||||
| public class OllamaToolsResult { | ||||
|     private OllamaResult modelResult; | ||||
|     private Map<ToolDef, Object> toolResults; | ||||
| } | ||||
| @@ -1,18 +0,0 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.tools; | ||||
|  | ||||
| import lombok.AllArgsConstructor; | ||||
| import lombok.Data; | ||||
| import lombok.NoArgsConstructor; | ||||
|  | ||||
| import java.util.Map; | ||||
|  | ||||
| @Data | ||||
| @AllArgsConstructor | ||||
| @NoArgsConstructor | ||||
| public class ToolDef { | ||||
|  | ||||
|     private String name; | ||||
|     private Map<String, Object> arguments; | ||||
|  | ||||
| } | ||||
|  | ||||
| @@ -1,17 +0,0 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.tools; | ||||
|  | ||||
| import java.util.HashMap; | ||||
| import java.util.Map; | ||||
|  | ||||
| public class ToolRegistry { | ||||
|     private static final Map<String, DynamicFunction> functionMap = new HashMap<>(); | ||||
|  | ||||
|  | ||||
|     public static DynamicFunction getFunction(String name) { | ||||
|         return functionMap.get(name); | ||||
|     } | ||||
|  | ||||
|     public static void addFunction(String name, DynamicFunction function) { | ||||
|         functionMap.put(name, function); | ||||
|     } | ||||
| } | ||||
| @@ -9,9 +9,6 @@ package io.github.amithkoujalgi.ollama4j.core.types; | ||||
| @SuppressWarnings("ALL") | ||||
| public class OllamaModelType { | ||||
|     public static final String GEMMA = "gemma"; | ||||
|     public static final String GEMMA2 = "gemma2"; | ||||
|  | ||||
|  | ||||
|     public static final String LLAMA2 = "llama2"; | ||||
|     public static final String LLAMA3 = "llama3"; | ||||
|     public static final String MISTRAL = "mistral"; | ||||
| @@ -33,8 +30,6 @@ public class OllamaModelType { | ||||
|     public static final String ZEPHYR = "zephyr"; | ||||
|     public static final String OPENHERMES = "openhermes"; | ||||
|     public static final String QWEN = "qwen"; | ||||
|  | ||||
|     public static final String QWEN2 = "qwen2"; | ||||
|     public static final String WIZARDCODER = "wizardcoder"; | ||||
|     public static final String LLAMA2_CHINESE = "llama2-chinese"; | ||||
|     public static final String TINYLLAMA = "tinyllama"; | ||||
| @@ -84,5 +79,4 @@ public class OllamaModelType { | ||||
|     public static final String NOTUS = "notus"; | ||||
|     public static final String DUCKDB_NSQL = "duckdb-nsql"; | ||||
|     public static final String ALL_MINILM = "all-minilm"; | ||||
|     public static final String CODESTRAL = "codestral"; | ||||
| } | ||||
|   | ||||
| @@ -1,5 +1,7 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.integrationtests; | ||||
|  | ||||
| import static org.junit.jupiter.api.Assertions.*; | ||||
|  | ||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; | ||||
| @@ -8,16 +10,9 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||
| import lombok.Data; | ||||
| import org.junit.jupiter.api.BeforeEach; | ||||
| import org.junit.jupiter.api.Order; | ||||
| import org.junit.jupiter.api.Test; | ||||
| import org.slf4j.Logger; | ||||
| import org.slf4j.LoggerFactory; | ||||
|  | ||||
| import java.io.File; | ||||
| import java.io.IOException; | ||||
| import java.io.InputStream; | ||||
| @@ -27,369 +22,372 @@ import java.net.http.HttpConnectTimeoutException; | ||||
| import java.util.List; | ||||
| import java.util.Objects; | ||||
| import java.util.Properties; | ||||
|  | ||||
| import static org.junit.jupiter.api.Assertions.*; | ||||
| import lombok.Data; | ||||
| import org.junit.jupiter.api.BeforeEach; | ||||
| import org.junit.jupiter.api.Order; | ||||
| import org.junit.jupiter.api.Test; | ||||
| import org.slf4j.Logger; | ||||
| import org.slf4j.LoggerFactory; | ||||
|  | ||||
| class TestRealAPIs { | ||||
|  | ||||
|     private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class); | ||||
|   private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class); | ||||
|  | ||||
|     OllamaAPI ollamaAPI; | ||||
|     Config config; | ||||
|   OllamaAPI ollamaAPI; | ||||
|   Config config; | ||||
|  | ||||
|     private File getImageFileFromClasspath(String fileName) { | ||||
|         ClassLoader classLoader = getClass().getClassLoader(); | ||||
|         return new File(Objects.requireNonNull(classLoader.getResource(fileName)).getFile()); | ||||
|   private File getImageFileFromClasspath(String fileName) { | ||||
|     ClassLoader classLoader = getClass().getClassLoader(); | ||||
|     return new File(Objects.requireNonNull(classLoader.getResource(fileName)).getFile()); | ||||
|   } | ||||
|  | ||||
|   @BeforeEach | ||||
|   void setUp() { | ||||
|     config = new Config(); | ||||
|     ollamaAPI = new OllamaAPI(config.getOllamaURL()); | ||||
|     ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds()); | ||||
|   } | ||||
|  | ||||
|   @Test | ||||
|   @Order(1) | ||||
|   void testWrongEndpoint() { | ||||
|     OllamaAPI ollamaAPI = new OllamaAPI("http://wrong-host:11434"); | ||||
|     assertThrows(ConnectException.class, ollamaAPI::listModels); | ||||
|   } | ||||
|  | ||||
|   @Test | ||||
|   @Order(1) | ||||
|   void testEndpointReachability() { | ||||
|     try { | ||||
|       assertNotNull(ollamaAPI.listModels()); | ||||
|     } catch (HttpConnectTimeoutException e) { | ||||
|       fail(e.getMessage()); | ||||
|     } catch (Exception e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @BeforeEach | ||||
|     void setUp() { | ||||
|         config = new Config(); | ||||
|         ollamaAPI = new OllamaAPI(config.getOllamaURL()); | ||||
|         ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds()); | ||||
|   @Test | ||||
|   @Order(2) | ||||
|   void testListModels() { | ||||
|     testEndpointReachability(); | ||||
|     try { | ||||
|       assertNotNull(ollamaAPI.listModels()); | ||||
|       ollamaAPI.listModels().forEach(System.out::println); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     @Order(1) | ||||
|     void testWrongEndpoint() { | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI("http://wrong-host:11434"); | ||||
|         assertThrows(ConnectException.class, ollamaAPI::listModels); | ||||
|   @Test | ||||
|   @Order(2) | ||||
|   void testPullModel() { | ||||
|     testEndpointReachability(); | ||||
|     try { | ||||
|       ollamaAPI.pullModel(config.getModel()); | ||||
|       boolean found = | ||||
|           ollamaAPI.listModels().stream() | ||||
|               .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel())); | ||||
|       assertTrue(found); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     @Order(1) | ||||
|     void testEndpointReachability() { | ||||
|         try { | ||||
|             assertNotNull(ollamaAPI.listModels()); | ||||
|         } catch (HttpConnectTimeoutException e) { | ||||
|             fail(e.getMessage()); | ||||
|         } catch (Exception e) { | ||||
|             fail(e); | ||||
|         } | ||||
|   @Test | ||||
|   @Order(3) | ||||
|   void testListDtails() { | ||||
|     testEndpointReachability(); | ||||
|     try { | ||||
|       ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel()); | ||||
|       assertNotNull(modelDetails); | ||||
|       System.out.println(modelDetails); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     @Order(2) | ||||
|     void testListModels() { | ||||
|         testEndpointReachability(); | ||||
|         try { | ||||
|             assertNotNull(ollamaAPI.listModels()); | ||||
|             ollamaAPI.listModels().forEach(System.out::println); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|   @Test | ||||
|   @Order(3) | ||||
|   void testAskModelWithDefaultOptions() { | ||||
|     testEndpointReachability(); | ||||
|     try { | ||||
|       OllamaResult result = | ||||
|           ollamaAPI.generate( | ||||
|               config.getModel(), | ||||
|               "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||
|               new OptionsBuilder().build()); | ||||
|       assertNotNull(result); | ||||
|       assertNotNull(result.getResponse()); | ||||
|       assertFalse(result.getResponse().isEmpty()); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     @Order(2) | ||||
|     void testPullModel() { | ||||
|         testEndpointReachability(); | ||||
|         try { | ||||
|             ollamaAPI.pullModel(config.getModel()); | ||||
|             boolean found = | ||||
|                     ollamaAPI.listModels().stream() | ||||
|                             .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel())); | ||||
|             assertTrue(found); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|   @Test | ||||
|   @Order(3) | ||||
|   void testAskModelWithDefaultOptionsStreamed() { | ||||
|     testEndpointReachability(); | ||||
|     try { | ||||
|  | ||||
|       StringBuffer sb = new StringBuffer(""); | ||||
|  | ||||
|       OllamaResult result = ollamaAPI.generate(config.getModel(), | ||||
|           "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||
|           new OptionsBuilder().build(), (s) -> { | ||||
|             LOG.info(s); | ||||
|             String substring = s.substring(sb.toString().length(), s.length()); | ||||
|             LOG.info(substring); | ||||
|             sb.append(substring); | ||||
|           }); | ||||
|  | ||||
|       assertNotNull(result); | ||||
|       assertNotNull(result.getResponse()); | ||||
|       assertFalse(result.getResponse().isEmpty()); | ||||
|       assertEquals(sb.toString().trim(), result.getResponse().trim()); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     @Order(3) | ||||
|     void testListDtails() { | ||||
|         testEndpointReachability(); | ||||
|         try { | ||||
|             ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel()); | ||||
|             assertNotNull(modelDetails); | ||||
|             System.out.println(modelDetails); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|   @Test | ||||
|   @Order(3) | ||||
|   void testAskModelWithOptions() { | ||||
|     testEndpointReachability(); | ||||
|     try { | ||||
|       OllamaResult result = | ||||
|           ollamaAPI.generate( | ||||
|               config.getModel(), | ||||
|               "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||
|               new OptionsBuilder().setTemperature(0.9f).build()); | ||||
|       assertNotNull(result); | ||||
|       assertNotNull(result.getResponse()); | ||||
|       assertFalse(result.getResponse().isEmpty()); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     @Order(3) | ||||
|     void testAskModelWithDefaultOptions() { | ||||
|         testEndpointReachability(); | ||||
|         try { | ||||
|             OllamaResult result = | ||||
|                     ollamaAPI.generate( | ||||
|                             config.getModel(), | ||||
|                             "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||
|                             false, | ||||
|                             new OptionsBuilder().build()); | ||||
|             assertNotNull(result); | ||||
|             assertNotNull(result.getResponse()); | ||||
|             assertFalse(result.getResponse().isEmpty()); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|   @Test | ||||
|   @Order(3) | ||||
|   void testChat() { | ||||
|     testEndpointReachability(); | ||||
|     try { | ||||
|       OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); | ||||
|       OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") | ||||
|              .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!") | ||||
|              .withMessage(OllamaChatMessageRole.USER,"And what is the second larges city?") | ||||
|              .build(); | ||||
|  | ||||
|       OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||
|       assertNotNull(chatResult); | ||||
|       assertFalse(chatResult.getResponse().isBlank()); | ||||
|       assertEquals(4,chatResult.getChatHistory().size()); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     @Order(3) | ||||
|     void testAskModelWithDefaultOptionsStreamed() { | ||||
|         testEndpointReachability(); | ||||
|         try { | ||||
|             StringBuffer sb = new StringBuffer(""); | ||||
|             OllamaResult result = ollamaAPI.generate(config.getModel(), | ||||
|                     "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||
|                     false, | ||||
|                     new OptionsBuilder().build(), (s) -> { | ||||
|                         LOG.info(s); | ||||
|                         String substring = s.substring(sb.toString().length(), s.length()); | ||||
|                         LOG.info(substring); | ||||
|                         sb.append(substring); | ||||
|                     }); | ||||
|   @Test | ||||
|   @Order(3) | ||||
|   void testChatWithSystemPrompt() { | ||||
|     testEndpointReachability(); | ||||
|     try { | ||||
|       OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); | ||||
|       OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, | ||||
|           "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") | ||||
|           .withMessage(OllamaChatMessageRole.USER, | ||||
|               "What is the capital of France? And what's France's connection with Mona Lisa?") | ||||
|           .build(); | ||||
|  | ||||
|             assertNotNull(result); | ||||
|             assertNotNull(result.getResponse()); | ||||
|             assertFalse(result.getResponse().isEmpty()); | ||||
|             assertEquals(sb.toString().trim(), result.getResponse().trim()); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|       OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||
|       assertNotNull(chatResult); | ||||
|       assertFalse(chatResult.getResponse().isBlank()); | ||||
|       assertTrue(chatResult.getResponse().startsWith("NI")); | ||||
|       assertEquals(3, chatResult.getChatHistory().size()); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     @Order(3) | ||||
|     void testAskModelWithOptions() { | ||||
|         testEndpointReachability(); | ||||
|         try { | ||||
|             OllamaResult result = | ||||
|                     ollamaAPI.generate( | ||||
|                             config.getModel(), | ||||
|                             "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||
|                             true, | ||||
|                             new OptionsBuilder().setTemperature(0.9f).build()); | ||||
|             assertNotNull(result); | ||||
|             assertNotNull(result.getResponse()); | ||||
|             assertFalse(result.getResponse().isEmpty()); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|   @Test | ||||
|   @Order(3) | ||||
|   void testChatWithStream() { | ||||
|     testEndpointReachability(); | ||||
|     try { | ||||
|       OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); | ||||
|       OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, | ||||
|               "What is the capital of France? And what's France's connection with Mona Lisa?") | ||||
|           .build(); | ||||
|  | ||||
|       StringBuffer sb = new StringBuffer(""); | ||||
|  | ||||
|       OllamaChatResult chatResult = ollamaAPI.chat(requestModel,(s) -> { | ||||
|         LOG.info(s); | ||||
|         String substring = s.substring(sb.toString().length(), s.length()); | ||||
|         LOG.info(substring); | ||||
|         sb.append(substring); | ||||
|       }); | ||||
|       assertNotNull(chatResult); | ||||
|       assertEquals(sb.toString().trim(), chatResult.getResponse().trim()); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     @Order(3) | ||||
|     void testChat() { | ||||
|         testEndpointReachability(); | ||||
|         try { | ||||
|             OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); | ||||
|             OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") | ||||
|                     .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!") | ||||
|                     .withMessage(OllamaChatMessageRole.USER, "And what is the second larges city?") | ||||
|                     .build(); | ||||
|   @Test | ||||
|   @Order(3) | ||||
|   void testChatWithImageFromFileWithHistoryRecognition() { | ||||
|     testEndpointReachability(); | ||||
|     try { | ||||
|       OllamaChatRequestBuilder builder = | ||||
|           OllamaChatRequestBuilder.getInstance(config.getImageModel()); | ||||
|       OllamaChatRequestModel requestModel = | ||||
|           builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", | ||||
|               List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); | ||||
|  | ||||
|             OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||
|             assertNotNull(chatResult); | ||||
|             assertFalse(chatResult.getResponse().isBlank()); | ||||
|             assertEquals(4, chatResult.getChatHistory().size()); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|       OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||
|       assertNotNull(chatResult); | ||||
|       assertNotNull(chatResult.getResponse()); | ||||
|  | ||||
|       builder.reset(); | ||||
|  | ||||
|       requestModel = | ||||
|           builder.withMessages(chatResult.getChatHistory()) | ||||
|             .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build(); | ||||
|  | ||||
|       chatResult = ollamaAPI.chat(requestModel); | ||||
|       assertNotNull(chatResult); | ||||
|       assertNotNull(chatResult.getResponse()); | ||||
|  | ||||
|  | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     @Order(3) | ||||
|     void testChatWithSystemPrompt() { | ||||
|         testEndpointReachability(); | ||||
|         try { | ||||
|             OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); | ||||
|             OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, | ||||
|                             "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") | ||||
|                     .withMessage(OllamaChatMessageRole.USER, | ||||
|                             "What is the capital of France? And what's France's connection with Mona Lisa?") | ||||
|                     .build(); | ||||
|   @Test | ||||
|   @Order(3) | ||||
|   void testChatWithImageFromURL() { | ||||
|     testEndpointReachability(); | ||||
|     try { | ||||
|       OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel()); | ||||
|       OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", | ||||
|       "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg") | ||||
|              .build(); | ||||
|  | ||||
|             OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||
|             assertNotNull(chatResult); | ||||
|             assertFalse(chatResult.getResponse().isBlank()); | ||||
|             assertTrue(chatResult.getResponse().startsWith("NI")); | ||||
|             assertEquals(3, chatResult.getChatHistory().size()); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|       OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||
|       assertNotNull(chatResult); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     @Order(3) | ||||
|     void testChatWithStream() { | ||||
|         testEndpointReachability(); | ||||
|         try { | ||||
|             OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); | ||||
|             OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, | ||||
|                             "What is the capital of France? And what's France's connection with Mona Lisa?") | ||||
|                     .build(); | ||||
|  | ||||
|             StringBuffer sb = new StringBuffer(""); | ||||
|  | ||||
|             OllamaChatResult chatResult = ollamaAPI.chat(requestModel, (s) -> { | ||||
|                 LOG.info(s); | ||||
|                 String substring = s.substring(sb.toString().length(), s.length()); | ||||
|                 LOG.info(substring); | ||||
|                 sb.append(substring); | ||||
|             }); | ||||
|             assertNotNull(chatResult); | ||||
|             assertEquals(sb.toString().trim(), chatResult.getResponse().trim()); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|   @Test | ||||
|   @Order(3) | ||||
|   void testAskModelWithOptionsAndImageFiles() { | ||||
|     testEndpointReachability(); | ||||
|     File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg"); | ||||
|     try { | ||||
|       OllamaResult result = | ||||
|           ollamaAPI.generateWithImageFiles( | ||||
|               config.getImageModel(), | ||||
|               "What is in this image?", | ||||
|               List.of(imageFile), | ||||
|               new OptionsBuilder().build()); | ||||
|       assertNotNull(result); | ||||
|       assertNotNull(result.getResponse()); | ||||
|       assertFalse(result.getResponse().isEmpty()); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     @Order(3) | ||||
|     void testChatWithImageFromFileWithHistoryRecognition() { | ||||
|         testEndpointReachability(); | ||||
|         try { | ||||
|             OllamaChatRequestBuilder builder = | ||||
|                     OllamaChatRequestBuilder.getInstance(config.getImageModel()); | ||||
|             OllamaChatRequestModel requestModel = | ||||
|                     builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", | ||||
|                             List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); | ||||
|   @Test | ||||
|   @Order(3) | ||||
|   void testAskModelWithOptionsAndImageFilesStreamed() { | ||||
|     testEndpointReachability(); | ||||
|     File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg"); | ||||
|     try { | ||||
|       StringBuffer sb = new StringBuffer(""); | ||||
|  | ||||
|             OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||
|             assertNotNull(chatResult); | ||||
|             assertNotNull(chatResult.getResponse()); | ||||
|  | ||||
|             builder.reset(); | ||||
|  | ||||
|             requestModel = | ||||
|                     builder.withMessages(chatResult.getChatHistory()) | ||||
|                             .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build(); | ||||
|  | ||||
|             chatResult = ollamaAPI.chat(requestModel); | ||||
|             assertNotNull(chatResult); | ||||
|             assertNotNull(chatResult.getResponse()); | ||||
|  | ||||
|  | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|       OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(), | ||||
|           "What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> { | ||||
|             LOG.info(s); | ||||
|             String substring = s.substring(sb.toString().length(), s.length()); | ||||
|             LOG.info(substring); | ||||
|             sb.append(substring); | ||||
|           }); | ||||
|       assertNotNull(result); | ||||
|       assertNotNull(result.getResponse()); | ||||
|       assertFalse(result.getResponse().isEmpty()); | ||||
|       assertEquals(sb.toString().trim(), result.getResponse().trim()); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     @Order(3) | ||||
|     void testChatWithImageFromURL() { | ||||
|         testEndpointReachability(); | ||||
|         try { | ||||
|             OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel()); | ||||
|             OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", | ||||
|                             "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg") | ||||
|                     .build(); | ||||
|  | ||||
|             OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||
|             assertNotNull(chatResult); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|   @Test | ||||
|   @Order(3) | ||||
|   void testAskModelWithOptionsAndImageURLs() { | ||||
|     testEndpointReachability(); | ||||
|     try { | ||||
|       OllamaResult result = | ||||
|           ollamaAPI.generateWithImageURLs( | ||||
|               config.getImageModel(), | ||||
|               "What is in this image?", | ||||
|               List.of( | ||||
|                   "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"), | ||||
|               new OptionsBuilder().build()); | ||||
|       assertNotNull(result); | ||||
|       assertNotNull(result.getResponse()); | ||||
|       assertFalse(result.getResponse().isEmpty()); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     @Order(3) | ||||
|     void testAskModelWithOptionsAndImageFiles() { | ||||
|         testEndpointReachability(); | ||||
|         File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg"); | ||||
|         try { | ||||
|             OllamaResult result = | ||||
|                     ollamaAPI.generateWithImageFiles( | ||||
|                             config.getImageModel(), | ||||
|                             "What is in this image?", | ||||
|                             List.of(imageFile), | ||||
|                             new OptionsBuilder().build()); | ||||
|             assertNotNull(result); | ||||
|             assertNotNull(result.getResponse()); | ||||
|             assertFalse(result.getResponse().isEmpty()); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     @Test | ||||
|     @Order(3) | ||||
|     void testAskModelWithOptionsAndImageFilesStreamed() { | ||||
|         testEndpointReachability(); | ||||
|         File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg"); | ||||
|         try { | ||||
|             StringBuffer sb = new StringBuffer(""); | ||||
|  | ||||
|             OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(), | ||||
|                     "What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> { | ||||
|                         LOG.info(s); | ||||
|                         String substring = s.substring(sb.toString().length(), s.length()); | ||||
|                         LOG.info(substring); | ||||
|                         sb.append(substring); | ||||
|                     }); | ||||
|             assertNotNull(result); | ||||
|             assertNotNull(result.getResponse()); | ||||
|             assertFalse(result.getResponse().isEmpty()); | ||||
|             assertEquals(sb.toString().trim(), result.getResponse().trim()); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     @Test | ||||
|     @Order(3) | ||||
|     void testAskModelWithOptionsAndImageURLs() { | ||||
|         testEndpointReachability(); | ||||
|         try { | ||||
|             OllamaResult result = | ||||
|                     ollamaAPI.generateWithImageURLs( | ||||
|                             config.getImageModel(), | ||||
|                             "What is in this image?", | ||||
|                             List.of( | ||||
|                                     "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"), | ||||
|                             new OptionsBuilder().build()); | ||||
|             assertNotNull(result); | ||||
|             assertNotNull(result.getResponse()); | ||||
|             assertFalse(result.getResponse().isEmpty()); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     @Test | ||||
|     @Order(3) | ||||
|     public void testEmbedding() { | ||||
|         testEndpointReachability(); | ||||
|         try { | ||||
|             OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder | ||||
|                     .getInstance(config.getModel(), "What is the capital of France?").build(); | ||||
|  | ||||
|             List<Double> embeddings = ollamaAPI.generateEmbeddings(request); | ||||
|  | ||||
|             assertNotNull(embeddings); | ||||
|             assertFalse(embeddings.isEmpty()); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             fail(e); | ||||
|         } | ||||
|   @Test | ||||
|   @Order(3) | ||||
|   public void testEmbedding() { | ||||
|     testEndpointReachability(); | ||||
|     try { | ||||
|       OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder | ||||
|           .getInstance(config.getModel(), "What is the capital of France?").build(); | ||||
|  | ||||
|       List<Double> embeddings = ollamaAPI.generateEmbeddings(request); | ||||
|  | ||||
|       assertNotNull(embeddings); | ||||
|       assertFalse(embeddings.isEmpty()); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|       fail(e); | ||||
|     } | ||||
|   } | ||||
| } | ||||
|  | ||||
| @Data | ||||
| class Config { | ||||
|     private String ollamaURL; | ||||
|     private String model; | ||||
|     private String imageModel; | ||||
|     private int requestTimeoutSeconds; | ||||
|   private String ollamaURL; | ||||
|   private String model; | ||||
|   private String imageModel; | ||||
|   private int requestTimeoutSeconds; | ||||
|  | ||||
|     public Config() { | ||||
|         Properties properties = new Properties(); | ||||
|         try (InputStream input = | ||||
|                      getClass().getClassLoader().getResourceAsStream("test-config.properties")) { | ||||
|             if (input == null) { | ||||
|                 throw new RuntimeException("Sorry, unable to find test-config.properties"); | ||||
|             } | ||||
|             properties.load(input); | ||||
|             this.ollamaURL = properties.getProperty("ollama.url"); | ||||
|             this.model = properties.getProperty("ollama.model"); | ||||
|             this.imageModel = properties.getProperty("ollama.model.image"); | ||||
|             this.requestTimeoutSeconds = | ||||
|                     Integer.parseInt(properties.getProperty("ollama.request-timeout-seconds")); | ||||
|         } catch (IOException e) { | ||||
|             throw new RuntimeException("Error loading properties", e); | ||||
|         } | ||||
|   public Config() { | ||||
|     Properties properties = new Properties(); | ||||
|     try (InputStream input = | ||||
|         getClass().getClassLoader().getResourceAsStream("test-config.properties")) { | ||||
|       if (input == null) { | ||||
|         throw new RuntimeException("Sorry, unable to find test-config.properties"); | ||||
|       } | ||||
|       properties.load(input); | ||||
|       this.ollamaURL = properties.getProperty("ollama.url"); | ||||
|       this.model = properties.getProperty("ollama.model"); | ||||
|       this.imageModel = properties.getProperty("ollama.model.image"); | ||||
|       this.requestTimeoutSeconds = | ||||
|           Integer.parseInt(properties.getProperty("ollama.request-timeout-seconds")); | ||||
|     } catch (IOException e) { | ||||
|       throw new RuntimeException("Error loading properties", e); | ||||
|     } | ||||
|   } | ||||
| } | ||||
|   | ||||
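The Config class above loads test-config.properties from the test classpath; an example with placeholder values for the four keys it reads (the Maven test-resources location is assumed here):

    # src/test/resources/test-config.properties (assumed location, placeholder values)
    ollama.url=http://localhost:11434
    ollama.model=llama2
    ollama.model.image=llava
    ollama.request-timeout-seconds=120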
| @@ -1,5 +1,7 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.unittests; | ||||
|  | ||||
| import static org.mockito.Mockito.*; | ||||
|  | ||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; | ||||
| @@ -7,158 +9,155 @@ import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultCallback; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||
| import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||
| import org.junit.jupiter.api.Test; | ||||
| import org.mockito.Mockito; | ||||
|  | ||||
| import java.io.IOException; | ||||
| import java.net.URISyntaxException; | ||||
| import java.util.ArrayList; | ||||
| import java.util.Collections; | ||||
|  | ||||
| import static org.mockito.Mockito.*; | ||||
| import org.junit.jupiter.api.Test; | ||||
| import org.mockito.Mockito; | ||||
|  | ||||
| class TestMockedAPIs { | ||||
|     @Test | ||||
|     void testPullModel() { | ||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|         String model = OllamaModelType.LLAMA2; | ||||
|         try { | ||||
|             doNothing().when(ollamaAPI).pullModel(model); | ||||
|             ollamaAPI.pullModel(model); | ||||
|             verify(ollamaAPI, times(1)).pullModel(model); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|   @Test | ||||
|   void testPullModel() { | ||||
|     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|     String model = OllamaModelType.LLAMA2; | ||||
|     try { | ||||
|       doNothing().when(ollamaAPI).pullModel(model); | ||||
|       ollamaAPI.pullModel(model); | ||||
|       verify(ollamaAPI, times(1)).pullModel(model); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|       throw new RuntimeException(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     void testListModels() { | ||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|         try { | ||||
|             when(ollamaAPI.listModels()).thenReturn(new ArrayList<>()); | ||||
|             ollamaAPI.listModels(); | ||||
|             verify(ollamaAPI, times(1)).listModels(); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|   @Test | ||||
|   void testListModels() { | ||||
|     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|     try { | ||||
|       when(ollamaAPI.listModels()).thenReturn(new ArrayList<>()); | ||||
|       ollamaAPI.listModels(); | ||||
|       verify(ollamaAPI, times(1)).listModels(); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|       throw new RuntimeException(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     void testCreateModel() { | ||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|         String model = OllamaModelType.LLAMA2; | ||||
|         String modelFilePath = "FROM llama2\nSYSTEM You are mario from Super Mario Bros."; | ||||
|         try { | ||||
|             doNothing().when(ollamaAPI).createModelWithModelFileContents(model, modelFilePath); | ||||
|             ollamaAPI.createModelWithModelFileContents(model, modelFilePath); | ||||
|             verify(ollamaAPI, times(1)).createModelWithModelFileContents(model, modelFilePath); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|   @Test | ||||
|   void testCreateModel() { | ||||
|     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|     String model = OllamaModelType.LLAMA2; | ||||
|     String modelFilePath = "FROM llama2\nSYSTEM You are mario from Super Mario Bros."; | ||||
|     try { | ||||
|       doNothing().when(ollamaAPI).createModelWithModelFileContents(model, modelFilePath); | ||||
|       ollamaAPI.createModelWithModelFileContents(model, modelFilePath); | ||||
|       verify(ollamaAPI, times(1)).createModelWithModelFileContents(model, modelFilePath); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|       throw new RuntimeException(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     void testDeleteModel() { | ||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|         String model = OllamaModelType.LLAMA2; | ||||
|         try { | ||||
|             doNothing().when(ollamaAPI).deleteModel(model, true); | ||||
|             ollamaAPI.deleteModel(model, true); | ||||
|             verify(ollamaAPI, times(1)).deleteModel(model, true); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|   @Test | ||||
|   void testDeleteModel() { | ||||
|     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|     String model = OllamaModelType.LLAMA2; | ||||
|     try { | ||||
|       doNothing().when(ollamaAPI).deleteModel(model, true); | ||||
|       ollamaAPI.deleteModel(model, true); | ||||
|       verify(ollamaAPI, times(1)).deleteModel(model, true); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|       throw new RuntimeException(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     void testGetModelDetails() { | ||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|         String model = OllamaModelType.LLAMA2; | ||||
|         try { | ||||
|             when(ollamaAPI.getModelDetails(model)).thenReturn(new ModelDetail()); | ||||
|             ollamaAPI.getModelDetails(model); | ||||
|             verify(ollamaAPI, times(1)).getModelDetails(model); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|   @Test | ||||
|   void testGetModelDetails() { | ||||
|     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|     String model = OllamaModelType.LLAMA2; | ||||
|     try { | ||||
|       when(ollamaAPI.getModelDetails(model)).thenReturn(new ModelDetail()); | ||||
|       ollamaAPI.getModelDetails(model); | ||||
|       verify(ollamaAPI, times(1)).getModelDetails(model); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|       throw new RuntimeException(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     void testGenerateEmbeddings() { | ||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|         String model = OllamaModelType.LLAMA2; | ||||
|         String prompt = "some prompt text"; | ||||
|         try { | ||||
|             when(ollamaAPI.generateEmbeddings(model, prompt)).thenReturn(new ArrayList<>()); | ||||
|             ollamaAPI.generateEmbeddings(model, prompt); | ||||
|             verify(ollamaAPI, times(1)).generateEmbeddings(model, prompt); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|   @Test | ||||
|   void testGenerateEmbeddings() { | ||||
|     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|     String model = OllamaModelType.LLAMA2; | ||||
|     String prompt = "some prompt text"; | ||||
|     try { | ||||
|       when(ollamaAPI.generateEmbeddings(model, prompt)).thenReturn(new ArrayList<>()); | ||||
|       ollamaAPI.generateEmbeddings(model, prompt); | ||||
|       verify(ollamaAPI, times(1)).generateEmbeddings(model, prompt); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|       throw new RuntimeException(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     void testAsk() { | ||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|         String model = OllamaModelType.LLAMA2; | ||||
|         String prompt = "some prompt text"; | ||||
|         OptionsBuilder optionsBuilder = new OptionsBuilder(); | ||||
|         try { | ||||
|             when(ollamaAPI.generate(model, prompt, false, optionsBuilder.build())) | ||||
|                     .thenReturn(new OllamaResult("", 0, 200)); | ||||
|             ollamaAPI.generate(model, prompt, false, optionsBuilder.build()); | ||||
|             verify(ollamaAPI, times(1)).generate(model, prompt, false, optionsBuilder.build()); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|   @Test | ||||
|   void testAsk() { | ||||
|     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|     String model = OllamaModelType.LLAMA2; | ||||
|     String prompt = "some prompt text"; | ||||
|     OptionsBuilder optionsBuilder = new OptionsBuilder(); | ||||
|     try { | ||||
|       when(ollamaAPI.generate(model, prompt, optionsBuilder.build())) | ||||
|           .thenReturn(new OllamaResult("", 0, 200)); | ||||
|       ollamaAPI.generate(model, prompt, optionsBuilder.build()); | ||||
|       verify(ollamaAPI, times(1)).generate(model, prompt, optionsBuilder.build()); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|       throw new RuntimeException(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     void testAskWithImageFiles() { | ||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|         String model = OllamaModelType.LLAMA2; | ||||
|         String prompt = "some prompt text"; | ||||
|         try { | ||||
|             when(ollamaAPI.generateWithImageFiles( | ||||
|                     model, prompt, Collections.emptyList(), new OptionsBuilder().build())) | ||||
|                     .thenReturn(new OllamaResult("", 0, 200)); | ||||
|             ollamaAPI.generateWithImageFiles( | ||||
|                     model, prompt, Collections.emptyList(), new OptionsBuilder().build()); | ||||
|             verify(ollamaAPI, times(1)) | ||||
|                     .generateWithImageFiles( | ||||
|                             model, prompt, Collections.emptyList(), new OptionsBuilder().build()); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|   @Test | ||||
|   void testAskWithImageFiles() { | ||||
|     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|     String model = OllamaModelType.LLAMA2; | ||||
|     String prompt = "some prompt text"; | ||||
|     try { | ||||
|       when(ollamaAPI.generateWithImageFiles( | ||||
|               model, prompt, Collections.emptyList(), new OptionsBuilder().build())) | ||||
|           .thenReturn(new OllamaResult("", 0, 200)); | ||||
|       ollamaAPI.generateWithImageFiles( | ||||
|           model, prompt, Collections.emptyList(), new OptionsBuilder().build()); | ||||
|       verify(ollamaAPI, times(1)) | ||||
|           .generateWithImageFiles( | ||||
|               model, prompt, Collections.emptyList(), new OptionsBuilder().build()); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||
|       throw new RuntimeException(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     void testAskWithImageURLs() { | ||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|         String model = OllamaModelType.LLAMA2; | ||||
|         String prompt = "some prompt text"; | ||||
|         try { | ||||
|             when(ollamaAPI.generateWithImageURLs( | ||||
|                     model, prompt, Collections.emptyList(), new OptionsBuilder().build())) | ||||
|                     .thenReturn(new OllamaResult("", 0, 200)); | ||||
|             ollamaAPI.generateWithImageURLs( | ||||
|                     model, prompt, Collections.emptyList(), new OptionsBuilder().build()); | ||||
|             verify(ollamaAPI, times(1)) | ||||
|                     .generateWithImageURLs( | ||||
|                             model, prompt, Collections.emptyList(), new OptionsBuilder().build()); | ||||
|         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|   @Test | ||||
|   void testAskWithImageURLs() { | ||||
|     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|     String model = OllamaModelType.LLAMA2; | ||||
|     String prompt = "some prompt text"; | ||||
|     try { | ||||
|       when(ollamaAPI.generateWithImageURLs( | ||||
|               model, prompt, Collections.emptyList(), new OptionsBuilder().build())) | ||||
|           .thenReturn(new OllamaResult("", 0, 200)); | ||||
|       ollamaAPI.generateWithImageURLs( | ||||
|           model, prompt, Collections.emptyList(), new OptionsBuilder().build()); | ||||
|       verify(ollamaAPI, times(1)) | ||||
|           .generateWithImageURLs( | ||||
|               model, prompt, Collections.emptyList(), new OptionsBuilder().build()); | ||||
|     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||
|       throw new RuntimeException(e); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|     @Test | ||||
|     void testAskAsync() { | ||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|         String model = OllamaModelType.LLAMA2; | ||||
|         String prompt = "some prompt text"; | ||||
|         when(ollamaAPI.generateAsync(model, prompt, false)) | ||||
|                 .thenReturn(new OllamaAsyncResultCallback(null, null, 3)); | ||||
|         ollamaAPI.generateAsync(model, prompt, false); | ||||
|         verify(ollamaAPI, times(1)).generateAsync(model, prompt, false); | ||||
|     } | ||||
|   @Test | ||||
|   void testAskAsync() { | ||||
|     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||
|     String model = OllamaModelType.LLAMA2; | ||||
|     String prompt = "some prompt text"; | ||||
|     when(ollamaAPI.generateAsync(model, prompt)) | ||||
|         .thenReturn(new OllamaAsyncResultCallback(null, null, 3)); | ||||
|     ollamaAPI.generateAsync(model, prompt); | ||||
|     verify(ollamaAPI, times(1)).generateAsync(model, prompt); | ||||
|   } | ||||
| } | ||||
|   | ||||