diff --git a/.github/workflows/build-on-pull-request.yml b/.github/workflows/build-on-pull-request.yml index f9e9277..758078d 100644 --- a/.github/workflows/build-on-pull-request.yml +++ b/.github/workflows/build-on-pull-request.yml @@ -20,13 +20,17 @@ jobs: permissions: contents: read + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + steps: - uses: actions/checkout@v5 - - name: Set up JDK 11 + - name: Set up JDK 21 uses: actions/setup-java@v5 with: - java-version: '11' - distribution: 'adopt-hotspot' + java-version: '21' + distribution: 'oracle' server-id: github settings-path: ${{ github.workspace }} diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index ad52d52..19f5c6e 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -28,8 +28,8 @@ jobs: if: matrix.language == 'java' uses: actions/setup-java@v5 with: - distribution: temurin - java-version: '11' + distribution: oracle + java-version: '21' - name: Initialize CodeQL uses: github/codeql-action/init@v3 diff --git a/.github/workflows/gh-mvn-publish.yml b/.github/workflows/gh-mvn-publish.yml index d85b321..6d77e85 100644 --- a/.github/workflows/gh-mvn-publish.yml +++ b/.github/workflows/gh-mvn-publish.yml @@ -14,11 +14,11 @@ jobs: steps: - uses: actions/checkout@v5 - - name: Set up JDK 17 + - name: Set up JDK 21 uses: actions/setup-java@v5 with: - java-version: '17' - distribution: 'temurin' + java-version: '21' + distribution: 'oracle' server-id: github settings-path: ${{ github.workspace }} diff --git a/.github/workflows/maven-publish.yml b/.github/workflows/maven-publish.yml index 9dba04d..b6aa79a 100644 --- a/.github/workflows/maven-publish.yml +++ b/.github/workflows/maven-publish.yml @@ -26,11 +26,11 @@ jobs: steps: - uses: actions/checkout@v5 - - name: Set up JDK 17 + - name: Set up JDK 21 uses: actions/setup-java@v5 with: - java-version: '17' - distribution: 'temurin' + java-version: '21' + distribution: 'oracle' server-id: github # Value of the distributionManagement/repository/id field of the pom.xml settings-path: ${{ github.workspace }} # location for the settings.xml file diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index 50ec9df..7aab5ff 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -30,11 +30,11 @@ jobs: url: ${{ steps.deployment.outputs.page_url }} steps: - uses: actions/checkout@v5 - - name: Set up JDK 11 + - name: Set up JDK 21 uses: actions/setup-java@v5 with: - java-version: '11' - distribution: 'adopt-hotspot' + java-version: '21' + distribution: 'oracle' server-id: github # Value of the distributionManagement/repository/id field of the pom.xml settings-path: ${{ github.workspace }} # location for the settings.xml file diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 44069fa..4583cb9 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -36,11 +36,11 @@ jobs: run: | curl -fsSL https://ollama.com/install.sh | sh - - name: Set up JDK 17 + - name: Set up JDK 21 uses: actions/setup-java@v5 with: - java-version: '17' - distribution: 'temurin' + java-version: '21' + distribution: 'oracle' server-id: github settings-path: ${{ github.workspace }} diff --git a/.gitignore b/.gitignore index 788123e..0c97cfe 100644 --- a/.gitignore +++ b/.gitignore @@ -41,4 +41,4 @@ pom.xml.* release.properties !.idea/icon.svg -src/main/java/io/github/ollama4j/localtests \ No newline at end of file +src/main/java/io/github/ollama4j/localtests diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7c1bf5c..94d3f75 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,11 +21,19 @@ repos: # for commit message formatting - repo: https://github.com/commitizen-tools/commitizen - rev: v4.9.0 + rev: v4.9.1 hooks: - id: commitizen stages: [commit-msg] + - repo: local + hooks: + - id: format-code + name: Format Code + entry: make apply-formatting + language: system + always_run: true + # # for java code quality # - repo: https://github.com/gherynos/pre-commit-java # rev: v0.6.10 diff --git a/LICENSE b/LICENSE index 85c8a43..883ee94 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2023 Amith Koujalgi +Copyright (c) 2023 Amith Koujalgi and contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/Makefile b/Makefile index 2753d0e..7b5ad0c 100644 --- a/Makefile +++ b/Makefile @@ -2,41 +2,70 @@ dev: @echo "Setting up dev environment..." @command -v pre-commit >/dev/null 2>&1 || { echo "Error: pre-commit is not installed. Please install it first."; exit 1; } @command -v docker >/dev/null 2>&1 || { echo "Error: docker is not installed. Please install it first."; exit 1; } - pre-commit install - pre-commit autoupdate - pre-commit install --install-hooks + @pre-commit install + @pre-commit autoupdate + @pre-commit install --install-hooks -build: - mvn -B clean install -Dgpg.skip=true +check-formatting: + @echo "\033[0;34mChecking code formatting...\033[0m" + @mvn spotless:check -full-build: - mvn -B clean install +apply-formatting: + @echo "\033[0;32mApplying code formatting...\033[0m" + @mvn spotless:apply -unit-tests: - mvn clean test -Punit-tests +build: apply-formatting + @echo "\033[0;34mBuilding project (GPG skipped)...\033[0m" + @mvn -B clean install -Dgpg.skip=true -Dmaven.javadoc.skip=true -integration-tests: - export USE_EXTERNAL_OLLAMA_HOST=false && mvn clean verify -Pintegration-tests +full-build: apply-formatting + @echo "\033[0;34mPerforming full build...\033[0m" + @mvn -B clean install -integration-tests-remote: - export USE_EXTERNAL_OLLAMA_HOST=true && export OLLAMA_HOST=http://192.168.29.223:11434 && mvn clean verify -Pintegration-tests -Dgpg.skip=true +unit-tests: apply-formatting + @echo "\033[0;34mRunning unit tests...\033[0m" + @mvn clean test -Punit-tests + +integration-tests: apply-formatting + @echo "\033[0;34mRunning integration tests (local)...\033[0m" + @export USE_EXTERNAL_OLLAMA_HOST=false && mvn clean verify -Pintegration-tests + +integration-tests-remote: apply-formatting + @echo "\033[0;34mRunning integration tests (remote)...\033[0m" + @export USE_EXTERNAL_OLLAMA_HOST=true && export OLLAMA_HOST=http://192.168.29.229:11434 && mvn clean verify -Pintegration-tests -Dgpg.skip=true doxygen: - doxygen Doxyfile + @echo "\033[0;34mGenerating documentation with Doxygen...\033[0m" + @doxygen Doxyfile + +javadoc: + @echo "\033[0;34mGenerating Javadocs into '$(javadocfolder)'...\033[0m" + @mvn clean javadoc:javadoc + @if [ -f "target/reports/apidocs/index.html" ]; then \ + echo "\033[0;32mJavadocs generated in target/reports/apidocs/index.html\033[0m"; \ + else \ + echo "\033[0;31mFailed to generate Javadocs in target/reports/apidocs\033[0m"; \ + exit 1; \ + fi list-releases: - curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=desc&page=0&size=20&filter=namespace%3Aio.github.ollama4j%2Cname%3Aollama4j' \ + @echo "\033[0;34mListing latest releases...\033[0m" + @curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=desc&page=0&size=20&filter=namespace%3Aio.github.ollama4j%2Cname%3Aollama4j' \ --compressed \ --silent | jq -r '.components[].version' docs-build: - npm i --prefix docs && npm run build --prefix docs + @echo "\033[0;34mBuilding documentation site...\033[0m" + @cd ./docs && npm ci --no-audit --fund=false && npm run build docs-serve: - npm i --prefix docs && npm run start --prefix docs + @echo "\033[0;34mServing documentation site...\033[0m" + @cd ./docs && npm install && npm run start start-cpu: - docker run -it -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama + @echo "\033[0;34mStarting Ollama (CPU mode)...\033[0m" + @docker run -it -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama start-gpu: - docker run -it --gpus=all -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama \ No newline at end of file + @echo "\033[0;34mStarting Ollama (GPU mode)...\033[0m" + @docker run -it --gpus=all -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama \ No newline at end of file diff --git a/README.md b/README.md index 37e35fc..19a7e7d 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,8 @@
ollama4j-icon - ### Ollama4j +### Ollama4j +
@@ -40,15 +41,52 @@ _Find more details on the **[website](https://ollama4j.github.io/ollama4j/)**._ ## Table of Contents +- [Capabilities](#capabilities) - [How does it work?](#how-does-it-work) - [Requirements](#requirements) -- [Installation](#installation) -- [API Spec](https://ollama4j.github.io/ollama4j/category/apis---model-management) +- [Usage](#usage) + - [For Maven](#for-maven) + - [Using Maven Central](#using-maven-central) + - [Using GitHub's Maven Package Repository](#using-githubs-maven-package-repository) + - [For Gradle](#for-gradle) +- [API Spec](#api-spec) - [Examples](#examples) -- [Javadoc](https://ollama4j.github.io/ollama4j/apidocs/) - [Development](#development) -- [Contributions](#get-involved) -- [References](#references) + - [Setup dev environment](#setup-dev-environment) + - [Build](#build) + - [Run unit tests](#run-unit-tests) + - [Run integration tests](#run-integration-tests) + - [Releases](#releases) +- [Get Involved](#get-involved) +- [Who's using Ollama4j?](#whos-using-ollama4j) +- [Growth](#growth) + - [References](#references) + - [Credits](#credits) + - [Appreciate the work?](#appreciate-the-work) + +## Capabilities + +- **Text generation**: Single-turn `generate` with optional streaming and advanced options +- **Chat**: Multi-turn chat with conversation history and roles +- **Tool/function calling**: Built-in tool invocation via annotations and tool specs +- **Reasoning/thinking modes**: Generate and chat with “thinking” outputs where supported +- **Image inputs (multimodal)**: Generate with images as inputs where models support vision +- **Embeddings**: Create vector embeddings for text +- **Async generation**: Fire-and-forget style generation APIs +- **Custom roles**: Define and use custom chat roles +- **Model management**: List, pull, create, delete, and get model details +- **Connectivity utilities**: Server `ping` and process status (`ps`) +- **Authentication**: Basic auth and bearer token support +- **Options builder**: Type-safe builder for model parameters and request options +- **Timeouts**: Configure connect/read/write timeouts +- **Logging**: Built-in logging hooks for requests and responses +- **Metrics & Monitoring** 🆕: Built-in Prometheus metrics export for real-time monitoring of requests, model usage, and + performance. *(Beta feature – feedback/contributions welcome!)* - + Checkout [ollama4j-examples](https://github.com/ollama4j/ollama4j-examples) repository for details. + +
+ ollama4j-icon +
## How does it work? @@ -73,7 +111,7 @@ _Find more details on the **[website](https://ollama4j.github.io/ollama4j/)**._

-## Installation +## Usage > [!NOTE] > We are now publishing the artifacts to both Maven Central and GitHub package repositories. @@ -182,7 +220,7 @@ dependencies { [lib-shield]: https://img.shields.io/badge/ollama4j-get_latest_version-blue.svg?style=just-the-message&labelColor=gray -#### API Spec +### API Spec > [!TIP] > Find the full API specifications on the [website](https://ollama4j.github.io/ollama4j/). diff --git a/docs/METRICS.md b/docs/METRICS.md new file mode 100644 index 0000000..9261a99 --- /dev/null +++ b/docs/METRICS.md @@ -0,0 +1,184 @@ +# Prometheus Metrics Integration + +Ollama4j now includes comprehensive Prometheus metrics collection to help you monitor and observe your Ollama API usage. This feature allows you to track request counts, response times, model usage, and other operational metrics. + +## Features + +The metrics integration provides the following metrics: + +- **Request Metrics**: Total requests, duration histograms, and response time summaries by endpoint +- **Model Usage**: Model-specific usage statistics and response times +- **Token Generation**: Token count tracking per model +- **Error Tracking**: Error counts by type and endpoint +- **Active Connections**: Current number of active API connections + +## Quick Start + +### 1. Enable Metrics Collection + +```java +import io.github.ollama4j.OllamaAPI; + +// Create API instance with metrics enabled +OllamaAPI ollamaAPI = new OllamaAPI(); +ollamaAPI.setMetricsEnabled(true); +``` + +### 2. Start Metrics Server + +```java +import io.prometheus.client.exporter.HTTPServer; + +// Start Prometheus metrics HTTP server on port 8080 +HTTPServer metricsServer = new HTTPServer(8080); +System.out.println("Metrics available at: http://localhost:8080/metrics"); +``` + +### 3. Use the API (Metrics are automatically collected) + +```java +// All API calls are automatically instrumented +boolean isReachable = ollamaAPI.ping(); + +Map format = new HashMap<>(); +format.put("type", "json"); +OllamaResult result = ollamaAPI.generateWithFormat( + "llama2", + "Generate a JSON object", + format +); +``` + +## Available Metrics + +### Request Metrics + +- `ollama_api_requests_total` - Total number of API requests by endpoint, method, and status +- `ollama_api_request_duration_seconds` - Request duration histogram by endpoint and method +- `ollama_api_response_time_seconds` - Response time summary with percentiles + +### Model Metrics + +- `ollama_model_usage_total` - Model usage count by model name and operation +- `ollama_model_response_time_seconds` - Model response time histogram +- `ollama_tokens_generated_total` - Total tokens generated by model + +### System Metrics + +- `ollama_api_active_connections` - Current number of active connections +- `ollama_api_errors_total` - Error count by endpoint and error type + +## Example Metrics Output + +``` +# HELP ollama_api_requests_total Total number of Ollama API requests +# TYPE ollama_api_requests_total counter +ollama_api_requests_total{endpoint="/api/generate",method="POST",status="success"} 5.0 +ollama_api_requests_total{endpoint="/api/embed",method="POST",status="success"} 3.0 + +# HELP ollama_api_request_duration_seconds Duration of Ollama API requests in seconds +# TYPE ollama_api_request_duration_seconds histogram +ollama_api_request_duration_seconds_bucket{endpoint="/api/generate",method="POST",le="0.1"} 0.0 +ollama_api_request_duration_seconds_bucket{endpoint="/api/generate",method="POST",le="0.5"} 2.0 +ollama_api_request_duration_seconds_bucket{endpoint="/api/generate",method="POST",le="1.0"} 4.0 +ollama_api_request_duration_seconds_bucket{endpoint="/api/generate",method="POST",le="+Inf"} 5.0 +ollama_api_request_duration_seconds_sum{endpoint="/api/generate",method="POST"} 2.5 +ollama_api_request_duration_seconds_count{endpoint="/api/generate",method="POST"} 5.0 + +# HELP ollama_model_usage_total Total number of model usage requests +# TYPE ollama_model_usage_total counter +ollama_model_usage_total{model_name="llama2",operation="generate_with_format"} 5.0 +ollama_model_usage_total{model_name="llama2",operation="embed"} 3.0 + +# HELP ollama_tokens_generated_total Total number of tokens generated +# TYPE ollama_tokens_generated_total counter +ollama_tokens_generated_total{model_name="llama2"} 150.0 +``` + +## Configuration + +### Enable/Disable Metrics + +```java +OllamaAPI ollamaAPI = new OllamaAPI(); + +// Enable metrics collection +ollamaAPI.setMetricsEnabled(true); + +// Disable metrics collection (default) +ollamaAPI.setMetricsEnabled(false); +``` + +### Custom Metrics Server + +```java +import io.prometheus.client.exporter.HTTPServer; + +// Start on custom port +HTTPServer metricsServer = new HTTPServer(9090); + +// Start on custom host and port +HTTPServer metricsServer = new HTTPServer("0.0.0.0", 9090); +``` + +## Integration with Prometheus + +### Prometheus Configuration + +Add this to your `prometheus.yml`: + +```yaml +scrape_configs: + - job_name: 'ollama4j' + static_configs: + - targets: ['localhost:8080'] + scrape_interval: 15s +``` + +### Grafana Dashboards + +You can create Grafana dashboards using the metrics. Some useful queries: + +- **Request Rate**: `rate(ollama_api_requests_total[5m])` +- **Average Response Time**: `rate(ollama_api_request_duration_seconds_sum[5m]) / rate(ollama_api_request_duration_seconds_count[5m])` +- **Error Rate**: `rate(ollama_api_requests_total{status="error"}[5m]) / rate(ollama_api_requests_total[5m])` +- **Model Usage**: `rate(ollama_model_usage_total[5m])` +- **Token Generation Rate**: `rate(ollama_tokens_generated_total[5m])` + +## Performance Considerations + +- Metrics collection adds minimal overhead (~1-2% in most cases) +- Metrics are collected asynchronously and don't block API calls +- You can disable metrics in production if needed: `ollamaAPI.setMetricsEnabled(false)` +- The metrics server uses minimal resources + +## Troubleshooting + +### Metrics Not Appearing + +1. Ensure metrics are enabled: `ollamaAPI.setMetricsEnabled(true)` +2. Check that the metrics server is running: `http://localhost:8080/metrics` +3. Verify API calls are being made (metrics only appear after API usage) + +### High Memory Usage + +- Metrics accumulate over time. Consider restarting your application periodically +- Use Prometheus to scrape metrics regularly to avoid accumulation + +### Custom Metrics + +You can extend the metrics by accessing the Prometheus registry directly: + +```java +import io.prometheus.client.CollectorRegistry; +import io.prometheus.client.Counter; + +// Create custom metrics +Counter customCounter = Counter.build() + .name("my_custom_metric_total") + .help("My custom metric") + .register(); + +// Use the metric +customCounter.inc(); +``` diff --git a/docs/blog/2025-03-08-blog/index.md b/docs/blog/2025-03-08-blog/index.md index b702f39..347ed86 100644 --- a/docs/blog/2025-03-08-blog/index.md +++ b/docs/blog/2025-03-08-blog/index.md @@ -337,7 +337,7 @@ import com.couchbase.client.java.Scope; import com.couchbase.client.java.json.JsonObject; import com.couchbase.client.java.query.QueryResult; import io.github.ollama4j.OllamaAPI; -import io.github.ollama4j.exceptions.OllamaBaseException; +import io.github.ollama4j.exceptions.OllamaException; import io.github.ollama4j.exceptions.ToolInvocationException; import io.github.ollama4j.tools.OllamaToolsResult; import io.github.ollama4j.tools.ToolFunction; @@ -356,7 +356,7 @@ import java.util.Map; public class CouchbaseToolCallingExample { - public static void main(String[] args) throws IOException, ToolInvocationException, OllamaBaseException, InterruptedException { + public static void main(String[] args) throws IOException, ToolInvocationException, OllamaException, InterruptedException { String connectionString = Utilities.getFromEnvVar("CB_CLUSTER_URL"); String username = Utilities.getFromEnvVar("CB_CLUSTER_USERNAME"); String password = Utilities.getFromEnvVar("CB_CLUSTER_PASSWORD"); diff --git a/docs/docs/apis-extras/basic-auth.md b/docs/docs/apis-extras/basic-auth.md index 15f681c..1e96177 100644 --- a/docs/docs/apis-extras/basic-auth.md +++ b/docs/docs/apis-extras/basic-auth.md @@ -1,8 +1,8 @@ --- -sidebar_position: 2 +sidebar_position: 3 --- -# Set Basic Authentication +# Basic Auth This API lets you set the basic authentication for the Ollama client. This would help in scenarios where Ollama server would be setup behind a gateway/reverse proxy with basic auth. diff --git a/docs/docs/apis-extras/bearer-auth.md b/docs/docs/apis-extras/bearer-auth.md index 1ae3e80..cdd4b3a 100644 --- a/docs/docs/apis-extras/bearer-auth.md +++ b/docs/docs/apis-extras/bearer-auth.md @@ -1,8 +1,8 @@ --- -sidebar_position: 2 +sidebar_position: 4 --- -# Set Bearer Authentication +# Bearer Auth This API lets you set the bearer authentication for the Ollama client. This would help in scenarios where Ollama server would be setup behind a gateway/reverse proxy with bearer auth. diff --git a/docs/docs/apis-extras/logging.md b/docs/docs/apis-extras/logging.md new file mode 100644 index 0000000..d73ba10 --- /dev/null +++ b/docs/docs/apis-extras/logging.md @@ -0,0 +1,26 @@ +--- +sidebar_position: 7 +--- + +# Logging + +### Using with SLF4J and Logback + +Add a `logback.xml` file to your `src/main/resources` folder with the following content: + +```xml + + + + + + + + + %d{yyyy-MM-dd HH:mm:ss} %-5level %logger{36} - %msg%n + + + + + +``` \ No newline at end of file diff --git a/docs/docs/apis-extras/ping.md b/docs/docs/apis-extras/ping.md index b52fff0..256c26b 100644 --- a/docs/docs/apis-extras/ping.md +++ b/docs/docs/apis-extras/ping.md @@ -1,5 +1,5 @@ --- -sidebar_position: 3 +sidebar_position: 5 --- # Ping diff --git a/docs/docs/apis-generate/prompt-builder.md b/docs/docs/apis-extras/prompt-builder.md similarity index 98% rename from docs/docs/apis-generate/prompt-builder.md rename to docs/docs/apis-extras/prompt-builder.md index dfbd6a8..3240591 100644 --- a/docs/docs/apis-generate/prompt-builder.md +++ b/docs/docs/apis-extras/prompt-builder.md @@ -1,5 +1,5 @@ --- -sidebar_position: 10 +sidebar_position: 2 --- # Prompt Builder @@ -51,6 +51,7 @@ public class Main { You will get a response similar to: +:::tip[LLM Response] ```go package main @@ -71,4 +72,5 @@ func readFile(fileName string) { fmt.Println(f.String()) } } -``` \ No newline at end of file +``` +::: \ No newline at end of file diff --git a/docs/docs/apis-extras/ps.md b/docs/docs/apis-extras/ps.md index 4f37e04..43b0af5 100644 --- a/docs/docs/apis-extras/ps.md +++ b/docs/docs/apis-extras/ps.md @@ -1,5 +1,5 @@ --- -sidebar_position: 4 +sidebar_position: 5 --- # PS @@ -12,17 +12,14 @@ This API corresponds to the [PS](https://github.com/ollama/ollama/blob/main/docs package io.github.ollama4j.localtests; import io.github.ollama4j.OllamaAPI; -import io.github.ollama4j.exceptions.OllamaBaseException; -import io.github.ollama4j.models.ps.ModelsProcessResponse; - -import java.io.IOException; +import io.github.ollama4j.models.ps.ModelProcessesResult; public class Main { public static void main(String[] args) { OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434"); - ModelsProcessResponse response = ollamaAPI.ps(); + ModelProcessesResult response = ollamaAPI.ps(); System.out.println(response); } diff --git a/docs/docs/apis-extras/request-timeout.md b/docs/docs/apis-extras/timeouts.md similarity index 90% rename from docs/docs/apis-extras/request-timeout.md rename to docs/docs/apis-extras/timeouts.md index f22971a..2b0b52c 100644 --- a/docs/docs/apis-extras/request-timeout.md +++ b/docs/docs/apis-extras/timeouts.md @@ -2,7 +2,9 @@ sidebar_position: 2 --- -# Set Request Timeout +# Timeouts + +### Set Request Timeout This API lets you set the request timeout for the Ollama client. diff --git a/docs/docs/apis-generate/chat-with-tools.md b/docs/docs/apis-generate/chat-with-tools.md index edc1dc4..eca5e15 100644 --- a/docs/docs/apis-generate/chat-with-tools.md +++ b/docs/docs/apis-generate/chat-with-tools.md @@ -21,25 +21,25 @@ session. The tool invocation and response handling are all managed internally by -::::tip[LLM Response] +:::tip[LLM Response] **First answer:** 6527fb60-9663-4073-b59e-855526e0a0c2 is the ID of the employee named 'Rahul Kumar'. **Second answer:** _Kumar_ is the last name of the employee named 'Rahul Kumar'. -:::: +::: This tool calling can also be done using the streaming API. -### Client-managed tool calls (clientHandlesTools) +### Client-managed tool calls (useTools) By default, ollama4j automatically executes tool calls returned by the model during chat, runs the corresponding registered Java methods, and appends the tool results back into the conversation. For some applications, you may want to intercept tool calls and decide yourself when and how to execute them (for example, to queue them, to show a confirmation UI to the user, to run them in a sandbox, or to perform multi‑step orchestration). -To enable this behavior, set the clientHandlesTools flag to true on your OllamaAPI instance. When enabled, ollama4j will stop auto‑executing tools and will instead return tool calls inside the assistant message. You can then inspect the tool calls and execute them manually. +To enable this behavior, set the useTools flag to true on your OllamaAPI instance. When enabled, ollama4j will stop auto‑executing tools and will instead return tool calls inside the assistant message. You can then inspect the tool calls and execute them manually. Notes: -- Default value: clientHandlesTools is false for backward compatibility. -- When clientHandlesTools is false, ollama4j auto‑executes tools and loops internally until tools are resolved or max retries is reached. -- When clientHandlesTools is true, ollama4j will not execute tools; you are responsible for invoking tools and passing results back as TOOL messages, then re‑calling chat() to continue. +- Default value: useTools is true. +- When useTools is false, ollama4j auto‑executes tools and loops internally until tools are resolved or max retries is reached. +- When useTools is true, ollama4j will not execute tools; you are responsible for invoking tools and passing results back as TOOL messages, then re‑calling chat() to continue. ### Annotation-Based Tool Registration @@ -74,8 +74,8 @@ The annotated method can then be used as a tool in the chat session: Running the above would produce a response similar to: -::::tip[LLM Response] +:::tip[LLM Response] **First answer:** 0.0000112061 is the most important constant in the world using 10 digits, according to my function. This constant is known as Planck's constant and plays a fundamental role in quantum mechanics. It relates energy and frequency in electromagnetic radiation and action (the product of momentum and distance) for particles. **Second answer:** 3-digit constant: 8.001 -:::: +::: diff --git a/docs/docs/apis-generate/chat.md b/docs/docs/apis-generate/chat.md index 08087b0..af53342 100644 --- a/docs/docs/apis-generate/chat.md +++ b/docs/docs/apis-generate/chat.md @@ -16,7 +16,7 @@ information using the history of already asked questions and the respective answ You will get a response similar to: -::::tip[LLM Response] +:::tip[LLM Response] > First answer: The capital of France is Paris. > @@ -47,7 +47,7 @@ You will get a response similar to: "tool_calls" : null }] ``` -:::: +::: ### Create a conversation where the answer is streamed @@ -75,9 +75,9 @@ You will get a response similar to: You will get a response as: -::::tip[LLM Response] +:::tip[LLM Response] Shhh! -:::: +::: ## Create a conversation about an image (requires a vision model) @@ -91,7 +91,7 @@ Let's use this image: You will get a response similar to: -::::tip[LLM Response] +:::tip[LLM Response] **First Answer:** The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early @@ -101,4 +101,4 @@ evening, given the warm lighting and the low position of the sun in the sky. appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a **_Golden Retriever_** or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed confidently. -:::: +::: diff --git a/docs/docs/apis-generate/generate-async.md b/docs/docs/apis-generate/generate-async.md index a2eb5af..fe659ce 100644 --- a/docs/docs/apis-generate/generate-async.md +++ b/docs/docs/apis-generate/generate-async.md @@ -1,5 +1,5 @@ --- -sidebar_position: 2 +sidebar_position: 6 --- import CodeEmbed from '@site/src/components/CodeEmbed'; diff --git a/docs/docs/apis-generate/generate-embeddings.md b/docs/docs/apis-generate/generate-embeddings.md index f716feb..152c8da 100644 --- a/docs/docs/apis-generate/generate-embeddings.md +++ b/docs/docs/apis-generate/generate-embeddings.md @@ -1,5 +1,5 @@ --- -sidebar_position: 5 +sidebar_position: 1 --- import CodeEmbed from '@site/src/components/CodeEmbed'; @@ -12,7 +12,7 @@ Generate embeddings from a model. -::::tip[LLM Response] +:::tip[LLM Response] ```json [ @@ -40,7 +40,7 @@ Generate embeddings from a model. ] ``` -:::: +::: You could also use the `OllamaEmbedRequestModel` to specify the options such as `seed`, `temperature`, etc., to apply for generating embeddings. @@ -49,7 +49,7 @@ for generating embeddings. You will get a response similar to: -::::tip[LLM Response] +:::tip[LLM Response] ```json [ @@ -77,4 +77,4 @@ You will get a response similar to: ] ``` -:::: \ No newline at end of file +::: \ No newline at end of file diff --git a/docs/docs/apis-generate/generate-thinking.md b/docs/docs/apis-generate/generate-thinking.md index 93a8dea..d38634d 100644 --- a/docs/docs/apis-generate/generate-thinking.md +++ b/docs/docs/apis-generate/generate-thinking.md @@ -1,5 +1,5 @@ --- -sidebar_position: 2 +sidebar_position: 3 --- import CodeEmbed from '@site/src/components/CodeEmbed'; diff --git a/docs/docs/apis-generate/generate-with-image-urls.md b/docs/docs/apis-generate/generate-with-image-urls.md deleted file mode 100644 index cc89e5d..0000000 --- a/docs/docs/apis-generate/generate-with-image-urls.md +++ /dev/null @@ -1,33 +0,0 @@ ---- -sidebar_position: 4 ---- - -import CodeEmbed from '@site/src/components/CodeEmbed'; - -# Generate with Image URLs - -This API lets you ask questions along with the image files to the LLMs. -This API corresponds to -the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. - -:::note - -Executing this on Ollama server running in CPU-mode will take longer to generate response. Hence, GPU-mode is -recommended. - -::: - -## Ask (Sync) - -Passing the link of this image the following code: - -![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg) - - - -You will get a response similar to: - -::::tip[LLM Response] -This image features a white boat with brown cushions, where a dog is sitting on the back of the boat. The dog seems to -be enjoying its time outdoors, perhaps on a lake. -:::: \ No newline at end of file diff --git a/docs/docs/apis-generate/generate-with-image-files.md b/docs/docs/apis-generate/generate-with-images.md similarity index 50% rename from docs/docs/apis-generate/generate-with-image-files.md rename to docs/docs/apis-generate/generate-with-images.md index e17888d..7d1a492 100644 --- a/docs/docs/apis-generate/generate-with-image-files.md +++ b/docs/docs/apis-generate/generate-with-images.md @@ -1,10 +1,10 @@ --- -sidebar_position: 3 +sidebar_position: 4 --- import CodeEmbed from '@site/src/components/CodeEmbed'; -# Generate with Image Files +# Generate with Images This API lets you ask questions along with the image files to the LLMs. This API corresponds to @@ -27,7 +27,35 @@ If you have this image downloaded and you pass the path to the downloaded image You will get a response similar to: -::::tip[LLM Response] +:::tip[LLM Response] This image features a white boat with brown cushions, where a dog is sitting on the back of the boat. The dog seems to be enjoying its time outdoors, perhaps on a lake. -:::: \ No newline at end of file +::: + +# Generate with Image URLs + +This API lets you ask questions along with the image files to the LLMs. +This API corresponds to +the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. + +:::note + +Executing this on Ollama server running in CPU-mode will take longer to generate response. Hence, GPU-mode is +recommended. + +::: + +## Ask (Sync) + +Passing the link of this image the following code: + +![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg) + + + +You will get a response similar to: + +:::tip[LLM Response] +This image features a white boat with brown cushions, where a dog is sitting on the back of the boat. The dog seems to +be enjoying its time outdoors, perhaps on a lake. +::: \ No newline at end of file diff --git a/docs/docs/apis-generate/generate-with-tools.md b/docs/docs/apis-generate/generate-with-tools.md index d25a5fc..291ccd5 100644 --- a/docs/docs/apis-generate/generate-with-tools.md +++ b/docs/docs/apis-generate/generate-with-tools.md @@ -1,5 +1,5 @@ --- -sidebar_position: 6 +sidebar_position: 5 --- import CodeEmbed from '@site/src/components/CodeEmbed'; @@ -79,7 +79,7 @@ Now put it all together by registering the tools and prompting with tools. Run this full example and you will get a response similar to: -::::tip[LLM Response] +:::tip[LLM Response] [Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L @@ -88,4 +88,4 @@ Run this full example and you will get a response similar to: [Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name: Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}` -:::: +::: diff --git a/docs/docs/apis-generate/generate.md b/docs/docs/apis-generate/generate.md index 553a014..0eb9b05 100644 --- a/docs/docs/apis-generate/generate.md +++ b/docs/docs/apis-generate/generate.md @@ -1,11 +1,11 @@ --- -sidebar_position: 1 +sidebar_position: 2 --- import CodeEmbed from '@site/src/components/CodeEmbed'; import TypewriterTextarea from '@site/src/components/TypewriterTextarea'; -# Generate (Sync) +# Generate This API lets you ask questions to the LLMs in a synchronous way. This API corresponds to @@ -22,10 +22,10 @@ to [this](/apis-extras/options-builder). You will get a response similar to: -::::tip[LLM Response] +:::tip[LLM Response] I am a model of an AI trained by Mistral AI. I was designed to assist with a wide range of tasks, from answering questions to helping with complex computations and research. How can I help you toda -:::: +::: ### Try asking a question, receiving the answer streamed @@ -49,7 +49,7 @@ width='100%' You will get a response similar to: -::::tip[LLM Response] +:::tip[LLM Response] ```json { @@ -58,12 +58,12 @@ You will get a response similar to: } ``` -:::: +::: ### With response mapped to specified class type -::::tip[LLM Response] +:::tip[LLM Response] HeroInfo(heroName=Batman, ageOfPerson=30) -:::: \ No newline at end of file +::: \ No newline at end of file diff --git a/docs/docs/apis-model-management/_category_.json b/docs/docs/apis-model-management/_category_.json index 53539cf..48f345c 100644 --- a/docs/docs/apis-model-management/_category_.json +++ b/docs/docs/apis-model-management/_category_.json @@ -1,5 +1,5 @@ { - "label": "APIs - Model Management", + "label": "APIs - Manage Models", "position": 2, "link": { "type": "generated-index", diff --git a/metrics.png b/metrics.png new file mode 100644 index 0000000..cc81197 Binary files /dev/null and b/metrics.png differ diff --git a/pom.xml b/pom.xml index 60cb5b1..3132f56 100644 --- a/pom.xml +++ b/pom.xml @@ -163,6 +163,69 @@ Etc/UTC + + + com.diffplug.spotless + spotless-maven-plugin + 2.46.1 + + + + + + + .gitattributes + .gitignore + + + + + + true + 4 + + + + + + + + + + 1.28.0 + + true + false + + + + + + + + + + + + + + check + + compile + + + @@ -212,6 +275,7 @@ slf4j-api 2.0.17 + org.junit.jupiter junit-jupiter-api @@ -243,6 +307,19 @@ 1.21.3 test + + + + io.prometheus + simpleclient + 0.16.0 + + + + com.google.guava + guava + 33.5.0-jre + diff --git a/src/main/java/io/github/ollama4j/OllamaAPI.java b/src/main/java/io/github/ollama4j/OllamaAPI.java index a399adf..7e095d2 100644 --- a/src/main/java/io/github/ollama4j/OllamaAPI.java +++ b/src/main/java/io/github/ollama4j/OllamaAPI.java @@ -1,20 +1,26 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j; -import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.ObjectMapper; -import io.github.ollama4j.exceptions.OllamaBaseException; +import io.github.ollama4j.exceptions.OllamaException; import io.github.ollama4j.exceptions.RoleNotFoundException; import io.github.ollama4j.exceptions.ToolInvocationException; -import io.github.ollama4j.exceptions.ToolNotFoundException; +import io.github.ollama4j.metrics.MetricsRecorder; import io.github.ollama4j.models.chat.*; -import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel; -import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel; -import io.github.ollama4j.models.embeddings.OllamaEmbeddingResponseModel; -import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel; +import io.github.ollama4j.models.chat.OllamaChatTokenHandler; +import io.github.ollama4j.models.embed.OllamaEmbedRequest; +import io.github.ollama4j.models.embed.OllamaEmbedResult; import io.github.ollama4j.models.generate.OllamaGenerateRequest; -import io.github.ollama4j.models.generate.OllamaStreamHandler; -import io.github.ollama4j.models.generate.OllamaTokenHandler; -import io.github.ollama4j.models.ps.ModelsProcessResponse; +import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver; +import io.github.ollama4j.models.generate.OllamaGenerateTokenHandler; +import io.github.ollama4j.models.ps.ModelProcessesResult; import io.github.ollama4j.models.request.*; import io.github.ollama4j.models.response.*; import io.github.ollama4j.tools.*; @@ -22,16 +28,7 @@ import io.github.ollama4j.tools.annotations.OllamaToolService; import io.github.ollama4j.tools.annotations.ToolProperty; import io.github.ollama4j.tools.annotations.ToolSpec; import io.github.ollama4j.utils.Constants; -import io.github.ollama4j.utils.Options; import io.github.ollama4j.utils.Utils; -import lombok.Setter; -import org.jsoup.Jsoup; -import org.jsoup.nodes.Document; -import org.jsoup.nodes.Element; -import org.jsoup.select.Elements; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.*; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; @@ -39,7 +36,6 @@ import java.lang.reflect.Parameter; import java.net.URI; import java.net.URISyntaxException; import java.net.http.HttpClient; -import java.net.http.HttpConnectTimeoutException; import java.net.http.HttpRequest; import java.net.http.HttpResponse; import java.nio.charset.StandardCharsets; @@ -47,45 +43,52 @@ import java.nio.file.Files; import java.time.Duration; import java.util.*; import java.util.stream.Collectors; +import lombok.Setter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** - * The base Ollama API class. + * The main API class for interacting with the Ollama server. + * + *

This class provides methods for model management, chat, embeddings, tool registration, and more. */ -@SuppressWarnings({"DuplicatedCode", "resource"}) +@SuppressWarnings({"DuplicatedCode", "resource", "SpellCheckingInspection"}) public class OllamaAPI { private static final Logger LOG = LoggerFactory.getLogger(OllamaAPI.class); private final String host; private Auth auth; + private final ToolRegistry toolRegistry = new ToolRegistry(); /** * The request timeout in seconds for API calls. *

- * Default is 10 seconds. This value determines how long the client will wait - * for a response + * Default is 10 seconds. This value determines how long the client will wait for a response * from the Ollama server before timing out. */ - @Setter - private long requestTimeoutSeconds = 10; + @Setter private long requestTimeoutSeconds = 10; + + /** The read timeout in seconds for image URLs. */ + @Setter private int imageURLReadTimeoutSeconds = 10; + + /** The connect timeout in seconds for image URLs. */ + @Setter private int imageURLConnectTimeoutSeconds = 10; /** * The maximum number of retries for tool calls during chat interactions. *

- * This value controls how many times the API will attempt to call a tool in the - * event of a failure. - * Default is 3. + * This value controls how many times the API will attempt to call a tool in the event of a + * failure. Default is 3. */ - @Setter - private int maxChatToolCallRetries = 3; + @Setter private int maxChatToolCallRetries = 3; /** * The number of retries to attempt when pulling a model from the Ollama server. *

- * If set to 0, no retries will be performed. If greater than 0, the API will - * retry pulling the model - * up to the specified number of times in case of failure. + * If set to 0, no retries will be performed. If greater than 0, the API will retry pulling + * the model up to the specified number of times in case of failure. *

* Default is 0 (no retries). */ @@ -94,26 +97,24 @@ public class OllamaAPI { private int numberOfRetriesForModelPull = 0; /** - * When set to true, tools will not be automatically executed by the library. - * Instead, tool calls will be returned to the client for manual handling. + * Enable or disable Prometheus metrics collection. *

- * Default is false for backward compatibility. + * When enabled, the API will collect and expose metrics for request counts, durations, model + * usage, and other operational statistics. Default is false. */ - @Setter - private boolean clientHandlesTools = false; + @Setter private boolean metricsEnabled = false; /** - * Instantiates the Ollama API with default Ollama host: - * http://localhost:11434 - **/ + * Instantiates the Ollama API with the default Ollama host: {@code http://localhost:11434} + */ public OllamaAPI() { this.host = "http://localhost:11434"; } /** - * Instantiates the Ollama API with specified Ollama host address. + * Instantiates the Ollama API with a specified Ollama host address. * - * @param host the host address of Ollama server + * @param host the host address of the Ollama server */ public OllamaAPI(String host) { if (host.endsWith("/")) { @@ -121,12 +122,11 @@ public class OllamaAPI { } else { this.host = host; } - LOG.info("Ollama API initialized with host: {}", this.host); + LOG.info("Ollama4j client initialized. Connected to Ollama server at: {}", this.host); } /** - * Set basic authentication for accessing Ollama server that's behind a - * reverse-proxy/gateway. + * Set basic authentication for accessing an Ollama server that's behind a reverse-proxy/gateway. * * @param username the username * @param password the password @@ -136,8 +136,7 @@ public class OllamaAPI { } /** - * Set Bearer authentication for accessing Ollama server that's behind a - * reverse-proxy/gateway. + * Set Bearer authentication for accessing an Ollama server that's behind a reverse-proxy/gateway. * * @param bearerToken the Bearer authentication token to provide */ @@ -146,64 +145,90 @@ public class OllamaAPI { } /** - * API to check the reachability of Ollama server. + * Checks the reachability of the Ollama server. * - * @return true if the server is reachable, false otherwise. + * @return true if the server is reachable, false otherwise + * @throws OllamaException if the ping fails */ - public boolean ping() { - String url = this.host + "/api/tags"; - HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest httpRequest; - try { - httpRequest = getRequestBuilderDefault(new URI(url)) - .header(Constants.HttpConstants.HEADER_KEY_ACCEPT, Constants.HttpConstants.APPLICATION_JSON) - .header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON) - .GET() - .build(); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - HttpResponse response; + public boolean ping() throws OllamaException { + long startTime = System.currentTimeMillis(); + String url = "/api/tags"; + int statusCode = -1; + Object out = null; try { + HttpClient httpClient = HttpClient.newHttpClient(); + HttpRequest httpRequest; + HttpResponse response; + httpRequest = + getRequestBuilderDefault(new URI(this.host + url)) + .header( + Constants.HttpConstants.HEADER_KEY_ACCEPT, + Constants.HttpConstants.APPLICATION_JSON) + .header( + Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, + Constants.HttpConstants.APPLICATION_JSON) + .GET() + .build(); response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); - } catch (HttpConnectTimeoutException e) { - return false; - } catch (IOException | InterruptedException e) { - throw new RuntimeException(e); + statusCode = response.statusCode(); + return statusCode == 200; + } catch (InterruptedException ie) { + Thread.currentThread().interrupt(); + throw new OllamaException("Ping interrupted", ie); + } catch (Exception e) { + throw new OllamaException("Ping failed", e); + } finally { + MetricsRecorder.record( + url, "", false, false, false, null, null, startTime, statusCode, out); } - int statusCode = response.statusCode(); - return statusCode == 200; } /** - * Provides a list of running models and details about each model currently - * loaded into memory. + * Provides a list of running models and details about each model currently loaded into memory. * - * @return ModelsProcessResponse containing details about the running models - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - * @throws OllamaBaseException if the response indicates an error status + * @return ModelsProcessResult containing details about the running models + * @throws OllamaException if the response indicates an error status */ - public ModelsProcessResponse ps() throws IOException, InterruptedException, OllamaBaseException { - String url = this.host + "/api/ps"; - HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest httpRequest = null; + public ModelProcessesResult ps() throws OllamaException { + long startTime = System.currentTimeMillis(); + String url = "/api/ps"; + int statusCode = -1; + Object out = null; try { - httpRequest = getRequestBuilderDefault(new URI(url)) - .header(Constants.HttpConstants.HEADER_KEY_ACCEPT, Constants.HttpConstants.APPLICATION_JSON) - .header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON) - .GET().build(); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - HttpResponse response = null; - response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseString = response.body(); - if (statusCode == 200) { - return Utils.getObjectMapper().readValue(responseString, ModelsProcessResponse.class); - } else { - throw new OllamaBaseException(statusCode + " - " + responseString); + HttpClient httpClient = HttpClient.newHttpClient(); + HttpRequest httpRequest = null; + try { + httpRequest = + getRequestBuilderDefault(new URI(this.host + url)) + .header( + Constants.HttpConstants.HEADER_KEY_ACCEPT, + Constants.HttpConstants.APPLICATION_JSON) + .header( + Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, + Constants.HttpConstants.APPLICATION_JSON) + .GET() + .build(); + } catch (URISyntaxException e) { + throw new OllamaException(e.getMessage(), e); + } + HttpResponse response = null; + response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); + statusCode = response.statusCode(); + String responseString = response.body(); + if (statusCode == 200) { + return Utils.getObjectMapper() + .readValue(responseString, ModelProcessesResult.class); + } else { + throw new OllamaException(statusCode + " - " + responseString); + } + } catch (InterruptedException ie) { + Thread.currentThread().interrupt(); + throw new OllamaException("ps interrupted", ie); + } catch (Exception e) { + throw new OllamaException("ps failed", e); + } finally { + MetricsRecorder.record( + url, "", false, false, false, null, null, startTime, statusCode, out); } } @@ -211,256 +236,68 @@ public class OllamaAPI { * Lists available models from the Ollama server. * * @return a list of models available on the server - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - * @throws URISyntaxException if the URI for the request is malformed + * @throws OllamaException if the response indicates an error status */ - public List listModels() throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { - String url = this.host + "/api/tags"; - HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)) - .header(Constants.HttpConstants.HEADER_KEY_ACCEPT, Constants.HttpConstants.APPLICATION_JSON) - .header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON).GET() - .build(); - HttpResponse response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseString = response.body(); - if (statusCode == 200) { - return Utils.getObjectMapper().readValue(responseString, ListModelsResponse.class).getModels(); - } else { - throw new OllamaBaseException(statusCode + " - " + responseString); - } - } - - /** - * Retrieves a list of models from the Ollama library. This method fetches the - * available models directly from Ollama - * library page, including model details such as the name, pull count, popular - * tags, tag count, and the time when model was updated. - * - * @return A list of {@link LibraryModel} objects representing the models - * available in the Ollama library. - * @throws OllamaBaseException If the HTTP request fails or the response is not - * successful (non-200 status code). - * @throws IOException If an I/O error occurs during the HTTP request - * or response processing. - * @throws InterruptedException If the thread executing the request is - * interrupted. - * @throws URISyntaxException If there is an error creating the URI for the - * HTTP request. - */ - public List listModelsFromLibrary() - throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { - String url = "https://ollama.com/library"; - HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)) - .header(Constants.HttpConstants.HEADER_KEY_ACCEPT, Constants.HttpConstants.APPLICATION_JSON) - .header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON).GET() - .build(); - HttpResponse response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseString = response.body(); - List models = new ArrayList<>(); - if (statusCode == 200) { - Document doc = Jsoup.parse(responseString); - Elements modelSections = doc.selectXpath("//*[@id='repo']/ul/li/a"); - for (Element e : modelSections) { - LibraryModel model = new LibraryModel(); - Elements names = e.select("div > h2 > div > span"); - Elements desc = e.select("div > p"); - Elements pullCounts = e.select("div:nth-of-type(2) > p > span:first-of-type > span:first-of-type"); - Elements popularTags = e.select("div > div > span"); - Elements totalTags = e.select("div:nth-of-type(2) > p > span:nth-of-type(2) > span:first-of-type"); - Elements lastUpdatedTime = e - .select("div:nth-of-type(2) > p > span:nth-of-type(3) > span:nth-of-type(2)"); - - if (names.first() == null || names.isEmpty()) { - // if name cannot be extracted, skip. - continue; - } - Optional.ofNullable(names.first()).map(Element::text).ifPresent(model::setName); - model.setDescription(Optional.ofNullable(desc.first()).map(Element::text).orElse("")); - model.setPopularTags(Optional.of(popularTags) - .map(tags -> tags.stream().map(Element::text).collect(Collectors.toList())) - .orElse(new ArrayList<>())); - model.setPullCount(Optional.ofNullable(pullCounts.first()).map(Element::text).orElse("")); - model.setTotalTags( - Optional.ofNullable(totalTags.first()).map(Element::text).map(Integer::parseInt).orElse(0)); - model.setLastUpdated(Optional.ofNullable(lastUpdatedTime.first()).map(Element::text).orElse("")); - - models.add(model); + public List listModels() throws OllamaException { + long startTime = System.currentTimeMillis(); + String url = "/api/tags"; + int statusCode = -1; + Object out = null; + try { + HttpClient httpClient = HttpClient.newHttpClient(); + HttpRequest httpRequest = + getRequestBuilderDefault(new URI(this.host + url)) + .header( + Constants.HttpConstants.HEADER_KEY_ACCEPT, + Constants.HttpConstants.APPLICATION_JSON) + .header( + Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, + Constants.HttpConstants.APPLICATION_JSON) + .GET() + .build(); + HttpResponse response = + httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); + statusCode = response.statusCode(); + String responseString = response.body(); + if (statusCode == 200) { + return Utils.getObjectMapper() + .readValue(responseString, ListModelsResponse.class) + .getModels(); + } else { + throw new OllamaException(statusCode + " - " + responseString); } - return models; - } else { - throw new OllamaBaseException(statusCode + " - " + responseString); + } catch (InterruptedException ie) { + Thread.currentThread().interrupt(); + throw new OllamaException("listModels interrupted", ie); + } catch (Exception e) { + throw new OllamaException(e.getMessage(), e); + } finally { + MetricsRecorder.record( + url, "", false, false, false, null, null, startTime, statusCode, out); } } - /** - * Fetches the tags associated with a specific model from Ollama library. - * This method fetches the available model tags directly from Ollama library - * model page, including model tag name, size and time when model was last - * updated - * into a list of {@link LibraryModelTag} objects. - * - * @param libraryModel the {@link LibraryModel} object which contains the name - * of the library model - * for which the tags need to be fetched. - * @return a list of {@link LibraryModelTag} objects containing the extracted - * tags and their associated metadata. - * @throws OllamaBaseException if the HTTP response status code indicates an - * error (i.e., not 200 OK), - * or if there is any other issue during the - * request or response processing. - * @throws IOException if an input/output exception occurs during the - * HTTP request or response handling. - * @throws InterruptedException if the thread is interrupted while waiting for - * the HTTP response. - * @throws URISyntaxException if the URI format is incorrect or invalid. - */ - public LibraryModelDetail getLibraryModelDetails(LibraryModel libraryModel) - throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { - String url = String.format("https://ollama.com/library/%s/tags", libraryModel.getName()); - HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)) - .header(Constants.HttpConstants.HEADER_KEY_ACCEPT, Constants.HttpConstants.APPLICATION_JSON) - .header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON).GET() - .build(); - HttpResponse response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseString = response.body(); - - List libraryModelTags = new ArrayList<>(); - if (statusCode == 200) { - Document doc = Jsoup.parse(responseString); - Elements tagSections = doc - .select("html > body > main > div > section > div > div > div:nth-child(n+2) > div"); - for (Element e : tagSections) { - Elements tags = e.select("div > a > div"); - Elements tagsMetas = e.select("div > span"); - - LibraryModelTag libraryModelTag = new LibraryModelTag(); - - if (tags.first() == null || tags.isEmpty()) { - // if tag cannot be extracted, skip. - continue; - } - libraryModelTag.setName(libraryModel.getName()); - Optional.ofNullable(tags.first()).map(Element::text).ifPresent(libraryModelTag::setTag); - libraryModelTag.setSize(Optional.ofNullable(tagsMetas.first()).map(element -> element.text().split("•")) - .filter(parts -> parts.length > 1).map(parts -> parts[1].trim()).orElse("")); - libraryModelTag - .setLastUpdated(Optional.ofNullable(tagsMetas.first()).map(element -> element.text().split("•")) - .filter(parts -> parts.length > 1).map(parts -> parts[2].trim()).orElse("")); - libraryModelTags.add(libraryModelTag); - } - LibraryModelDetail libraryModelDetail = new LibraryModelDetail(); - libraryModelDetail.setModel(libraryModel); - libraryModelDetail.setTags(libraryModelTags); - return libraryModelDetail; - } else { - throw new OllamaBaseException(statusCode + " - " + responseString); - } - } - - /** - * Finds a specific model using model name and tag from Ollama library. - *

- * Deprecated: This method relies on the HTML structure of the Ollama - * website, - * which is subject to change at any time. As a result, it is difficult to keep - * this API - * method consistently updated and reliable. Therefore, this method is - * deprecated and - * may be removed in future releases. - *

- * This method retrieves the model from the Ollama library by its name, then - * fetches its tags. - * It searches through the tags of the model to find one that matches the - * specified tag name. - * If the model or the tag is not found, it throws a - * {@link NoSuchElementException}. - * - * @param modelName The name of the model to search for in the library. - * @param tag The tag name to search for within the specified model. - * @return The {@link LibraryModelTag} associated with the specified model and - * tag. - * @throws OllamaBaseException If there is a problem with the Ollama library - * operations. - * @throws IOException If an I/O error occurs during the operation. - * @throws URISyntaxException If there is an error with the URI syntax. - * @throws InterruptedException If the operation is interrupted. - * @throws NoSuchElementException If the model or the tag is not found. - * @deprecated This method relies on the HTML structure of the Ollama website, - * which can change at any time and break this API. It is deprecated - * and may be removed in the future. - */ - @Deprecated - public LibraryModelTag findModelTagFromLibrary(String modelName, String tag) - throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { - List libraryModels = this.listModelsFromLibrary(); - LibraryModel libraryModel = libraryModels.stream().filter(model -> model.getName().equals(modelName)) - .findFirst().orElseThrow( - () -> new NoSuchElementException(String.format("Model by name '%s' not found", modelName))); - LibraryModelDetail libraryModelDetail = this.getLibraryModelDetails(libraryModel); - return libraryModelDetail.getTags().stream().filter(tagName -> tagName.getTag().equals(tag)).findFirst() - .orElseThrow(() -> new NoSuchElementException( - String.format("Tag '%s' for model '%s' not found", tag, modelName))); - } - - /** - * Pull a model on the Ollama server from the list of available models. - *

- * If {@code numberOfRetriesForModelPull} is greater than 0, this method will - * retry pulling the model - * up to the specified number of times if an {@link OllamaBaseException} occurs, - * using exponential backoff - * between retries (delay doubles after each failed attempt, starting at 1 - * second). - *

- * The backoff is only applied between retries, not after the final attempt. - * - * @param modelName the name of the model - * @throws OllamaBaseException if the response indicates an error status or all - * retries fail - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted or the thread is - * interrupted during backoff - * @throws URISyntaxException if the URI for the request is malformed - */ - public void pullModel(String modelName) - throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { - if (numberOfRetriesForModelPull == 0) { - this.doPullModel(modelName); - return; - } - int numberOfRetries = 0; - long baseDelayMillis = 3000L; // 1 second base delay - while (numberOfRetries < numberOfRetriesForModelPull) { - try { - this.doPullModel(modelName); - return; - } catch (OllamaBaseException e) { - handlePullRetry(modelName, numberOfRetries, numberOfRetriesForModelPull, baseDelayMillis); - numberOfRetries++; - } - } - throw new OllamaBaseException( - "Failed to pull model " + modelName + " after " + numberOfRetriesForModelPull + " retries"); - } - /** * Handles retry backoff for pullModel. + * + * @param modelName the name of the model being pulled + * @param currentRetry the current retry attempt (zero-based) + * @param maxRetries the maximum number of retries allowed + * @param baseDelayMillis the base delay in milliseconds for exponential backoff + * @throws InterruptedException if the thread is interrupted during sleep */ - private void handlePullRetry(String modelName, int currentRetry, int maxRetries, long baseDelayMillis) + private void handlePullRetry( + String modelName, int currentRetry, int maxRetries, long baseDelayMillis) throws InterruptedException { int attempt = currentRetry + 1; if (attempt < maxRetries) { long backoffMillis = baseDelayMillis * (1L << currentRetry); - LOG.error("Failed to pull model {}, retrying in {}s... (attempt {}/{})", - modelName, backoffMillis / 1000, attempt, maxRetries); + LOG.error( + "Failed to pull model {}, retrying in {}s... (attempt {}/{})", + modelName, + backoffMillis / 1000, + attempt, + maxRetries); try { Thread.sleep(backoffMillis); } catch (InterruptedException ie) { @@ -468,963 +305,668 @@ public class OllamaAPI { throw ie; } } else { - LOG.error("Failed to pull model {} after {} attempts, no more retries.", modelName, maxRetries); + LOG.error( + "Failed to pull model {} after {} attempts, no more retries.", + modelName, + maxRetries); } } - private void doPullModel(String modelName) - throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { - String url = this.host + "/api/pull"; - String jsonData = new ModelRequest(modelName).toString(); - HttpRequest request = getRequestBuilderDefault(new URI(url)).POST(HttpRequest.BodyPublishers.ofString(jsonData)) - .header(Constants.HttpConstants.HEADER_KEY_ACCEPT, Constants.HttpConstants.APPLICATION_JSON) - .header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON) - .build(); - HttpClient client = HttpClient.newHttpClient(); - HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofInputStream()); - int statusCode = response.statusCode(); - InputStream responseBodyStream = response.body(); - String responseString = ""; - boolean success = false; // Flag to check the pull success. - try (BufferedReader reader = new BufferedReader( - new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { - String line; - while ((line = reader.readLine()) != null) { - ModelPullResponse modelPullResponse = Utils.getObjectMapper().readValue(line, ModelPullResponse.class); - if (modelPullResponse != null) { - // Check for error in response body first - if (modelPullResponse.getError() != null && !modelPullResponse.getError().trim().isEmpty()) { - throw new OllamaBaseException("Model pull failed: " + modelPullResponse.getError()); - } + /** + * Internal method to pull a model from the Ollama server. + * + * @param modelName the name of the model to pull + * @throws OllamaException if the pull fails + */ + private void doPullModel(String modelName) throws OllamaException { + long startTime = System.currentTimeMillis(); + String url = "/api/pull"; + int statusCode = -1; + Object out = null; + try { + String jsonData = new ModelRequest(modelName).toString(); + HttpRequest request = + getRequestBuilderDefault(new URI(this.host + url)) + .POST(HttpRequest.BodyPublishers.ofString(jsonData)) + .header( + Constants.HttpConstants.HEADER_KEY_ACCEPT, + Constants.HttpConstants.APPLICATION_JSON) + .header( + Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, + Constants.HttpConstants.APPLICATION_JSON) + .build(); + HttpClient client = HttpClient.newHttpClient(); + HttpResponse response = + client.send(request, HttpResponse.BodyHandlers.ofInputStream()); + statusCode = response.statusCode(); + InputStream responseBodyStream = response.body(); + String responseString = ""; + boolean success = false; // Flag to check the pull success. - if (modelPullResponse.getStatus() != null) { - LOG.info("{}: {}", modelName, modelPullResponse.getStatus()); - // Check if status is "success" and set success flag to true. - if ("success".equalsIgnoreCase(modelPullResponse.getStatus())) { - success = true; - } - } - } else { - LOG.error("Received null response for model pull."); + try (BufferedReader reader = + new BufferedReader( + new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { + String line; + while ((line = reader.readLine()) != null) { + ModelPullResponse modelPullResponse = + Utils.getObjectMapper().readValue(line, ModelPullResponse.class); + success = processModelPullResponse(modelPullResponse, modelName) || success; } } - } - if (!success) { - LOG.error("Model pull failed or returned invalid status."); - throw new OllamaBaseException("Model pull failed or returned invalid status."); - } - if (statusCode != 200) { - throw new OllamaBaseException(statusCode + " - " + responseString); + if (!success) { + LOG.error("Model pull failed or returned invalid status."); + throw new OllamaException("Model pull failed or returned invalid status."); + } + if (statusCode != 200) { + throw new OllamaException(statusCode + " - " + responseString); + } + } catch (InterruptedException ie) { + Thread.currentThread().interrupt(); + throw new OllamaException("Thread was interrupted during model pull.", ie); + } catch (Exception e) { + throw new OllamaException(e.getMessage(), e); + } finally { + MetricsRecorder.record( + url, "", false, false, false, null, null, startTime, statusCode, out); } } - public String getVersion() throws URISyntaxException, IOException, InterruptedException, OllamaBaseException { - String url = this.host + "/api/version"; - HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)) - .header(Constants.HttpConstants.HEADER_KEY_ACCEPT, Constants.HttpConstants.APPLICATION_JSON) - .header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON).GET() - .build(); - HttpResponse response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseString = response.body(); - if (statusCode == 200) { - return Utils.getObjectMapper().readValue(responseString, OllamaVersion.class).getVersion(); - } else { - throw new OllamaBaseException(statusCode + " - " + responseString); + /** + * Processes a single ModelPullResponse, handling errors and logging status. + * Returns true if the response indicates a successful pull. + * + * @param modelPullResponse the response from the model pull + * @param modelName the name of the model + * @return true if the pull was successful, false otherwise + * @throws OllamaException if the response contains an error + */ + @SuppressWarnings("RedundantIfStatement") + private boolean processModelPullResponse(ModelPullResponse modelPullResponse, String modelName) + throws OllamaException { + if (modelPullResponse == null) { + LOG.error("Received null response for model pull."); + return false; + } + String error = modelPullResponse.getError(); + if (error != null && !error.trim().isEmpty()) { + throw new OllamaException("Model pull failed: " + error); + } + String status = modelPullResponse.getStatus(); + if (status != null) { + LOG.debug("{}: {}", modelName, status); + if ("success".equalsIgnoreCase(status)) { + return true; + } + } + return false; + } + + /** + * Gets the Ollama server version. + * + * @return the version string + * @throws OllamaException if the request fails + */ + public String getVersion() throws OllamaException { + String url = "/api/version"; + long startTime = System.currentTimeMillis(); + int statusCode = -1; + Object out = null; + try { + HttpClient httpClient = HttpClient.newHttpClient(); + HttpRequest httpRequest = + getRequestBuilderDefault(new URI(this.host + url)) + .header( + Constants.HttpConstants.HEADER_KEY_ACCEPT, + Constants.HttpConstants.APPLICATION_JSON) + .header( + Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, + Constants.HttpConstants.APPLICATION_JSON) + .GET() + .build(); + HttpResponse response = + httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); + statusCode = response.statusCode(); + String responseString = response.body(); + if (statusCode == 200) { + return Utils.getObjectMapper() + .readValue(responseString, OllamaVersion.class) + .getVersion(); + } else { + throw new OllamaException(statusCode + " - " + responseString); + } + } catch (InterruptedException ie) { + Thread.currentThread().interrupt(); + throw new OllamaException("Thread was interrupted", ie); + } catch (Exception e) { + throw new OllamaException(e.getMessage(), e); + } finally { + MetricsRecorder.record( + url, "", false, false, false, null, null, startTime, statusCode, out); } } /** * Pulls a model using the specified Ollama library model tag. - * The model is identified by a name and a tag, which are combined into a single - * identifier + * The model is identified by a name and a tag, which are combined into a single identifier * in the format "name:tag" to pull the corresponding model. * - * @param libraryModelTag the {@link LibraryModelTag} object containing the name - * and tag - * of the model to be pulled. - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - * @throws URISyntaxException if the URI for the request is malformed + * @param modelName the name/tag of the model to be pulled. Ex: llama3:latest + * @throws OllamaException if the response indicates an error status */ - public void pullModel(LibraryModelTag libraryModelTag) - throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { - String tagToPull = String.format("%s:%s", libraryModelTag.getName(), libraryModelTag.getTag()); - pullModel(tagToPull); + public void pullModel(String modelName) throws OllamaException { + try { + if (numberOfRetriesForModelPull == 0) { + this.doPullModel(modelName); + return; + } + int numberOfRetries = 0; + long baseDelayMillis = 3000L; // 3 seconds base delay + while (numberOfRetries < numberOfRetriesForModelPull) { + try { + this.doPullModel(modelName); + return; + } catch (OllamaException e) { + handlePullRetry( + modelName, + numberOfRetries, + numberOfRetriesForModelPull, + baseDelayMillis); + numberOfRetries++; + } + } + throw new OllamaException( + "Failed to pull model " + + modelName + + " after " + + numberOfRetriesForModelPull + + " retries"); + } catch (InterruptedException ie) { + Thread.currentThread().interrupt(); + throw new OllamaException("Thread was interrupted", ie); + } catch (Exception e) { + throw new OllamaException(e.getMessage(), e); + } } /** * Gets model details from the Ollama server. * - * @param modelName the model + * @param modelName the model name * @return the model details - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - * @throws URISyntaxException if the URI for the request is malformed + * @throws OllamaException if the response indicates an error status */ - public ModelDetail getModelDetails(String modelName) - throws IOException, OllamaBaseException, InterruptedException, URISyntaxException { - String url = this.host + "/api/show"; - String jsonData = new ModelRequest(modelName).toString(); - HttpRequest request = getRequestBuilderDefault(new URI(url)) - .header(Constants.HttpConstants.HEADER_KEY_ACCEPT, Constants.HttpConstants.APPLICATION_JSON) - .header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON) - .POST(HttpRequest.BodyPublishers.ofString(jsonData)).build(); - HttpClient client = HttpClient.newHttpClient(); - HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseBody = response.body(); - if (statusCode == 200) { - return Utils.getObjectMapper().readValue(responseBody, ModelDetail.class); - } else { - throw new OllamaBaseException(statusCode + " - " + responseBody); + public ModelDetail getModelDetails(String modelName) throws OllamaException { + long startTime = System.currentTimeMillis(); + String url = "/api/show"; + int statusCode = -1; + Object out = null; + try { + String jsonData = new ModelRequest(modelName).toString(); + HttpRequest request = + getRequestBuilderDefault(new URI(this.host + url)) + .header( + Constants.HttpConstants.HEADER_KEY_ACCEPT, + Constants.HttpConstants.APPLICATION_JSON) + .header( + Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, + Constants.HttpConstants.APPLICATION_JSON) + .POST(HttpRequest.BodyPublishers.ofString(jsonData)) + .build(); + HttpClient client = HttpClient.newHttpClient(); + HttpResponse response = + client.send(request, HttpResponse.BodyHandlers.ofString()); + statusCode = response.statusCode(); + String responseBody = response.body(); + if (statusCode == 200) { + return Utils.getObjectMapper().readValue(responseBody, ModelDetail.class); + } else { + throw new OllamaException(statusCode + " - " + responseBody); + } + } catch (InterruptedException ie) { + Thread.currentThread().interrupt(); + throw new OllamaException("Thread was interrupted", ie); + } catch (Exception e) { + throw new OllamaException(e.getMessage(), e); + } finally { + MetricsRecorder.record( + url, "", false, false, false, null, null, startTime, statusCode, out); } } /** - * Create a custom model from a model file. Read more about custom model file - * creation here. - * - * @param modelName the name of the custom model to be created. - * @param modelFilePath the path to model file that exists on the Ollama server. - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - * @throws URISyntaxException if the URI for the request is malformed - */ - @Deprecated - public void createModelWithFilePath(String modelName, String modelFilePath) - throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { - String url = this.host + "/api/create"; - String jsonData = new CustomModelFilePathRequest(modelName, modelFilePath).toString(); - HttpRequest request = getRequestBuilderDefault(new URI(url)) - .header(Constants.HttpConstants.HEADER_KEY_ACCEPT, Constants.HttpConstants.APPLICATION_JSON) - .header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON) - .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build(); - HttpClient client = HttpClient.newHttpClient(); - HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseString = response.body(); - if (statusCode != 200) { - throw new OllamaBaseException(statusCode + " - " + responseString); - } - // FIXME: Ollama API returns HTTP status code 200 for model creation failure - // cases. Correct this - // if the issue is fixed in the Ollama API server. - if (responseString.contains("error")) { - throw new OllamaBaseException(responseString); - } - LOG.debug(responseString); - } - - /** - * Create a custom model from a model file. Read more about custom model file - * creation here. - * - * @param modelName the name of the custom model to be created. - * @param modelFileContents the path to model file that exists on the Ollama - * server. - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - * @throws URISyntaxException if the URI for the request is malformed - */ - @Deprecated - public void createModelWithModelFileContents(String modelName, String modelFileContents) - throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { - String url = this.host + "/api/create"; - String jsonData = new CustomModelFileContentsRequest(modelName, modelFileContents).toString(); - HttpRequest request = getRequestBuilderDefault(new URI(url)) - .header(Constants.HttpConstants.HEADER_KEY_ACCEPT, Constants.HttpConstants.APPLICATION_JSON) - .header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON) - .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build(); - HttpClient client = HttpClient.newHttpClient(); - HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseString = response.body(); - if (statusCode != 200) { - throw new OllamaBaseException(statusCode + " - " + responseString); - } - if (responseString.contains("error")) { - throw new OllamaBaseException(responseString); - } - LOG.debug(responseString); - } - - /** - * Create a custom model. Read more about custom model creation here. + * Creates a custom model. Read more about custom model creation + * here. * * @param customModelRequest custom model spec - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - * @throws URISyntaxException if the URI for the request is malformed + * @throws OllamaException if the response indicates an error status */ - public void createModel(CustomModelRequest customModelRequest) - throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { - String url = this.host + "/api/create"; - String jsonData = customModelRequest.toString(); - HttpRequest request = getRequestBuilderDefault(new URI(url)) - .header(Constants.HttpConstants.HEADER_KEY_ACCEPT, Constants.HttpConstants.APPLICATION_JSON) - .header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON) - .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build(); - HttpClient client = HttpClient.newHttpClient(); - HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseString = response.body(); - if (statusCode != 200) { - throw new OllamaBaseException(statusCode + " - " + responseString); + public void createModel(CustomModelRequest customModelRequest) throws OllamaException { + long startTime = System.currentTimeMillis(); + String url = "/api/create"; + int statusCode = -1; + Object out = null; + try { + String jsonData = customModelRequest.toString(); + HttpRequest request = + getRequestBuilderDefault(new URI(this.host + url)) + .header( + Constants.HttpConstants.HEADER_KEY_ACCEPT, + Constants.HttpConstants.APPLICATION_JSON) + .header( + Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, + Constants.HttpConstants.APPLICATION_JSON) + .POST( + HttpRequest.BodyPublishers.ofString( + jsonData, StandardCharsets.UTF_8)) + .build(); + HttpClient client = HttpClient.newHttpClient(); + HttpResponse response = + client.send(request, HttpResponse.BodyHandlers.ofInputStream()); + statusCode = response.statusCode(); + if (statusCode != 200) { + String errorBody = + new String(response.body().readAllBytes(), StandardCharsets.UTF_8); + out = errorBody; + throw new OllamaException(statusCode + " - " + errorBody); + } + try (BufferedReader reader = + new BufferedReader( + new InputStreamReader(response.body(), StandardCharsets.UTF_8))) { + String line; + StringBuilder lines = new StringBuilder(); + while ((line = reader.readLine()) != null) { + ModelPullResponse res = + Utils.getObjectMapper().readValue(line, ModelPullResponse.class); + lines.append(line); + LOG.debug(res.getStatus()); + if (res.getError() != null) { + out = res.getError(); + throw new OllamaException(res.getError()); + } + } + out = lines; + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new OllamaException("Thread was interrupted", e); + } catch (Exception e) { + throw new OllamaException(e.getMessage(), e); + } finally { + MetricsRecorder.record( + url, "", false, false, false, null, null, startTime, statusCode, out); } - if (responseString.contains("error")) { - throw new OllamaBaseException(responseString); - } - LOG.debug(responseString); } /** - * Delete a model from Ollama server. + * Deletes a model from the Ollama server. * - * @param modelName the name of the model to be deleted. - * @param ignoreIfNotPresent ignore errors if the specified model is not present - * on Ollama server. - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - * @throws URISyntaxException if the URI for the request is malformed + * @param modelName the name of the model to be deleted + * @param ignoreIfNotPresent ignore errors if the specified model is not present on the Ollama server + * @throws OllamaException if the response indicates an error status */ - public void deleteModel(String modelName, boolean ignoreIfNotPresent) - throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { - String url = this.host + "/api/delete"; - String jsonData = new ModelRequest(modelName).toString(); - HttpRequest request = getRequestBuilderDefault(new URI(url)) - .method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)) - .header(Constants.HttpConstants.HEADER_KEY_ACCEPT, Constants.HttpConstants.APPLICATION_JSON) - .header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON) - .build(); - HttpClient client = HttpClient.newHttpClient(); - HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseBody = response.body(); - if (statusCode == 404 && responseBody.contains("model") && responseBody.contains("not found")) { - return; - } - if (statusCode != 200) { - throw new OllamaBaseException(statusCode + " - " + responseBody); + public void deleteModel(String modelName, boolean ignoreIfNotPresent) throws OllamaException { + long startTime = System.currentTimeMillis(); + String url = "/api/delete"; + int statusCode = -1; + Object out = null; + try { + String jsonData = new ModelRequest(modelName).toString(); + HttpRequest request = + getRequestBuilderDefault(new URI(this.host + url)) + .method( + "DELETE", + HttpRequest.BodyPublishers.ofString( + jsonData, StandardCharsets.UTF_8)) + .header( + Constants.HttpConstants.HEADER_KEY_ACCEPT, + Constants.HttpConstants.APPLICATION_JSON) + .header( + Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, + Constants.HttpConstants.APPLICATION_JSON) + .build(); + HttpClient client = HttpClient.newHttpClient(); + HttpResponse response = + client.send(request, HttpResponse.BodyHandlers.ofString()); + statusCode = response.statusCode(); + String responseBody = response.body(); + out = responseBody; + if (statusCode == 404 + && responseBody.contains("model") + && responseBody.contains("not found")) { + return; + } + if (statusCode != 200) { + throw new OllamaException(statusCode + " - " + responseBody); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new OllamaException("Thread was interrupted", e); + } catch (Exception e) { + throw new OllamaException(statusCode + " - " + out, e); + } finally { + MetricsRecorder.record( + url, "", false, false, false, null, null, startTime, statusCode, out); } } /** - * Generate embeddings for a given text from a model + * Unloads a model from memory. + *

+ * If an empty prompt is provided and the keep_alive parameter is set to 0, a model will be + * unloaded from memory. * - * @param model name of model to generate embeddings from - * @param prompt text to generate embeddings for - * @return embeddings - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - * @deprecated Use {@link #embed(String, List)} instead. + * @param modelName the name of the model to unload + * @throws OllamaException if the response indicates an error status */ - @Deprecated - public List generateEmbeddings(String model, String prompt) - throws IOException, InterruptedException, OllamaBaseException { - return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt)); - } - - /** - * Generate embeddings using a {@link OllamaEmbeddingsRequestModel}. - * - * @param modelRequest request for '/api/embeddings' endpoint - * @return embeddings - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - * @deprecated Use {@link #embed(OllamaEmbedRequestModel)} instead. - */ - @Deprecated - public List generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) - throws IOException, InterruptedException, OllamaBaseException { - URI uri = URI.create(this.host + "/api/embeddings"); - String jsonData = modelRequest.toString(); - HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest.Builder requestBuilder = getRequestBuilderDefault(uri) - .header(Constants.HttpConstants.HEADER_KEY_ACCEPT, Constants.HttpConstants.APPLICATION_JSON) - .POST(HttpRequest.BodyPublishers.ofString(jsonData)); - HttpRequest request = requestBuilder.build(); - HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseBody = response.body(); - if (statusCode == 200) { - OllamaEmbeddingResponseModel embeddingResponse = Utils.getObjectMapper().readValue(responseBody, - OllamaEmbeddingResponseModel.class); - return embeddingResponse.getEmbedding(); - } else { - throw new OllamaBaseException(statusCode + " - " + responseBody); + public void unloadModel(String modelName) throws OllamaException { + long startTime = System.currentTimeMillis(); + String url = "/api/generate"; + int statusCode = -1; + Object out = null; + try { + ObjectMapper objectMapper = new ObjectMapper(); + Map jsonMap = new java.util.HashMap<>(); + jsonMap.put("model", modelName); + jsonMap.put("keep_alive", 0); + String jsonData = objectMapper.writeValueAsString(jsonMap); + HttpRequest request = + getRequestBuilderDefault(new URI(this.host + url)) + .method( + "POST", + HttpRequest.BodyPublishers.ofString( + jsonData, StandardCharsets.UTF_8)) + .header( + Constants.HttpConstants.HEADER_KEY_ACCEPT, + Constants.HttpConstants.APPLICATION_JSON) + .header( + Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, + Constants.HttpConstants.APPLICATION_JSON) + .build(); + LOG.debug("Unloading model with request: {}", jsonData); + HttpClient client = HttpClient.newHttpClient(); + HttpResponse response = + client.send(request, HttpResponse.BodyHandlers.ofString()); + statusCode = response.statusCode(); + String responseBody = response.body(); + if (statusCode == 404 + && responseBody.contains("model") + && responseBody.contains("not found")) { + LOG.debug("Unload response: {} - {}", statusCode, responseBody); + return; + } + if (statusCode != 200) { + LOG.debug("Unload response: {} - {}", statusCode, responseBody); + throw new OllamaException(statusCode + " - " + responseBody); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.debug("Unload interrupted: {} - {}", statusCode, out); + throw new OllamaException(statusCode + " - " + out, e); + } catch (Exception e) { + LOG.debug("Unload failed: {} - {}", statusCode, out); + throw new OllamaException(statusCode + " - " + out, e); + } finally { + MetricsRecorder.record( + url, "", false, false, false, null, null, startTime, statusCode, out); } } /** - * Generate embeddings for a given text from a model - * - * @param model name of model to generate embeddings from - * @param inputs text/s to generate embeddings for - * @return embeddings - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - */ - public OllamaEmbedResponseModel embed(String model, List inputs) - throws IOException, InterruptedException, OllamaBaseException { - return embed(new OllamaEmbedRequestModel(model, inputs)); - } - - /** - * Generate embeddings using a {@link OllamaEmbedRequestModel}. + * Generate embeddings using a {@link OllamaEmbedRequest}. * * @param modelRequest request for '/api/embed' endpoint * @return embeddings - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted + * @throws OllamaException if the response indicates an error status */ - public OllamaEmbedResponseModel embed(OllamaEmbedRequestModel modelRequest) - throws IOException, InterruptedException, OllamaBaseException { - URI uri = URI.create(this.host + "/api/embed"); - String jsonData = Utils.getObjectMapper().writeValueAsString(modelRequest); - HttpClient httpClient = HttpClient.newHttpClient(); - - HttpRequest request = HttpRequest.newBuilder(uri) - .header(Constants.HttpConstants.HEADER_KEY_ACCEPT, Constants.HttpConstants.APPLICATION_JSON) - .POST(HttpRequest.BodyPublishers.ofString(jsonData)).build(); - - HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseBody = response.body(); - - if (statusCode == 200) { - return Utils.getObjectMapper().readValue(responseBody, OllamaEmbedResponseModel.class); - } else { - throw new OllamaBaseException(statusCode + " - " + responseBody); - } - } - - /** - * Generate response for a question to a model running on Ollama server. This is - * a sync/blocking call. This API does not support "thinking" models. - * - * @param model the ollama model to ask the question to - * @param prompt the prompt/question text - * @param raw if true no formatting will be applied to the - * prompt. You - * may choose to use the raw parameter if you are - * specifying a full templated prompt in your - * request to - * the API - * @param options the Options object - More - * details on the options - * @param responseStreamHandler optional callback consumer that will be applied - * every - * time a streamed response is received. If not - * set, the - * stream parameter of the request is set to false. - * @return OllamaResult that includes response text and time taken for response - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - */ - public OllamaResult generate(String model, String prompt, boolean raw, Options options, - OllamaStreamHandler responseStreamHandler) throws OllamaBaseException, IOException, InterruptedException { - OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt); - ollamaRequestModel.setRaw(raw); - ollamaRequestModel.setThink(false); - ollamaRequestModel.setOptions(options.getOptionsMap()); - return generateSyncForOllamaRequestModel(ollamaRequestModel, null, responseStreamHandler); - } - - /** - * Generate thinking and response tokens for a question to a thinking model - * running on Ollama server. This is - * a sync/blocking call. - * - * @param model the ollama model to ask the question to - * @param prompt the prompt/question text - * @param raw if true no formatting will be applied to the - * prompt. You - * may choose to use the raw parameter if you are - * specifying a full templated prompt in your - * request to - * the API - * @param options the Options object - More - * details on the options - * @param responseStreamHandler optional callback consumer that will be applied - * every - * time a streamed response is received. If not - * set, the - * stream parameter of the request is set to false. - * @return OllamaResult that includes response text and time taken for response - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - */ - public OllamaResult generate(String model, String prompt, boolean raw, Options options, - OllamaStreamHandler thinkingStreamHandler, OllamaStreamHandler responseStreamHandler) - throws OllamaBaseException, IOException, InterruptedException { - OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt); - ollamaRequestModel.setRaw(raw); - ollamaRequestModel.setThink(true); - ollamaRequestModel.setOptions(options.getOptionsMap()); - return generateSyncForOllamaRequestModel(ollamaRequestModel, thinkingStreamHandler, responseStreamHandler); - } - - /** - * Generates response using the specified AI model and prompt (in blocking - * mode). - *

- * Uses - * {@link #generate(String, String, boolean, Options, OllamaStreamHandler)} - * - * @param model The name or identifier of the AI model to use for generating - * the response. - * @param prompt The input text or prompt to provide to the AI model. - * @param raw In some cases, you may wish to bypass the templating system - * and provide a full prompt. In this case, you can use the raw - * parameter to disable templating. Also note that raw mode will - * not return a context. - * @param options Additional options or configurations to use when generating - * the response. - * @param think if true the model will "think" step-by-step before - * generating the final response - * @return {@link OllamaResult} - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - */ - public OllamaResult generate(String model, String prompt, boolean raw, boolean think, Options options) - throws OllamaBaseException, IOException, InterruptedException { - if (think) { - return generate(model, prompt, raw, options, null, null); - } else { - return generate(model, prompt, raw, options, null); - } - } - - /** - * Generates structured output from the specified AI model and prompt. - *

- * Note: When formatting is specified, the 'think' parameter is not allowed. - * - * @param model The name or identifier of the AI model to use for generating - * the response. - * @param prompt The input text or prompt to provide to the AI model. - * @param format A map containing the format specification for the structured - * output. - * @return An instance of {@link OllamaResult} containing the structured - * response. - * @throws OllamaBaseException if the response indicates an error status. - * @throws IOException if an I/O error occurs during the HTTP request. - * @throws InterruptedException if the operation is interrupted. - */ - @SuppressWarnings("LoggingSimilarMessage") - public OllamaResult generate(String model, String prompt, Map format) - throws OllamaBaseException, IOException, InterruptedException { - URI uri = URI.create(this.host + "/api/generate"); - - Map requestBody = new HashMap<>(); - requestBody.put("model", model); - requestBody.put("prompt", prompt); - requestBody.put("stream", false); - requestBody.put("format", format); - - String jsonData = Utils.getObjectMapper().writeValueAsString(requestBody); - HttpClient httpClient = HttpClient.newHttpClient(); - - HttpRequest request = getRequestBuilderDefault(uri) - .header(Constants.HttpConstants.HEADER_KEY_ACCEPT, Constants.HttpConstants.APPLICATION_JSON) - .header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON) - .POST(HttpRequest.BodyPublishers.ofString(jsonData)).build(); - + public OllamaEmbedResult embed(OllamaEmbedRequest modelRequest) throws OllamaException { + long startTime = System.currentTimeMillis(); + String url = "/api/embed"; + int statusCode = -1; + Object out = null; try { - String prettyJson = Utils.getObjectMapper().writerWithDefaultPrettyPrinter() - .writeValueAsString(Utils.getObjectMapper().readValue(jsonData, Object.class)); - LOG.debug("Asking model:\n{}", prettyJson); + String jsonData = Utils.getObjectMapper().writeValueAsString(modelRequest); + HttpClient httpClient = HttpClient.newHttpClient(); + HttpRequest request = + HttpRequest.newBuilder(new URI(this.host + url)) + .header( + Constants.HttpConstants.HEADER_KEY_ACCEPT, + Constants.HttpConstants.APPLICATION_JSON) + .POST(HttpRequest.BodyPublishers.ofString(jsonData)) + .build(); + HttpResponse response = + httpClient.send(request, HttpResponse.BodyHandlers.ofString()); + statusCode = response.statusCode(); + String responseBody = response.body(); + if (statusCode == 200) { + return Utils.getObjectMapper().readValue(responseBody, OllamaEmbedResult.class); + } else { + throw new OllamaException(statusCode + " - " + responseBody); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new OllamaException("Thread was interrupted", e); } catch (Exception e) { - LOG.debug("Asking model: {}", jsonData); - } - - HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseBody = response.body(); - if (statusCode == 200) { - OllamaStructuredResult structuredResult = Utils.getObjectMapper().readValue(responseBody, - OllamaStructuredResult.class); - OllamaResult ollamaResult = new OllamaResult(structuredResult.getResponse(), structuredResult.getThinking(), - structuredResult.getResponseTime(), statusCode); - - ollamaResult.setModel(structuredResult.getModel()); - ollamaResult.setCreatedAt(structuredResult.getCreatedAt()); - ollamaResult.setDone(structuredResult.isDone()); - ollamaResult.setDoneReason(structuredResult.getDoneReason()); - ollamaResult.setContext(structuredResult.getContext()); - ollamaResult.setTotalDuration(structuredResult.getTotalDuration()); - ollamaResult.setLoadDuration(structuredResult.getLoadDuration()); - ollamaResult.setPromptEvalCount(structuredResult.getPromptEvalCount()); - ollamaResult.setPromptEvalDuration(structuredResult.getPromptEvalDuration()); - ollamaResult.setEvalCount(structuredResult.getEvalCount()); - ollamaResult.setEvalDuration(structuredResult.getEvalDuration()); - LOG.debug("Model response:\n{}", ollamaResult); - return ollamaResult; - } else { - LOG.debug("Model response:\n{}", - Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(responseBody)); - throw new OllamaBaseException(statusCode + " - " + responseBody); + throw new OllamaException(e.getMessage(), e); + } finally { + MetricsRecorder.record( + url, "", false, false, false, null, null, startTime, statusCode, out); } } /** - * Generates response using the specified AI model and prompt (in blocking - * mode), and then invokes a set of tools - * on the generated response. + * Generates a response from a model using the specified parameters and stream observer. + * If {@code streamObserver} is provided, streaming is enabled; otherwise, a synchronous call is made. * - * @param model The name or identifier of the AI model to use for generating - * the response. - * @param prompt The input text or prompt to provide to the AI model. - * @param options Additional options or configurations to use when generating - * the response. - * @return {@link OllamaToolsResult} An OllamaToolsResult object containing the - * response from the AI model and the results of invoking the tools on - * that output. - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted + * @param request the generation request + * @param streamObserver the stream observer for streaming responses, or null for synchronous + * @return the result of the generation + * @throws OllamaException if the request fails */ - public OllamaToolsResult generateWithTools(String model, String prompt, Options options) - throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException { - boolean raw = true; - OllamaToolsResult toolResult = new OllamaToolsResult(); - Map toolResults = new HashMap<>(); - - if (!prompt.startsWith("[AVAILABLE_TOOLS]")) { - final Tools.PromptBuilder promptBuilder = new Tools.PromptBuilder(); - for (Tools.ToolSpecification spec : toolRegistry.getRegisteredSpecs()) { - promptBuilder.withToolSpecification(spec); + public OllamaResult generate( + OllamaGenerateRequest request, OllamaGenerateStreamObserver streamObserver) + throws OllamaException { + try { + if (request.isUseTools()) { + return generateWithToolsInternal(request, streamObserver); } - promptBuilder.withPrompt(prompt); - prompt = promptBuilder.build(); - } - OllamaResult result = generate(model, prompt, raw, options, null); - toolResult.setModelResult(result); - - String toolsResponse = result.getResponse(); - if (toolsResponse.contains("[TOOL_CALLS]")) { - toolsResponse = toolsResponse.replace("[TOOL_CALLS]", ""); - } - - List toolFunctionCallSpecs = new ArrayList<>(); - ObjectMapper objectMapper = Utils.getObjectMapper(); - - if (!toolsResponse.isEmpty()) { - try { - // Try to parse the string to see if it's a valid JSON - objectMapper.readTree(toolsResponse); - } catch (JsonParseException e) { - LOG.warn("Response from model does not contain any tool calls. Returning the response as is."); - return toolResult; + if (streamObserver != null) { + if (request.isThink()) { + return generateSyncForOllamaRequestModel( + request, + streamObserver.getThinkingStreamHandler(), + streamObserver.getResponseStreamHandler()); + } else { + return generateSyncForOllamaRequestModel( + request, null, streamObserver.getResponseStreamHandler()); + } } - toolFunctionCallSpecs = objectMapper.readValue(toolsResponse, - objectMapper.getTypeFactory().constructCollectionType(List.class, ToolFunctionCallSpec.class)); + return generateSyncForOllamaRequestModel(request, null, null); + } catch (Exception e) { + throw new OllamaException(e.getMessage(), e); } - for (ToolFunctionCallSpec toolFunctionCallSpec : toolFunctionCallSpecs) { - toolResults.put(toolFunctionCallSpec, invokeTool(toolFunctionCallSpec)); + } + + // (No javadoc for private helper, as is standard) + private OllamaResult generateWithToolsInternal( + OllamaGenerateRequest request, OllamaGenerateStreamObserver streamObserver) + throws OllamaException { + ArrayList msgs = new ArrayList<>(); + OllamaChatRequest chatRequest = new OllamaChatRequest(); + chatRequest.setModel(request.getModel()); + OllamaChatMessage ocm = new OllamaChatMessage(); + ocm.setRole(OllamaChatMessageRole.USER); + ocm.setResponse(request.getPrompt()); + chatRequest.setMessages(msgs); + msgs.add(ocm); + OllamaChatTokenHandler hdlr = null; + chatRequest.setTools(request.getTools()); + if (streamObserver != null) { + chatRequest.setStream(true); + if (streamObserver.getResponseStreamHandler() != null) { + hdlr = + chatResponseModel -> + streamObserver + .getResponseStreamHandler() + .accept(chatResponseModel.getMessage().getResponse()); + } } - toolResult.setToolResults(toolResults); - return toolResult; + OllamaChatResult res = chat(chatRequest, hdlr); + return new OllamaResult( + res.getResponseModel().getMessage().getResponse(), + res.getResponseModel().getMessage().getThinking(), + res.getResponseModel().getTotalDuration(), + -1); } /** - * Asynchronously generates a response for a prompt using a model running on the - * Ollama server. - *

- * This method returns an {@link OllamaAsyncResultStreamer} handle that can be - * used to poll for - * status and retrieve streamed "thinking" and response tokens from the model. - * The call is non-blocking. - *

+ * Generates a response from a model asynchronously, returning a streamer for results. * - *

- * Example usage: - *

- * - *
{@code
-     * OllamaAsyncResultStreamer resultStreamer = ollamaAPI.generateAsync("gpt-oss:20b", "Who are you", false, true);
-     * int pollIntervalMilliseconds = 1000;
-     * while (true) {
-     *     String thinkingTokens = resultStreamer.getThinkingResponseStream().poll();
-     *     String responseTokens = resultStreamer.getResponseStream().poll();
-     *     System.out.print(thinkingTokens != null ? thinkingTokens.toUpperCase() : "");
-     *     System.out.print(responseTokens != null ? responseTokens.toLowerCase() : "");
-     *     Thread.sleep(pollIntervalMilliseconds);
-     *     if (!resultStreamer.isAlive())
-     *         break;
-     * }
-     * System.out.println("Complete thinking response: " + resultStreamer.getCompleteThinkingResponse());
-     * System.out.println("Complete response: " + resultStreamer.getCompleteResponse());
-     * }
- * - * @param model the Ollama model to use for generating the response - * @param prompt the prompt or question text to send to the model - * @param raw if {@code true}, returns the raw response from the model - * @param think if {@code true}, streams "thinking" tokens as well as response - * tokens - * @return an {@link OllamaAsyncResultStreamer} handle for polling and - * retrieving streamed results + * @param model the model name + * @param prompt the prompt to send + * @param raw whether to use raw mode + * @param think whether to use "think" mode + * @return an OllamaAsyncResultStreamer for streaming results + * @throws OllamaException if the request fails */ - public OllamaAsyncResultStreamer generateAsync(String model, String prompt, boolean raw, boolean think) { - OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt); - ollamaRequestModel.setRaw(raw); - ollamaRequestModel.setThink(think); - URI uri = URI.create(this.host + "/api/generate"); - OllamaAsyncResultStreamer ollamaAsyncResultStreamer = new OllamaAsyncResultStreamer( - getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds); - ollamaAsyncResultStreamer.start(); - return ollamaAsyncResultStreamer; - } - - /** - * With one or more image files, ask a question to a model running on Ollama - * server. This is a - * sync/blocking call. - * - * @param model the ollama model to ask the question to - * @param prompt the prompt/question text - * @param imageFiles the list of image files to use for the question - * @param options the Options object - More - * details on the options - * @param streamHandler optional callback consumer that will be applied every - * time a streamed response is received. If not set, the - * stream parameter of the request is set to false. - * @return OllamaResult that includes response text and time taken for response - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - */ - public OllamaResult generateWithImageFiles(String model, String prompt, List imageFiles, Options options, - OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { - List images = new ArrayList<>(); - for (File imageFile : imageFiles) { - images.add(encodeFileToBase64(imageFile)); + public OllamaAsyncResultStreamer generateAsync( + String model, String prompt, boolean raw, boolean think) throws OllamaException { + long startTime = System.currentTimeMillis(); + String url = "/api/generate"; + int statusCode = -1; + try { + OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt); + ollamaRequestModel.setRaw(raw); + ollamaRequestModel.setThink(think); + OllamaAsyncResultStreamer ollamaAsyncResultStreamer = + new OllamaAsyncResultStreamer( + getRequestBuilderDefault(new URI(this.host + url)), + ollamaRequestModel, + requestTimeoutSeconds); + ollamaAsyncResultStreamer.start(); + statusCode = ollamaAsyncResultStreamer.getHttpStatusCode(); + return ollamaAsyncResultStreamer; + } catch (Exception e) { + throw new OllamaException(e.getMessage(), e); + } finally { + MetricsRecorder.record( + url, model, raw, think, true, null, null, startTime, statusCode, null); } - OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images); - ollamaRequestModel.setOptions(options.getOptionsMap()); - return generateSyncForOllamaRequestModel(ollamaRequestModel, null, streamHandler); } /** - * Convenience method to call Ollama API without streaming responses. - *

- * Uses - * {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)} + * Sends a chat request to a model using an {@link OllamaChatRequest} and sets up streaming response. + * This can be constructed using an {@link OllamaChatRequestBuilder}. * - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - */ - public OllamaResult generateWithImageFiles(String model, String prompt, List imageFiles, Options options) - throws OllamaBaseException, IOException, InterruptedException { - return generateWithImageFiles(model, prompt, imageFiles, options, null); - } - - /** - * With one or more image URLs, ask a question to a model running on Ollama - * server. This is a - * sync/blocking call. - * - * @param model the ollama model to ask the question to - * @param prompt the prompt/question text - * @param imageURLs the list of image URLs to use for the question - * @param options the Options object - More - * details on the options - * @param streamHandler optional callback consumer that will be applied every - * time a streamed response is received. If not set, the - * stream parameter of the request is set to false. - * @return OllamaResult that includes response text and time taken for response - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - * @throws URISyntaxException if the URI for the request is malformed - */ - public OllamaResult generateWithImageURLs(String model, String prompt, List imageURLs, Options options, - OllamaStreamHandler streamHandler) - throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { - List images = new ArrayList<>(); - for (String imageURL : imageURLs) { - images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL))); - } - OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images); - ollamaRequestModel.setOptions(options.getOptionsMap()); - return generateSyncForOllamaRequestModel(ollamaRequestModel, null, streamHandler); - } - - /** - * Convenience method to call Ollama API without streaming responses. - *

- * Uses - * {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)} - * - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - * @throws URISyntaxException if the URI for the request is malformed - */ - public OllamaResult generateWithImageURLs(String model, String prompt, List imageURLs, Options options) - throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { - return generateWithImageURLs(model, prompt, imageURLs, options, null); - } - - /** - * Synchronously generates a response using a list of image byte arrays. - *

- * This method encodes the provided byte arrays into Base64 and sends them to - * the Ollama server. - * - * @param model the Ollama model to use for generating the response - * @param prompt the prompt or question text to send to the model - * @param images the list of image data as byte arrays - * @param options the Options object - More - * details on the options - * @param streamHandler optional callback that will be invoked with each - * streamed response; if null, streaming is disabled - * @return OllamaResult containing the response text and the time taken for the - * response - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - */ - public OllamaResult generateWithImages(String model, String prompt, List images, Options options, - OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { - List encodedImages = new ArrayList<>(); - for (byte[] image : images) { - encodedImages.add(encodeByteArrayToBase64(image)); - } - OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, encodedImages); - ollamaRequestModel.setOptions(options.getOptionsMap()); - return generateSyncForOllamaRequestModel(ollamaRequestModel, null, streamHandler); - } - - /** - * Convenience method to call the Ollama API using image byte arrays without - * streaming responses. - *

- * Uses - * {@link #generateWithImages(String, String, List, Options, OllamaStreamHandler)} - * - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - */ - public OllamaResult generateWithImages(String model, String prompt, List images, Options options) - throws OllamaBaseException, IOException, InterruptedException { - return generateWithImages(model, prompt, images, options, null); - } - - /** - * Ask a question to a model based on a given message stack (i.e. a chat - * history). Creates a synchronous call to the api - * 'api/chat'. - * - * @param model the ollama model to ask the question to - * @param messages chat history / message stack to send to the model - * @return {@link OllamaChatResult} containing the api response and the message - * history including the newly acquired assistant response. - * @throws OllamaBaseException any response code than 200 has been returned - * @throws IOException in case the responseStream can not be read - * @throws InterruptedException in case the server is not reachable or - * network - * issues happen - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP - * request - * @throws InterruptedException if the operation is interrupted - * @throws ToolInvocationException if the tool invocation fails - */ - public OllamaChatResult chat(String model, List messages) - throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException { - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model); - return chat(builder.withMessages(messages).build()); - } - - /** - * Ask a question to a model using an {@link OllamaChatRequest}. This can be - * constructed using an {@link OllamaChatRequestBuilder}. - *

- * Hint: the OllamaChatRequestModel#getStream() property is not implemented. + *

Note: the OllamaChatRequestModel#getStream() property is not implemented. * * @param request request object to be sent to the server + * @param tokenHandler callback handler to handle the last token from stream (caution: the + * previous tokens from stream will not be concatenated) * @return {@link OllamaChatResult} - * @throws OllamaBaseException any response code than 200 has been returned - * @throws IOException in case the responseStream can not be read - * @throws InterruptedException in case the server is not reachable or - * network - * issues happen - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP - * request - * @throws InterruptedException if the operation is interrupted - * @throws ToolInvocationException if the tool invocation fails + * @throws OllamaException if the response indicates an error status */ - public OllamaChatResult chat(OllamaChatRequest request) - throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException { - return chat(request, null, null); - } + public OllamaChatResult chat(OllamaChatRequest request, OllamaChatTokenHandler tokenHandler) + throws OllamaException { + try { + OllamaChatEndpointCaller requestCaller = + new OllamaChatEndpointCaller(host, auth, requestTimeoutSeconds); + OllamaChatResult result; - /** - * Ask a question to a model using an {@link OllamaChatRequest}. This can be - * constructed using an {@link OllamaChatRequestBuilder}. - *

- * Hint: the OllamaChatRequestModel#getStream() property is not implemented. - * - * @param request request object to be sent to the server - * @param responseStreamHandler callback handler to handle the last message from - * stream - * @param thinkingStreamHandler callback handler to handle the last thinking - * message from stream - * @return {@link OllamaChatResult} - * @throws OllamaBaseException any response code than 200 has been returned - * @throws IOException in case the responseStream can not be read - * @throws InterruptedException in case the server is not reachable or - * network - * issues happen - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP - * request - * @throws InterruptedException if the operation is interrupted - * @throws ToolInvocationException if the tool invocation fails - */ - public OllamaChatResult chat(OllamaChatRequest request, OllamaStreamHandler thinkingStreamHandler, - OllamaStreamHandler responseStreamHandler) - throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException { - return chatStreaming(request, new OllamaChatStreamObserver(thinkingStreamHandler, responseStreamHandler)); - } - - /** - * Ask a question to a model using an {@link OllamaChatRequest}. This can be - * constructed using an {@link OllamaChatRequestBuilder}. - *

- * Hint: the OllamaChatRequestModel#getStream() property is not implemented. - * - * @param request request object to be sent to the server - * @param tokenHandler callback handler to handle the last token from stream - * (caution: the previous tokens from stream will not be - * concatenated) - * @return {@link OllamaChatResult} - * @throws OllamaBaseException any response code than 200 has been returned - * @throws IOException in case the responseStream can not be read - * @throws InterruptedException in case the server is not reachable or network - * issues happen - * @throws OllamaBaseException if the response indicates an error status - * @throws IOException if an I/O error occurs during the HTTP request - * @throws InterruptedException if the operation is interrupted - */ - public OllamaChatResult chatStreaming(OllamaChatRequest request, OllamaTokenHandler tokenHandler) - throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException { - OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, auth, requestTimeoutSeconds); - OllamaChatResult result; - - // add all registered tools to Request - request.setTools(toolRegistry.getRegisteredSpecs().stream().map(Tools.ToolSpecification::getToolPrompt) - .collect(Collectors.toList())); - - if (tokenHandler != null) { - request.setStream(true); - result = requestCaller.call(request, tokenHandler); - } else { - result = requestCaller.callSync(request); - } - - if (clientHandlesTools) { - return result; - } - - // check if toolCallIsWanted - List toolCalls = result.getResponseModel().getMessage().getToolCalls(); - int toolCallTries = 0; - while (toolCalls != null && !toolCalls.isEmpty() && toolCallTries < maxChatToolCallRetries) { - for (OllamaChatToolCalls toolCall : toolCalls) { - String toolName = toolCall.getFunction().getName(); - ToolFunction toolFunction = toolRegistry.getToolFunction(toolName); - if (toolFunction == null) { - throw new ToolInvocationException("Tool function not found: " + toolName); - } - Map arguments = toolCall.getFunction().getArguments(); - Object res = toolFunction.apply(arguments); - String argumentKeys = arguments.keySet().stream() - .map(Object::toString) - .collect(Collectors.joining(", ")); - request.getMessages().add(new OllamaChatMessage(OllamaChatMessageRole.TOOL, - "[TOOL_RESULTS] " + toolName + "(" + argumentKeys + "): " + res + " [/TOOL_RESULTS]")); + // only add tools if tools flag is set + if (request.isUseTools()) { + // add all registered tools to request + request.setTools(toolRegistry.getRegisteredTools()); } if (tokenHandler != null) { + request.setStream(true); result = requestCaller.call(request, tokenHandler); } else { result = requestCaller.callSync(request); } - toolCalls = result.getResponseModel().getMessage().getToolCalls(); - toolCallTries++; - } - return result; - } - - /** - * Registers a single tool in the tool registry using the provided tool - * specification. - * - * @param toolSpecification the specification of the tool to register. It - * contains the - * tool's function name and other relevant information. - */ - public void registerTool(Tools.ToolSpecification toolSpecification) { - toolRegistry.addTool(toolSpecification.getFunctionName(), toolSpecification); - LOG.debug("Registered tool: {}", toolSpecification.getFunctionName()); - } - - /** - * Registers multiple tools in the tool registry using a list of tool - * specifications. - * Iterates over the list and adds each tool specification to the registry. - * - * @param toolSpecifications a list of tool specifications to register. Each - * specification - * contains information about a tool, such as its - * function name. - */ - public void registerTools(List toolSpecifications) { - for (Tools.ToolSpecification toolSpecification : toolSpecifications) { - toolRegistry.addTool(toolSpecification.getFunctionName(), toolSpecification); + // check if toolCallIsWanted + List toolCalls = + result.getResponseModel().getMessage().getToolCalls(); + int toolCallTries = 0; + while (toolCalls != null + && !toolCalls.isEmpty() + && toolCallTries < maxChatToolCallRetries) { + for (OllamaChatToolCalls toolCall : toolCalls) { + String toolName = toolCall.getFunction().getName(); + for (Tools.Tool t : request.getTools()) { + if (t.getToolSpec().getName().equals(toolName)) { + ToolFunction toolFunction = t.getToolFunction(); + if (toolFunction == null) { + throw new ToolInvocationException( + "Tool function not found: " + toolName); + } + LOG.debug( + "Invoking tool {} with arguments: {}", + toolCall.getFunction().getName(), + toolCall.getFunction().getArguments()); + Map arguments = toolCall.getFunction().getArguments(); + Object res = toolFunction.apply(arguments); + String argumentKeys = + arguments.keySet().stream() + .map(Object::toString) + .collect(Collectors.joining(", ")); + request.getMessages() + .add( + new OllamaChatMessage( + OllamaChatMessageRole.TOOL, + "[TOOL_RESULTS] " + + toolName + + "(" + + argumentKeys + + "): " + + res + + " [/TOOL_RESULTS]")); + } + } + } + if (tokenHandler != null) { + result = requestCaller.call(request, tokenHandler); + } else { + result = requestCaller.callSync(request); + } + toolCalls = result.getResponseModel().getMessage().getToolCalls(); + toolCallTries++; + } + return result; + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new OllamaException("Thread was interrupted", e); + } catch (Exception e) { + throw new OllamaException(e.getMessage(), e); } } /** - * Deregisters all tools from the tool registry. - * This method removes all registered tools, effectively clearing the registry. + * Registers a single tool in the tool registry. + * + * @param tool the tool to register. Contains the tool's specification and function. + */ + public void registerTool(Tools.Tool tool) { + toolRegistry.addTool(tool); + LOG.debug("Registered tool: {}", tool.getToolSpec().getName()); + } + + /** + * Registers multiple tools in the tool registry. + * + * @param tools a list of {@link Tools.Tool} objects to register. Each tool contains its + * specification and function. + */ + public void registerTools(List tools) { + toolRegistry.addTools(tools); + } + + /** + * Deregisters all tools from the tool registry. This method removes all registered tools, + * effectively clearing the registry. */ public void deregisterTools() { toolRegistry.clear(); @@ -1432,53 +974,50 @@ public class OllamaAPI { } /** - * Registers tools based on the annotations found on the methods of the caller's - * class and its providers. - * This method scans the caller's class for the {@link OllamaToolService} - * annotation and recursively registers - * annotated tools from all the providers specified in the annotation. + * Registers tools based on the annotations found on the methods of the caller's class and its + * providers. This method scans the caller's class for the {@link OllamaToolService} annotation + * and recursively registers annotated tools from all the providers specified in the annotation. * - * @throws IllegalStateException if the caller's class is not annotated with - * {@link OllamaToolService}. - * @throws RuntimeException if any reflection-based instantiation or - * invocation fails. + * @throws OllamaException if the caller's class is not annotated with {@link + * OllamaToolService} or if reflection-based instantiation or invocation fails */ - public void registerAnnotatedTools() { + public void registerAnnotatedTools() throws OllamaException { try { Class callerClass = null; try { - callerClass = Class.forName(Thread.currentThread().getStackTrace()[2].getClassName()); + callerClass = + Class.forName(Thread.currentThread().getStackTrace()[2].getClassName()); } catch (ClassNotFoundException e) { - throw new RuntimeException(e); + throw new OllamaException(e.getMessage(), e); } - OllamaToolService ollamaToolServiceAnnotation = callerClass.getDeclaredAnnotation(OllamaToolService.class); + OllamaToolService ollamaToolServiceAnnotation = + callerClass.getDeclaredAnnotation(OllamaToolService.class); if (ollamaToolServiceAnnotation == null) { - throw new IllegalStateException(callerClass + " is not annotated as " + OllamaToolService.class); + throw new IllegalStateException( + callerClass + " is not annotated as " + OllamaToolService.class); } Class[] providers = ollamaToolServiceAnnotation.providers(); for (Class provider : providers) { registerAnnotatedTools(provider.getDeclaredConstructor().newInstance()); } - } catch (InstantiationException | NoSuchMethodException | IllegalAccessException - | InvocationTargetException e) { - throw new RuntimeException(e); + } catch (InstantiationException + | NoSuchMethodException + | IllegalAccessException + | InvocationTargetException e) { + throw new OllamaException(e.getMessage()); } } /** - * Registers tools based on the annotations found on the methods of the provided - * object. - * This method scans the methods of the given object and registers tools using - * the {@link ToolSpec} annotation - * and associated {@link ToolProperty} annotations. It constructs tool - * specifications and stores them in a tool registry. + * Registers tools based on the annotations found on the methods of the provided object. + * This method scans the methods of the given object and registers tools using the {@link ToolSpec} + * annotation and associated {@link ToolProperty} annotations. It constructs tool specifications + * and stores them in a tool registry. * - * @param object the object whose methods are to be inspected for annotated - * tools. - * @throws RuntimeException if any reflection-based instantiation or invocation - * fails. + * @param object the object whose methods are to be inspected for annotated tools + * @throws RuntimeException if any reflection-based instantiation or invocation fails */ public void registerAnnotatedTools(Object object) { Class objectClass = object.getClass(); @@ -1491,39 +1030,43 @@ public class OllamaAPI { String operationName = !toolSpec.name().isBlank() ? toolSpec.name() : m.getName(); String operationDesc = !toolSpec.desc().isBlank() ? toolSpec.desc() : operationName; - final Tools.PropsBuilder propsBuilder = new Tools.PropsBuilder(); + final Map params = new HashMap() {}; LinkedHashMap methodParams = new LinkedHashMap<>(); for (Parameter parameter : m.getParameters()) { - final ToolProperty toolPropertyAnn = parameter.getDeclaredAnnotation(ToolProperty.class); + final ToolProperty toolPropertyAnn = + parameter.getDeclaredAnnotation(ToolProperty.class); String propType = parameter.getType().getTypeName(); if (toolPropertyAnn == null) { methodParams.put(parameter.getName(), null); continue; } - String propName = !toolPropertyAnn.name().isBlank() ? toolPropertyAnn.name() : parameter.getName(); + String propName = + !toolPropertyAnn.name().isBlank() + ? toolPropertyAnn.name() + : parameter.getName(); methodParams.put(propName, propType); - propsBuilder.withProperty(propName, Tools.PromptFuncDefinition.Property.builder().type(propType) - .description(toolPropertyAnn.desc()).required(toolPropertyAnn.required()).build()); + params.put( + propName, + Tools.Property.builder() + .type(propType) + .description(toolPropertyAnn.desc()) + .required(toolPropertyAnn.required()) + .build()); } - final Map params = propsBuilder.build(); - List reqProps = params.entrySet().stream().filter(e -> e.getValue().isRequired()) - .map(Map.Entry::getKey).collect(Collectors.toList()); - - Tools.ToolSpecification toolSpecification = Tools.ToolSpecification.builder().functionName(operationName) - .functionDescription(operationDesc) - .toolPrompt(Tools.PromptFuncDefinition.builder().type("function") - .function(Tools.PromptFuncDefinition.PromptFuncSpec.builder().name(operationName) - .description(operationDesc).parameters(Tools.PromptFuncDefinition.Parameters - .builder().type("object").properties(params).required(reqProps).build()) - .build()) - .build()) - .build(); - - ReflectionalToolFunction reflectionalToolFunction = new ReflectionalToolFunction(object, m, methodParams); - toolSpecification.setToolFunction(reflectionalToolFunction); - toolRegistry.addTool(toolSpecification.getFunctionName(), toolSpecification); + Tools.ToolSpec toolSpecification = + Tools.ToolSpec.builder() + .name(operationName) + .description(operationDesc) + .parameters(Tools.Parameters.of(params)) + .build(); + ReflectionalToolFunction reflectionalToolFunction = + new ReflectionalToolFunction(object, m, methodParams); + toolRegistry.addTool( + Tools.Tool.builder() + .toolFunction(reflectionalToolFunction) + .toolSpec(toolSpecification) + .build()); } - } /** @@ -1550,8 +1093,7 @@ public class OllamaAPI { * * @param roleName the name of the role to retrieve * @return the OllamaChatMessageRole associated with the given name - * @throws RoleNotFoundException if the role with the specified name does not - * exist + * @throws RoleNotFoundException if the role with the specified name does not exist */ public OllamaChatMessageRole getRole(String roleName) throws RoleNotFoundException { return OllamaChatMessageRole.getRole(roleName); @@ -1562,9 +1104,9 @@ public class OllamaAPI { /** * Utility method to encode a file into a Base64 encoded string. * - * @param file the file to be encoded into Base64. - * @return a Base64 encoded string representing the contents of the file. - * @throws IOException if an I/O error occurs during reading the file. + * @param file the file to be encoded into Base64 + * @return a Base64 encoded string representing the contents of the file + * @throws IOException if an I/O error occurs during reading the file */ private static String encodeFileToBase64(File file) throws IOException { return Base64.getEncoder().encodeToString(Files.readAllBytes(file.toPath())); @@ -1573,8 +1115,8 @@ public class OllamaAPI { /** * Utility method to encode a byte array into a Base64 encoded string. * - * @param bytes the byte array to be encoded into Base64. - * @return a Base64 encoded string representing the byte array. + * @param bytes the byte array to be encoded into Base64 + * @return a Base64 encoded string representing the byte array */ private static String encodeByteArrayToBase64(byte[] bytes) { return Base64.getEncoder().encodeToString(bytes); @@ -1582,36 +1124,56 @@ public class OllamaAPI { /** * Generates a request for the Ollama API and returns the result. - * This method synchronously calls the Ollama API. If a stream handler is - * provided, - * the request will be streamed; otherwise, a regular synchronous request will - * be made. + * This method synchronously calls the Ollama API. If a stream handler is provided, + * the request will be streamed; otherwise, a regular synchronous request will be made. * - * @param ollamaRequestModel the request model containing necessary - * parameters - * for the Ollama API request. - * @param responseStreamHandler the stream handler to process streaming - * responses, - * or null for non-streaming requests. - * @return the result of the Ollama API request. - * @throws OllamaBaseException if the request fails due to an issue with the - * Ollama API. - * @throws IOException if an I/O error occurs during the request - * process. - * @throws InterruptedException if the thread is interrupted during the request. + * @param ollamaRequestModel the request model containing necessary parameters for the Ollama API request + * @param thinkingStreamHandler the stream handler for "thinking" tokens, or null if not used + * @param responseStreamHandler the stream handler to process streaming responses, or null for non-streaming requests + * @return the result of the Ollama API request + * @throws OllamaException if the request fails due to an issue with the Ollama API */ - private OllamaResult generateSyncForOllamaRequestModel(OllamaGenerateRequest ollamaRequestModel, - OllamaStreamHandler thinkingStreamHandler, OllamaStreamHandler responseStreamHandler) - throws OllamaBaseException, IOException, InterruptedException { - OllamaGenerateEndpointCaller requestCaller = new OllamaGenerateEndpointCaller(host, auth, requestTimeoutSeconds); - OllamaResult result; - if (responseStreamHandler != null) { - ollamaRequestModel.setStream(true); - result = requestCaller.call(ollamaRequestModel, thinkingStreamHandler, responseStreamHandler); - } else { - result = requestCaller.callSync(ollamaRequestModel); + private OllamaResult generateSyncForOllamaRequestModel( + OllamaGenerateRequest ollamaRequestModel, + OllamaGenerateTokenHandler thinkingStreamHandler, + OllamaGenerateTokenHandler responseStreamHandler) + throws OllamaException { + long startTime = System.currentTimeMillis(); + int statusCode = -1; + Object out = null; + try { + OllamaGenerateEndpointCaller requestCaller = + new OllamaGenerateEndpointCaller(host, auth, requestTimeoutSeconds); + OllamaResult result; + if (responseStreamHandler != null) { + ollamaRequestModel.setStream(true); + result = + requestCaller.call( + ollamaRequestModel, thinkingStreamHandler, responseStreamHandler); + } else { + result = requestCaller.callSync(ollamaRequestModel); + } + statusCode = result.getHttpStatusCode(); + out = result; + return result; + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new OllamaException("Thread was interrupted", e); + } catch (Exception e) { + throw new OllamaException(e.getMessage(), e); + } finally { + MetricsRecorder.record( + OllamaGenerateEndpointCaller.endpoint, + ollamaRequestModel.getModel(), + ollamaRequestModel.isRaw(), + ollamaRequestModel.isThink(), + ollamaRequestModel.isStream(), + ollamaRequestModel.getOptions(), + ollamaRequestModel.getFormat(), + startTime, + statusCode, + out); } - return result; } /** @@ -1621,9 +1183,12 @@ public class OllamaAPI { * @return HttpRequest.Builder */ private HttpRequest.Builder getRequestBuilderDefault(URI uri) { - HttpRequest.Builder requestBuilder = HttpRequest.newBuilder(uri) - .header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON) - .timeout(Duration.ofSeconds(requestTimeoutSeconds)); + HttpRequest.Builder requestBuilder = + HttpRequest.newBuilder(uri) + .header( + Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, + Constants.HttpConstants.APPLICATION_JSON) + .timeout(Duration.ofSeconds(requestTimeoutSeconds)); if (isAuthSet()) { requestBuilder.header("Authorization", auth.getAuthHeaderValue()); } @@ -1638,20 +1203,4 @@ public class OllamaAPI { private boolean isAuthSet() { return auth != null; } - - private Object invokeTool(ToolFunctionCallSpec toolFunctionCallSpec) throws ToolInvocationException { - try { - String methodName = toolFunctionCallSpec.getName(); - Map arguments = toolFunctionCallSpec.getArguments(); - ToolFunction function = toolRegistry.getToolFunction(methodName); - LOG.debug("Invoking function {} with arguments {}", methodName, arguments); - if (function == null) { - throw new ToolNotFoundException( - "No such tool: " + methodName + ". Please register the tool before invoking it."); - } - return function.apply(arguments); - } catch (Exception e) { - throw new ToolInvocationException("Failed to invoke tool: " + toolFunctionCallSpec.getName(), e); - } - } } diff --git a/src/main/java/io/github/ollama4j/exceptions/OllamaBaseException.java b/src/main/java/io/github/ollama4j/exceptions/OllamaBaseException.java deleted file mode 100644 index 9474d72..0000000 --- a/src/main/java/io/github/ollama4j/exceptions/OllamaBaseException.java +++ /dev/null @@ -1,8 +0,0 @@ -package io.github.ollama4j.exceptions; - -public class OllamaBaseException extends Exception { - - public OllamaBaseException(String s) { - super(s); - } -} diff --git a/src/main/java/io/github/ollama4j/exceptions/OllamaException.java b/src/main/java/io/github/ollama4j/exceptions/OllamaException.java new file mode 100644 index 0000000..7570c10 --- /dev/null +++ b/src/main/java/io/github/ollama4j/exceptions/OllamaException.java @@ -0,0 +1,20 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ +package io.github.ollama4j.exceptions; + +public class OllamaException extends Exception { + + public OllamaException(String message) { + super(message); + } + + public OllamaException(String message, Exception exception) { + super(message, exception); + } +} diff --git a/src/main/java/io/github/ollama4j/exceptions/RoleNotFoundException.java b/src/main/java/io/github/ollama4j/exceptions/RoleNotFoundException.java index a7d1d18..11c6370 100644 --- a/src/main/java/io/github/ollama4j/exceptions/RoleNotFoundException.java +++ b/src/main/java/io/github/ollama4j/exceptions/RoleNotFoundException.java @@ -1,3 +1,11 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.exceptions; public class RoleNotFoundException extends Exception { diff --git a/src/main/java/io/github/ollama4j/exceptions/ToolInvocationException.java b/src/main/java/io/github/ollama4j/exceptions/ToolInvocationException.java index 4707e55..1bcb8f9 100644 --- a/src/main/java/io/github/ollama4j/exceptions/ToolInvocationException.java +++ b/src/main/java/io/github/ollama4j/exceptions/ToolInvocationException.java @@ -1,3 +1,11 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.exceptions; public class ToolInvocationException extends Exception { diff --git a/src/main/java/io/github/ollama4j/exceptions/ToolNotFoundException.java b/src/main/java/io/github/ollama4j/exceptions/ToolNotFoundException.java index bd3e007..28e4b7f 100644 --- a/src/main/java/io/github/ollama4j/exceptions/ToolNotFoundException.java +++ b/src/main/java/io/github/ollama4j/exceptions/ToolNotFoundException.java @@ -1,3 +1,11 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.exceptions; public class ToolNotFoundException extends Exception { diff --git a/src/main/java/io/github/ollama4j/impl/ConsoleOutputChatTokenHandler.java b/src/main/java/io/github/ollama4j/impl/ConsoleOutputChatTokenHandler.java new file mode 100644 index 0000000..ea0f728 --- /dev/null +++ b/src/main/java/io/github/ollama4j/impl/ConsoleOutputChatTokenHandler.java @@ -0,0 +1,18 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ +package io.github.ollama4j.impl; + +import io.github.ollama4j.models.chat.OllamaChatStreamObserver; + +public final class ConsoleOutputChatTokenHandler extends OllamaChatStreamObserver { + public ConsoleOutputChatTokenHandler() { + setThinkingStreamHandler(new ConsoleOutputGenerateTokenHandler()); + setResponseStreamHandler(new ConsoleOutputGenerateTokenHandler()); + } +} diff --git a/src/main/java/io/github/ollama4j/impl/ConsoleOutputGenerateTokenHandler.java b/src/main/java/io/github/ollama4j/impl/ConsoleOutputGenerateTokenHandler.java new file mode 100644 index 0000000..b303315 --- /dev/null +++ b/src/main/java/io/github/ollama4j/impl/ConsoleOutputGenerateTokenHandler.java @@ -0,0 +1,18 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ +package io.github.ollama4j.impl; + +import io.github.ollama4j.models.generate.OllamaGenerateTokenHandler; + +public class ConsoleOutputGenerateTokenHandler implements OllamaGenerateTokenHandler { + @Override + public void accept(String message) { + System.out.print(message); + } +} diff --git a/src/main/java/io/github/ollama4j/impl/ConsoleOutputStreamHandler.java b/src/main/java/io/github/ollama4j/impl/ConsoleOutputStreamHandler.java deleted file mode 100644 index b5b3da8..0000000 --- a/src/main/java/io/github/ollama4j/impl/ConsoleOutputStreamHandler.java +++ /dev/null @@ -1,14 +0,0 @@ -package io.github.ollama4j.impl; - -import io.github.ollama4j.models.generate.OllamaStreamHandler; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ConsoleOutputStreamHandler implements OllamaStreamHandler { - private static final Logger LOG = LoggerFactory.getLogger(ConsoleOutputStreamHandler.class); - - @Override - public void accept(String message) { - LOG.info(message); - } -} diff --git a/src/main/java/io/github/ollama4j/metrics/MetricsRecorder.java b/src/main/java/io/github/ollama4j/metrics/MetricsRecorder.java new file mode 100644 index 0000000..bfd6ef1 --- /dev/null +++ b/src/main/java/io/github/ollama4j/metrics/MetricsRecorder.java @@ -0,0 +1,129 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ +package io.github.ollama4j.metrics; + +import com.google.common.base.Throwables; +import io.prometheus.client.Counter; +import io.prometheus.client.Histogram; +import java.util.Map; + +public class MetricsRecorder { + + // Corrected: Removed duplicate "format" label and ensured label count matches usage + private static final Counter requests = + Counter.build() + .name("ollama_api_requests_total") + .help("Total requests to Ollama API") + .labelNames( + "endpoint", + "model", + "raw", + "streaming", + "thinking", + "http_status", + "options", + "format") + .register(); + + private static final Histogram requestLatency = + Histogram.build() + .name("ollama_api_request_duration_seconds") + .help("Request latency in seconds") + .labelNames( + "endpoint", + "model", + "raw", + "streaming", + "thinking", + "http_status", + "options", + "format") + .register(); + + private static final Histogram responseSize = + Histogram.build() + .name("ollama_api_response_size_bytes") + .help("Response size in bytes") + .labelNames("endpoint", "model", "options") + .register(); + + public static void record( + String endpoint, + String model, + boolean raw, + boolean thinking, + boolean streaming, + Map options, + Object format, + long startTime, + int responseHttpStatus, + Object response) { + long endTime = System.currentTimeMillis(); + + String httpStatus = String.valueOf(responseHttpStatus); + + String formatString = ""; + if (format instanceof String) { + formatString = (String) format; + } else if (format instanceof Map) { + formatString = mapToString((Map) format); + } else if (format != null) { + formatString = format.toString(); + } + + // Ensure the number of labels matches the labelNames above (8 labels) + requests.labels( + endpoint, + safe(model), + String.valueOf(raw), + String.valueOf(streaming), + String.valueOf(thinking), + httpStatus, + safe(mapToString(options)), + safe(formatString)) + .inc(); + double durationSeconds = (endTime - startTime) / 1000.0; + + // Ensure the number of labels matches the labelNames above (8 labels) + requestLatency + .labels( + endpoint, + safe(model), + String.valueOf(raw), + String.valueOf(streaming), + String.valueOf(thinking), + httpStatus, + safe(mapToString(options)), + safe(formatString)) + .observe(durationSeconds); + + // Record response size (only if response is a string or json-like object) + if (response != null) { + if (response instanceof Exception) { + response = Throwables.getStackTraceAsString((Throwable) response); + } + int size = response.toString().length(); + responseSize.labels(endpoint, safe(model), safe(mapToString(options))).observe(size); + } + } + + // Utility method to convert options Map to string (you can adjust this for more detailed + // representation) + private static String mapToString(Map map) { + if (map == null || map.isEmpty()) { + return "none"; + } + // Convert the map to a string (can be customized to fit the use case) + return map.toString(); + } + + private static String safe(String value) { + return (value == null || value.isEmpty()) ? "none" : value; + } +} diff --git a/src/main/java/io/github/ollama4j/models/chat/OllamaChatMessage.java b/src/main/java/io/github/ollama4j/models/chat/OllamaChatMessage.java index e3d7912..ef1b3da 100644 --- a/src/main/java/io/github/ollama4j/models/chat/OllamaChatMessage.java +++ b/src/main/java/io/github/ollama4j/models/chat/OllamaChatMessage.java @@ -1,21 +1,31 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.chat; +import static io.github.ollama4j.utils.Utils.getObjectMapper; + import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import io.github.ollama4j.utils.FileToBase64Serializer; -import lombok.*; - import java.util.List; - -import static io.github.ollama4j.utils.Utils.getObjectMapper; +import lombok.*; /** * Defines a single Message to be used inside a chat request against the ollama /api/chat endpoint. * - * @see Generate chat completion + * @see Generate + * chat completion */ +@SuppressWarnings("NullableProblems") @Data @AllArgsConstructor @RequiredArgsConstructor @@ -23,11 +33,11 @@ import static io.github.ollama4j.utils.Utils.getObjectMapper; @JsonIgnoreProperties(ignoreUnknown = true) public class OllamaChatMessage { - @NonNull - private OllamaChatMessageRole role; + @NonNull private OllamaChatMessageRole role; + @JsonProperty("content") @NonNull - private String content; + private String response; private String thinking; diff --git a/src/main/java/io/github/ollama4j/models/chat/OllamaChatMessageRole.java b/src/main/java/io/github/ollama4j/models/chat/OllamaChatMessageRole.java index 37d9d5c..617fb51 100644 --- a/src/main/java/io/github/ollama4j/models/chat/OllamaChatMessageRole.java +++ b/src/main/java/io/github/ollama4j/models/chat/OllamaChatMessageRole.java @@ -1,11 +1,18 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.chat; import com.fasterxml.jackson.annotation.JsonValue; import io.github.ollama4j.exceptions.RoleNotFoundException; -import lombok.Getter; - import java.util.ArrayList; import java.util.List; +import lombok.Getter; /** * Defines the possible Chat Message roles. @@ -19,8 +26,7 @@ public class OllamaChatMessageRole { public static final OllamaChatMessageRole ASSISTANT = new OllamaChatMessageRole("assistant"); public static final OllamaChatMessageRole TOOL = new OllamaChatMessageRole("tool"); - @JsonValue - private final String roleName; + @JsonValue private final String roleName; private OllamaChatMessageRole(String roleName) { this.roleName = roleName; @@ -28,8 +34,6 @@ public class OllamaChatMessageRole { } public static OllamaChatMessageRole newCustomRole(String roleName) { -// OllamaChatMessageRole customRole = new OllamaChatMessageRole(roleName); -// roles.add(customRole); return new OllamaChatMessageRole(roleName); } diff --git a/src/main/java/io/github/ollama4j/models/chat/OllamaChatRequest.java b/src/main/java/io/github/ollama4j/models/chat/OllamaChatRequest.java index 7b19e02..a10cf77 100644 --- a/src/main/java/io/github/ollama4j/models/chat/OllamaChatRequest.java +++ b/src/main/java/io/github/ollama4j/models/chat/OllamaChatRequest.java @@ -1,32 +1,50 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.chat; import io.github.ollama4j.models.request.OllamaCommonRequest; import io.github.ollama4j.tools.Tools; import io.github.ollama4j.utils.OllamaRequestBody; +import java.util.Collections; +import java.util.List; import lombok.Getter; import lombok.Setter; -import java.util.List; - /** * Defines a Request to use against the ollama /api/chat endpoint. * * @see Generate - * Chat Completion + * "https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate + * Chat Completion */ @Getter @Setter public class OllamaChatRequest extends OllamaCommonRequest implements OllamaRequestBody { - private List messages; + private List messages = Collections.emptyList(); - private List tools; + private List tools; private boolean think; - public OllamaChatRequest() { - } + /** + * Controls whether tools are automatically executed. + * + *

If set to {@code true} (the default), tools will be automatically used/applied by the + * library. If set to {@code false}, tool calls will be returned to the client for manual + * handling. + * + *

Disabling this should be an explicit operation. + */ + private boolean useTools = true; + + public OllamaChatRequest() {} public OllamaChatRequest(String model, boolean think, List messages) { this.model = model; @@ -42,5 +60,4 @@ public class OllamaChatRequest extends OllamaCommonRequest implements OllamaRequ return this.toString().equals(o.toString()); } - } diff --git a/src/main/java/io/github/ollama4j/models/chat/OllamaChatRequestBuilder.java b/src/main/java/io/github/ollama4j/models/chat/OllamaChatRequestBuilder.java index 4a9caf9..f72759f 100644 --- a/src/main/java/io/github/ollama4j/models/chat/OllamaChatRequestBuilder.java +++ b/src/main/java/io/github/ollama4j/models/chat/OllamaChatRequestBuilder.java @@ -1,38 +1,59 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.chat; import io.github.ollama4j.utils.Options; import io.github.ollama4j.utils.Utils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.File; import java.io.IOException; -import java.net.URISyntaxException; import java.nio.file.Files; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; +import lombok.Setter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * Helper class for creating {@link OllamaChatRequest} objects using the builder-pattern. - */ +/** Helper class for creating {@link OllamaChatRequest} objects using the builder-pattern. */ public class OllamaChatRequestBuilder { private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class); - private OllamaChatRequestBuilder(String model, List messages) { - request = new OllamaChatRequest(model, false, messages); - } - + private int imageURLConnectTimeoutSeconds = 10; + private int imageURLReadTimeoutSeconds = 10; private OllamaChatRequest request; + @Setter private boolean useTools = true; - public static OllamaChatRequestBuilder getInstance(String model) { - return new OllamaChatRequestBuilder(model, new ArrayList<>()); + private OllamaChatRequestBuilder() { + request = new OllamaChatRequest(); + request.setMessages(new ArrayList<>()); } - public OllamaChatRequest build() { - return request; + public static OllamaChatRequestBuilder builder() { + return new OllamaChatRequestBuilder(); + } + + public OllamaChatRequestBuilder withImageURLConnectTimeoutSeconds( + int imageURLConnectTimeoutSeconds) { + this.imageURLConnectTimeoutSeconds = imageURLConnectTimeoutSeconds; + return this; + } + + public OllamaChatRequestBuilder withImageURLReadTimeoutSeconds(int imageURLReadTimeoutSeconds) { + this.imageURLReadTimeoutSeconds = imageURLReadTimeoutSeconds; + return this; + } + + public OllamaChatRequestBuilder withModel(String model) { + request.setModel(model); + return this; } public void reset() { @@ -43,50 +64,79 @@ public class OllamaChatRequestBuilder { return withMessage(role, content, Collections.emptyList()); } - public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List toolCalls) { + public OllamaChatRequestBuilder withMessage( + OllamaChatMessageRole role, String content, List toolCalls) { List messages = this.request.getMessages(); messages.add(new OllamaChatMessage(role, content, null, toolCalls, null)); return this; } - public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List toolCalls, List images) { + public OllamaChatRequestBuilder withMessage( + OllamaChatMessageRole role, + String content, + List toolCalls, + List images) { List messages = this.request.getMessages(); - - List binaryImages = images.stream().map(file -> { - try { - return Files.readAllBytes(file.toPath()); - } catch (IOException e) { - LOG.warn("File '{}' could not be accessed, will not add to message!", file.toPath(), e); - return new byte[0]; - } - }).collect(Collectors.toList()); - + List binaryImages = + images.stream() + .map( + file -> { + try { + return Files.readAllBytes(file.toPath()); + } catch (IOException e) { + LOG.warn( + "File '{}' could not be accessed, will not add to" + + " message!", + file.toPath(), + e); + return new byte[0]; + } + }) + .collect(Collectors.toList()); messages.add(new OllamaChatMessage(role, content, null, toolCalls, binaryImages)); return this; } - public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List toolCalls, String... imageUrls) { + public OllamaChatRequestBuilder withMessage( + OllamaChatMessageRole role, + String content, + List toolCalls, + String... imageUrls) + throws IOException, InterruptedException { List messages = this.request.getMessages(); List binaryImages = null; if (imageUrls.length > 0) { binaryImages = new ArrayList<>(); for (String imageUrl : imageUrls) { try { - binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl)); - } catch (URISyntaxException e) { - LOG.warn("URL '{}' could not be accessed, will not add to message!", imageUrl, e); + binaryImages.add( + Utils.loadImageBytesFromUrl( + imageUrl, + imageURLConnectTimeoutSeconds, + imageURLReadTimeoutSeconds)); + } catch (InterruptedException e) { + LOG.error("Failed to load image from URL: '{}'. Cause: {}", imageUrl, e); + Thread.currentThread().interrupt(); + throw new InterruptedException( + "Interrupted while loading image from URL: " + imageUrl); } catch (IOException e) { - LOG.warn("Content of URL '{}' could not be read, will not add to message!", imageUrl, e); + LOG.error( + "IOException occurred while loading image from URL '{}'. Cause: {}", + imageUrl, + e.getMessage(), + e); + throw new IOException( + "IOException while loading image from URL: " + imageUrl, e); } } } - messages.add(new OllamaChatMessage(role, content, null, toolCalls, binaryImages)); return this; } public OllamaChatRequestBuilder withMessages(List messages) { - return new OllamaChatRequestBuilder(request.getModel(), messages); + request.setMessages(messages); + return this; } public OllamaChatRequestBuilder withOptions(Options options) { @@ -95,7 +145,7 @@ public class OllamaChatRequestBuilder { } public OllamaChatRequestBuilder withGetJsonResponse() { - this.request.setReturnFormatJson(true); + this.request.setFormat("json"); return this; } @@ -118,4 +168,9 @@ public class OllamaChatRequestBuilder { this.request.setThink(think); return this; } + + public OllamaChatRequest build() { + request.setUseTools(useTools); + return request; + } } diff --git a/src/main/java/io/github/ollama4j/models/chat/OllamaChatResponseModel.java b/src/main/java/io/github/ollama4j/models/chat/OllamaChatResponseModel.java index 2ccc731..5c05a94 100644 --- a/src/main/java/io/github/ollama4j/models/chat/OllamaChatResponseModel.java +++ b/src/main/java/io/github/ollama4j/models/chat/OllamaChatResponseModel.java @@ -1,18 +1,25 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.chat; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; import lombok.Data; -import java.util.List; - @Data +@JsonIgnoreProperties(ignoreUnknown = true) public class OllamaChatResponseModel { private String model; private @JsonProperty("created_at") String createdAt; private @JsonProperty("done_reason") String doneReason; - private OllamaChatMessage message; private boolean done; - private String error; private List context; private @JsonProperty("total_duration") Long totalDuration; private @JsonProperty("load_duration") Long loadDuration; @@ -20,4 +27,6 @@ public class OllamaChatResponseModel { private @JsonProperty("eval_duration") Long evalDuration; private @JsonProperty("prompt_eval_count") Integer promptEvalCount; private @JsonProperty("eval_count") Integer evalCount; + private String error; + private OllamaChatMessage message; } diff --git a/src/main/java/io/github/ollama4j/models/chat/OllamaChatResult.java b/src/main/java/io/github/ollama4j/models/chat/OllamaChatResult.java index 5fbf7e3..db0ddf2 100644 --- a/src/main/java/io/github/ollama4j/models/chat/OllamaChatResult.java +++ b/src/main/java/io/github/ollama4j/models/chat/OllamaChatResult.java @@ -1,12 +1,19 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.chat; -import com.fasterxml.jackson.core.JsonProcessingException; -import lombok.Getter; - -import java.util.List; - import static io.github.ollama4j.utils.Utils.getObjectMapper; +import com.fasterxml.jackson.core.JsonProcessingException; +import java.util.List; +import lombok.Getter; + /** * Specific chat-API result that contains the chat history sent to the model and appends the answer as {@link OllamaChatResult} given by the * {@link OllamaChatMessageRole#ASSISTANT} role. @@ -18,7 +25,8 @@ public class OllamaChatResult { private final OllamaChatResponseModel responseModel; - public OllamaChatResult(OllamaChatResponseModel responseModel, List chatHistory) { + public OllamaChatResult( + OllamaChatResponseModel responseModel, List chatHistory) { this.chatHistory = chatHistory; this.responseModel = responseModel; appendAnswerToChatHistory(responseModel); @@ -36,19 +44,4 @@ public class OllamaChatResult { throw new RuntimeException(e); } } - - @Deprecated - public String getResponse(){ - return responseModel != null ? responseModel.getMessage().getContent() : ""; - } - - @Deprecated - public int getHttpStatusCode(){ - return 200; - } - - @Deprecated - public long getResponseTime(){ - return responseModel != null ? responseModel.getTotalDuration() : 0L; - } } diff --git a/src/main/java/io/github/ollama4j/models/chat/OllamaChatStreamObserver.java b/src/main/java/io/github/ollama4j/models/chat/OllamaChatStreamObserver.java index 2ccdb74..776b006 100644 --- a/src/main/java/io/github/ollama4j/models/chat/OllamaChatStreamObserver.java +++ b/src/main/java/io/github/ollama4j/models/chat/OllamaChatStreamObserver.java @@ -1,15 +1,24 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.chat; -import io.github.ollama4j.models.generate.OllamaStreamHandler; -import io.github.ollama4j.models.generate.OllamaTokenHandler; -import lombok.RequiredArgsConstructor; +import io.github.ollama4j.models.generate.OllamaGenerateTokenHandler; +import lombok.AllArgsConstructor; +import lombok.NoArgsConstructor; +import lombok.Setter; -@RequiredArgsConstructor -public class OllamaChatStreamObserver implements OllamaTokenHandler { - private final OllamaStreamHandler thinkingStreamHandler; - private final OllamaStreamHandler responseStreamHandler; - - private String message = ""; +@Setter +@NoArgsConstructor +@AllArgsConstructor +public class OllamaChatStreamObserver implements OllamaChatTokenHandler { + private OllamaGenerateTokenHandler thinkingStreamHandler; + private OllamaGenerateTokenHandler responseStreamHandler; @Override public void accept(OllamaChatResponseModel token) { @@ -18,34 +27,15 @@ public class OllamaChatStreamObserver implements OllamaTokenHandler { } String thinking = token.getMessage().getThinking(); - String content = token.getMessage().getContent(); + String response = token.getMessage().getResponse(); boolean hasThinking = thinking != null && !thinking.isEmpty(); - boolean hasContent = !content.isEmpty(); + boolean hasResponse = response != null && !response.isEmpty(); -// if (hasThinking && !hasContent) { -//// message += thinking; -// message = thinking; -// } else { -//// message += content; -// message = content; -// } -// -// responseStreamHandler.accept(message); - - - if (!hasContent && hasThinking && thinkingStreamHandler != null) { - // message = message + thinking; - - // use only new tokens received, instead of appending the tokens to the previous - // ones and sending the full string again + if (!hasResponse && hasThinking && thinkingStreamHandler != null) { thinkingStreamHandler.accept(thinking); - } else if (hasContent && responseStreamHandler != null) { - // message = message + response; - - // use only new tokens received, instead of appending the tokens to the previous - // ones and sending the full string again - responseStreamHandler.accept(content); + } else if (hasResponse) { + responseStreamHandler.accept(response); } } } diff --git a/src/main/java/io/github/ollama4j/models/chat/OllamaChatTokenHandler.java b/src/main/java/io/github/ollama4j/models/chat/OllamaChatTokenHandler.java new file mode 100644 index 0000000..fba39df --- /dev/null +++ b/src/main/java/io/github/ollama4j/models/chat/OllamaChatTokenHandler.java @@ -0,0 +1,13 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ +package io.github.ollama4j.models.chat; + +import java.util.function.Consumer; + +public interface OllamaChatTokenHandler extends Consumer {} diff --git a/src/main/java/io/github/ollama4j/models/chat/OllamaChatToolCalls.java b/src/main/java/io/github/ollama4j/models/chat/OllamaChatToolCalls.java index de1a081..29faeb1 100644 --- a/src/main/java/io/github/ollama4j/models/chat/OllamaChatToolCalls.java +++ b/src/main/java/io/github/ollama4j/models/chat/OllamaChatToolCalls.java @@ -1,3 +1,11 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.chat; import io.github.ollama4j.tools.OllamaToolCallsFunction; @@ -11,6 +19,4 @@ import lombok.NoArgsConstructor; public class OllamaChatToolCalls { private OllamaToolCallsFunction function; - - } diff --git a/src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbedRequestModel.java b/src/main/java/io/github/ollama4j/models/embed/OllamaEmbedRequest.java similarity index 61% rename from src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbedRequestModel.java rename to src/main/java/io/github/ollama4j/models/embed/OllamaEmbedRequest.java index 8cb2002..8c2fae8 100644 --- a/src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbedRequestModel.java +++ b/src/main/java/io/github/ollama4j/models/embed/OllamaEmbedRequest.java @@ -1,26 +1,29 @@ -package io.github.ollama4j.models.embeddings; - -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.core.JsonProcessingException; -import lombok.Data; -import lombok.NoArgsConstructor; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; - -import java.util.List; -import java.util.Map; +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ +package io.github.ollama4j.models.embed; import static io.github.ollama4j.utils.Utils.getObjectMapper; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonProcessingException; +import java.util.List; +import java.util.Map; +import lombok.*; + +@SuppressWarnings("NullableProblems") @Data @RequiredArgsConstructor @NoArgsConstructor -public class OllamaEmbedRequestModel { - @NonNull - private String model; +public class OllamaEmbedRequest { + @NonNull private String model; - @NonNull - private List input; + @NonNull private List input; private Map options; diff --git a/src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbedRequestBuilder.java b/src/main/java/io/github/ollama4j/models/embed/OllamaEmbedRequestBuilder.java similarity index 60% rename from src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbedRequestBuilder.java rename to src/main/java/io/github/ollama4j/models/embed/OllamaEmbedRequestBuilder.java index 83c619d..8e551ca 100644 --- a/src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbedRequestBuilder.java +++ b/src/main/java/io/github/ollama4j/models/embed/OllamaEmbedRequestBuilder.java @@ -1,7 +1,14 @@ -package io.github.ollama4j.models.embeddings; +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ +package io.github.ollama4j.models.embed; import io.github.ollama4j.utils.Options; - import java.util.List; /** @@ -9,32 +16,32 @@ import java.util.List; */ public class OllamaEmbedRequestBuilder { - private final OllamaEmbedRequestModel request; + private final OllamaEmbedRequest request; private OllamaEmbedRequestBuilder(String model, List input) { - this.request = new OllamaEmbedRequestModel(model,input); + this.request = new OllamaEmbedRequest(model, input); } - public static OllamaEmbedRequestBuilder getInstance(String model, String... input){ + public static OllamaEmbedRequestBuilder getInstance(String model, String... input) { return new OllamaEmbedRequestBuilder(model, List.of(input)); } - public OllamaEmbedRequestBuilder withOptions(Options options){ + public OllamaEmbedRequestBuilder withOptions(Options options) { this.request.setOptions(options.getOptionsMap()); return this; } - public OllamaEmbedRequestBuilder withKeepAlive(String keepAlive){ + public OllamaEmbedRequestBuilder withKeepAlive(String keepAlive) { this.request.setKeepAlive(keepAlive); return this; } - public OllamaEmbedRequestBuilder withoutTruncate(){ + public OllamaEmbedRequestBuilder withoutTruncate() { this.request.setTruncate(false); return this; } - public OllamaEmbedRequestModel build() { + public OllamaEmbedRequest build() { return this.request; } } diff --git a/src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbedResponseModel.java b/src/main/java/io/github/ollama4j/models/embed/OllamaEmbedResult.java similarity index 59% rename from src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbedResponseModel.java rename to src/main/java/io/github/ollama4j/models/embed/OllamaEmbedResult.java index b4f808c..512872d 100644 --- a/src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbedResponseModel.java +++ b/src/main/java/io/github/ollama4j/models/embed/OllamaEmbedResult.java @@ -1,13 +1,20 @@ -package io.github.ollama4j.models.embeddings; +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ +package io.github.ollama4j.models.embed; import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; - import java.util.List; +import lombok.Data; @SuppressWarnings("unused") @Data -public class OllamaEmbedResponseModel { +public class OllamaEmbedResult { @JsonProperty("model") private String model; diff --git a/src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbeddingResponseModel.java b/src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbeddingResponseModel.java deleted file mode 100644 index 2d0d90a..0000000 --- a/src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbeddingResponseModel.java +++ /dev/null @@ -1,14 +0,0 @@ -package io.github.ollama4j.models.embeddings; - -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; - -import java.util.List; - -@SuppressWarnings("unused") -@Data -@Deprecated(since="1.0.90") -public class OllamaEmbeddingResponseModel { - @JsonProperty("embedding") - private List embedding; -} diff --git a/src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbeddingsRequestBuilder.java b/src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbeddingsRequestBuilder.java deleted file mode 100644 index 47daf75..0000000 --- a/src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbeddingsRequestBuilder.java +++ /dev/null @@ -1,32 +0,0 @@ -package io.github.ollama4j.models.embeddings; - -import io.github.ollama4j.utils.Options; - -@Deprecated(since="1.0.90") -public class OllamaEmbeddingsRequestBuilder { - - private OllamaEmbeddingsRequestBuilder(String model, String prompt){ - request = new OllamaEmbeddingsRequestModel(model, prompt); - } - - private OllamaEmbeddingsRequestModel request; - - public static OllamaEmbeddingsRequestBuilder getInstance(String model, String prompt){ - return new OllamaEmbeddingsRequestBuilder(model, prompt); - } - - public OllamaEmbeddingsRequestModel build(){ - return request; - } - - public OllamaEmbeddingsRequestBuilder withOptions(Options options){ - this.request.setOptions(options.getOptionsMap()); - return this; - } - - public OllamaEmbeddingsRequestBuilder withKeepAlive(String keepAlive){ - this.request.setKeepAlive(keepAlive); - return this; - } - -} diff --git a/src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbeddingsRequestModel.java b/src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbeddingsRequestModel.java deleted file mode 100644 index 7d113f0..0000000 --- a/src/main/java/io/github/ollama4j/models/embeddings/OllamaEmbeddingsRequestModel.java +++ /dev/null @@ -1,36 +0,0 @@ -package io.github.ollama4j.models.embeddings; - -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.core.JsonProcessingException; -import lombok.Data; -import lombok.NoArgsConstructor; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; - -import java.util.Map; - -import static io.github.ollama4j.utils.Utils.getObjectMapper; - -@Data -@RequiredArgsConstructor -@NoArgsConstructor -@Deprecated(since="1.0.90") -public class OllamaEmbeddingsRequestModel { - @NonNull - private String model; - @NonNull - private String prompt; - - protected Map options; - @JsonProperty(value = "keep_alive") - private String keepAlive; - - @Override - public String toString() { - try { - return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } -} diff --git a/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequest.java b/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequest.java index 3763f0a..05ad9c8 100644 --- a/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequest.java +++ b/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequest.java @@ -1,46 +1,51 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.generate; - import io.github.ollama4j.models.request.OllamaCommonRequest; +import io.github.ollama4j.tools.Tools; import io.github.ollama4j.utils.OllamaRequestBody; +import java.util.List; import lombok.Getter; import lombok.Setter; -import java.util.List; - @Getter @Setter -public class OllamaGenerateRequest extends OllamaCommonRequest implements OllamaRequestBody{ +public class OllamaGenerateRequest extends OllamaCommonRequest implements OllamaRequestBody { - private String prompt; - private List images; + private String prompt; + private List images; + private String system; + private String context; + private boolean raw; + private boolean think; + private boolean useTools; + private List tools; - private String system; - private String context; - private boolean raw; - private boolean think; + public OllamaGenerateRequest() {} - public OllamaGenerateRequest() { - } - - public OllamaGenerateRequest(String model, String prompt) { - this.model = model; - this.prompt = prompt; - } - - public OllamaGenerateRequest(String model, String prompt, List images) { - this.model = model; - this.prompt = prompt; - this.images = images; - } - - @Override - public boolean equals(Object o) { - if (!(o instanceof OllamaGenerateRequest)) { - return false; + public OllamaGenerateRequest(String model, String prompt) { + this.model = model; + this.prompt = prompt; } - return this.toString().equals(o.toString()); - } + public OllamaGenerateRequest(String model, String prompt, List images) { + this.model = model; + this.prompt = prompt; + this.images = images; + } + @Override + public boolean equals(Object o) { + if (!(o instanceof OllamaGenerateRequest)) { + return false; + } + return this.toString().equals(o.toString()); + } } diff --git a/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequestBuilder.java b/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequestBuilder.java index 713c46e..0717f9e 100644 --- a/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequestBuilder.java +++ b/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequestBuilder.java @@ -1,55 +1,121 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.generate; +import io.github.ollama4j.tools.Tools; import io.github.ollama4j.utils.Options; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Base64; +import java.util.List; -/** - * Helper class for creating {@link OllamaGenerateRequest} - * objects using the builder-pattern. - */ +/** Helper class for creating {@link OllamaGenerateRequest} objects using the builder-pattern. */ public class OllamaGenerateRequestBuilder { - private OllamaGenerateRequestBuilder(String model, String prompt){ - request = new OllamaGenerateRequest(model, prompt); + private OllamaGenerateRequestBuilder() { + request = new OllamaGenerateRequest(); } private OllamaGenerateRequest request; - public static OllamaGenerateRequestBuilder getInstance(String model){ - return new OllamaGenerateRequestBuilder(model,""); + public static OllamaGenerateRequestBuilder builder() { + return new OllamaGenerateRequestBuilder(); } - public OllamaGenerateRequest build(){ + public OllamaGenerateRequest build() { return request; } - public OllamaGenerateRequestBuilder withPrompt(String prompt){ + public OllamaGenerateRequestBuilder withPrompt(String prompt) { request.setPrompt(prompt); return this; } - public OllamaGenerateRequestBuilder withGetJsonResponse(){ - this.request.setReturnFormatJson(true); + public OllamaGenerateRequestBuilder withTools(List tools) { + request.setTools(tools); return this; } - public OllamaGenerateRequestBuilder withOptions(Options options){ + public OllamaGenerateRequestBuilder withModel(String model) { + request.setModel(model); + return this; + } + + public OllamaGenerateRequestBuilder withGetJsonResponse() { + this.request.setFormat("json"); + return this; + } + + public OllamaGenerateRequestBuilder withOptions(Options options) { this.request.setOptions(options.getOptionsMap()); return this; } - public OllamaGenerateRequestBuilder withTemplate(String template){ + public OllamaGenerateRequestBuilder withTemplate(String template) { this.request.setTemplate(template); return this; } - public OllamaGenerateRequestBuilder withStreaming(){ - this.request.setStream(true); + public OllamaGenerateRequestBuilder withStreaming(boolean streaming) { + this.request.setStream(streaming); return this; } - public OllamaGenerateRequestBuilder withKeepAlive(String keepAlive){ + public OllamaGenerateRequestBuilder withKeepAlive(String keepAlive) { this.request.setKeepAlive(keepAlive); return this; } + public OllamaGenerateRequestBuilder withRaw(boolean raw) { + this.request.setRaw(raw); + return this; + } + + public OllamaGenerateRequestBuilder withThink(boolean think) { + this.request.setThink(think); + return this; + } + + public OllamaGenerateRequestBuilder withUseTools(boolean useTools) { + this.request.setUseTools(useTools); + return this; + } + + public OllamaGenerateRequestBuilder withFormat(java.util.Map format) { + this.request.setFormat(format); + return this; + } + + public OllamaGenerateRequestBuilder withSystem(String system) { + this.request.setSystem(system); + return this; + } + + public OllamaGenerateRequestBuilder withContext(String context) { + this.request.setContext(context); + return this; + } + + public OllamaGenerateRequestBuilder withImagesBase64(java.util.List images) { + this.request.setImages(images); + return this; + } + + public OllamaGenerateRequestBuilder withImages(java.util.List imageFiles) + throws IOException { + java.util.List images = new ArrayList<>(); + for (File imageFile : imageFiles) { + images.add(Base64.getEncoder().encodeToString(Files.readAllBytes(imageFile.toPath()))); + } + this.request.setImages(images); + return this; + } } diff --git a/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateResponseModel.java b/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateResponseModel.java index a3d23ec..bf33133 100644 --- a/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateResponseModel.java +++ b/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateResponseModel.java @@ -1,25 +1,32 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.generate; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; - import java.util.List; +import lombok.Data; @Data @JsonIgnoreProperties(ignoreUnknown = true) public class OllamaGenerateResponseModel { private String model; private @JsonProperty("created_at") String createdAt; - private String response; - private String thinking; - private boolean done; private @JsonProperty("done_reason") String doneReason; + private boolean done; private List context; private @JsonProperty("total_duration") Long totalDuration; private @JsonProperty("load_duration") Long loadDuration; - private @JsonProperty("prompt_eval_count") Integer promptEvalCount; private @JsonProperty("prompt_eval_duration") Long promptEvalDuration; - private @JsonProperty("eval_count") Integer evalCount; private @JsonProperty("eval_duration") Long evalDuration; + private @JsonProperty("prompt_eval_count") Integer promptEvalCount; + private @JsonProperty("eval_count") Integer evalCount; + private String response; + private String thinking; } diff --git a/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateStreamObserver.java b/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateStreamObserver.java index 67ae571..0e908dc 100644 --- a/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateStreamObserver.java +++ b/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateStreamObserver.java @@ -1,20 +1,29 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.generate; import java.util.ArrayList; import java.util.List; +import lombok.Getter; +@Getter public class OllamaGenerateStreamObserver { - - private final OllamaStreamHandler thinkingStreamHandler; - private final OllamaStreamHandler responseStreamHandler; + private final OllamaGenerateTokenHandler thinkingStreamHandler; + private final OllamaGenerateTokenHandler responseStreamHandler; private final List responseParts = new ArrayList<>(); - private String message = ""; - - public OllamaGenerateStreamObserver(OllamaStreamHandler thinkingStreamHandler, OllamaStreamHandler responseStreamHandler) { - this.responseStreamHandler = responseStreamHandler; + public OllamaGenerateStreamObserver( + OllamaGenerateTokenHandler thinkingStreamHandler, + OllamaGenerateTokenHandler responseStreamHandler) { this.thinkingStreamHandler = thinkingStreamHandler; + this.responseStreamHandler = responseStreamHandler; } public void notify(OllamaGenerateResponseModel currentResponsePart) { @@ -30,16 +39,8 @@ public class OllamaGenerateStreamObserver { boolean hasThinking = thinking != null && !thinking.isEmpty(); if (!hasResponse && hasThinking && thinkingStreamHandler != null) { - // message = message + thinking; - - // use only new tokens received, instead of appending the tokens to the previous - // ones and sending the full string again thinkingStreamHandler.accept(thinking); } else if (hasResponse && responseStreamHandler != null) { - // message = message + response; - - // use only new tokens received, instead of appending the tokens to the previous - // ones and sending the full string again responseStreamHandler.accept(response); } } diff --git a/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateTokenHandler.java b/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateTokenHandler.java new file mode 100644 index 0000000..d8d9d01 --- /dev/null +++ b/src/main/java/io/github/ollama4j/models/generate/OllamaGenerateTokenHandler.java @@ -0,0 +1,15 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ +package io.github.ollama4j.models.generate; + +import java.util.function.Consumer; + +public interface OllamaGenerateTokenHandler extends Consumer { + void accept(String message); +} diff --git a/src/main/java/io/github/ollama4j/models/generate/OllamaStreamHandler.java b/src/main/java/io/github/ollama4j/models/generate/OllamaStreamHandler.java deleted file mode 100644 index e2da640..0000000 --- a/src/main/java/io/github/ollama4j/models/generate/OllamaStreamHandler.java +++ /dev/null @@ -1,7 +0,0 @@ -package io.github.ollama4j.models.generate; - -import java.util.function.Consumer; - -public interface OllamaStreamHandler extends Consumer { - void accept(String message); -} diff --git a/src/main/java/io/github/ollama4j/models/generate/OllamaTokenHandler.java b/src/main/java/io/github/ollama4j/models/generate/OllamaTokenHandler.java deleted file mode 100644 index a0aed8c..0000000 --- a/src/main/java/io/github/ollama4j/models/generate/OllamaTokenHandler.java +++ /dev/null @@ -1,8 +0,0 @@ -package io.github.ollama4j.models.generate; - -import io.github.ollama4j.models.chat.OllamaChatResponseModel; - -import java.util.function.Consumer; - -public interface OllamaTokenHandler extends Consumer { -} diff --git a/src/main/java/io/github/ollama4j/models/ps/ModelsProcessResponse.java b/src/main/java/io/github/ollama4j/models/ps/ModelProcessesResult.java similarity index 79% rename from src/main/java/io/github/ollama4j/models/ps/ModelsProcessResponse.java rename to src/main/java/io/github/ollama4j/models/ps/ModelProcessesResult.java index 490d362..257d019 100644 --- a/src/main/java/io/github/ollama4j/models/ps/ModelsProcessResponse.java +++ b/src/main/java/io/github/ollama4j/models/ps/ModelProcessesResult.java @@ -1,21 +1,29 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.ps; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; import lombok.Data; import lombok.NoArgsConstructor; -import java.util.List; - @Data @NoArgsConstructor @JsonIgnoreProperties(ignoreUnknown = true) -public class ModelsProcessResponse { +public class ModelProcessesResult { @JsonProperty("models") private List models; @Data @NoArgsConstructor + @JsonIgnoreProperties(ignoreUnknown = true) public static class ModelProcess { @JsonProperty("name") private String name; @@ -33,7 +41,7 @@ public class ModelsProcessResponse { private ModelDetails details; @JsonProperty("expires_at") - private String expiresAt; // Consider using LocalDateTime if you need to process date/time + private String expiresAt; @JsonProperty("size_vram") private long sizeVram; diff --git a/src/main/java/io/github/ollama4j/models/request/Auth.java b/src/main/java/io/github/ollama4j/models/request/Auth.java index 70c9c1b..d81e817 100644 --- a/src/main/java/io/github/ollama4j/models/request/Auth.java +++ b/src/main/java/io/github/ollama4j/models/request/Auth.java @@ -1,10 +1,18 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.request; public abstract class Auth { - /** - * Get authentication header value. - * - * @return authentication header value - */ - public abstract String getAuthHeaderValue(); + /** + * Get authentication header value. + * + * @return authentication header value + */ + public abstract String getAuthHeaderValue(); } diff --git a/src/main/java/io/github/ollama4j/models/request/BasicAuth.java b/src/main/java/io/github/ollama4j/models/request/BasicAuth.java index 13f6a59..80e6653 100644 --- a/src/main/java/io/github/ollama4j/models/request/BasicAuth.java +++ b/src/main/java/io/github/ollama4j/models/request/BasicAuth.java @@ -1,25 +1,32 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.request; +import java.util.Base64; import lombok.AllArgsConstructor; import lombok.Data; import lombok.EqualsAndHashCode; -import java.util.Base64; - @Data @AllArgsConstructor @EqualsAndHashCode(callSuper = false) public class BasicAuth extends Auth { - private String username; - private String password; + private String username; + private String password; - /** - * Get basic authentication header value. - * - * @return basic authentication header value (encoded credentials) - */ - public String getAuthHeaderValue() { - final String credentialsToEncode = this.getUsername() + ":" + this.getPassword(); - return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes()); - } + /** + * Get basic authentication header value. + * + * @return basic authentication header value (encoded credentials) + */ + public String getAuthHeaderValue() { + final String credentialsToEncode = this.getUsername() + ":" + this.getPassword(); + return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes()); + } } diff --git a/src/main/java/io/github/ollama4j/models/request/BearerAuth.java b/src/main/java/io/github/ollama4j/models/request/BearerAuth.java index 4d876f2..cc25309 100644 --- a/src/main/java/io/github/ollama4j/models/request/BearerAuth.java +++ b/src/main/java/io/github/ollama4j/models/request/BearerAuth.java @@ -1,3 +1,11 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.request; import lombok.AllArgsConstructor; diff --git a/src/main/java/io/github/ollama4j/models/request/CustomModelFileContentsRequest.java b/src/main/java/io/github/ollama4j/models/request/CustomModelFileContentsRequest.java index 52bc684..b01e18c 100644 --- a/src/main/java/io/github/ollama4j/models/request/CustomModelFileContentsRequest.java +++ b/src/main/java/io/github/ollama4j/models/request/CustomModelFileContentsRequest.java @@ -1,23 +1,32 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.request; +import static io.github.ollama4j.utils.Utils.getObjectMapper; + import com.fasterxml.jackson.core.JsonProcessingException; import lombok.AllArgsConstructor; import lombok.Data; -import static io.github.ollama4j.utils.Utils.getObjectMapper; - +@SuppressWarnings("SpellCheckingInspection") @Data @AllArgsConstructor public class CustomModelFileContentsRequest { - private String name; - private String modelfile; + private String name; + private String modelfile; - @Override - public String toString() { - try { - return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); + @Override + public String toString() { + try { + return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } } - } } diff --git a/src/main/java/io/github/ollama4j/models/request/CustomModelFilePathRequest.java b/src/main/java/io/github/ollama4j/models/request/CustomModelFilePathRequest.java index 578e1c0..9ac9eb4 100644 --- a/src/main/java/io/github/ollama4j/models/request/CustomModelFilePathRequest.java +++ b/src/main/java/io/github/ollama4j/models/request/CustomModelFilePathRequest.java @@ -1,23 +1,31 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.request; +import static io.github.ollama4j.utils.Utils.getObjectMapper; + import com.fasterxml.jackson.core.JsonProcessingException; import lombok.AllArgsConstructor; import lombok.Data; -import static io.github.ollama4j.utils.Utils.getObjectMapper; - @Data @AllArgsConstructor public class CustomModelFilePathRequest { - private String name; - private String path; + private String name; + private String path; - @Override - public String toString() { - try { - return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); + @Override + public String toString() { + try { + return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } } - } } diff --git a/src/main/java/io/github/ollama4j/models/request/CustomModelRequest.java b/src/main/java/io/github/ollama4j/models/request/CustomModelRequest.java index b2ecb91..7cd7417 100644 --- a/src/main/java/io/github/ollama4j/models/request/CustomModelRequest.java +++ b/src/main/java/io/github/ollama4j/models/request/CustomModelRequest.java @@ -1,15 +1,21 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.request; -import com.fasterxml.jackson.core.JsonProcessingException; -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Data; - -import java.util.List; -import java.util.Map; - import static io.github.ollama4j.utils.Utils.getObjectMapper; +import com.fasterxml.jackson.core.JsonProcessingException; +import java.util.List; +import java.util.Map; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; @Data @AllArgsConstructor @@ -20,7 +26,7 @@ public class CustomModelRequest { private Map files; private Map adapters; private String template; - private Object license; // Using Object to handle both String and List + private Object license; private String system; private Map parameters; private List messages; diff --git a/src/main/java/io/github/ollama4j/models/request/ModelRequest.java b/src/main/java/io/github/ollama4j/models/request/ModelRequest.java index eca4d41..9d771ef 100644 --- a/src/main/java/io/github/ollama4j/models/request/ModelRequest.java +++ b/src/main/java/io/github/ollama4j/models/request/ModelRequest.java @@ -1,22 +1,30 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.request; +import static io.github.ollama4j.utils.Utils.getObjectMapper; + import com.fasterxml.jackson.core.JsonProcessingException; import lombok.AllArgsConstructor; import lombok.Data; -import static io.github.ollama4j.utils.Utils.getObjectMapper; - @Data @AllArgsConstructor public class ModelRequest { - private String name; + private String name; - @Override - public String toString() { - try { - return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); + @Override + public String toString() { + try { + return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } } - } } diff --git a/src/main/java/io/github/ollama4j/models/request/OllamaChatEndpointCaller.java b/src/main/java/io/github/ollama4j/models/request/OllamaChatEndpointCaller.java index 49b4a28..a08cd18 100644 --- a/src/main/java/io/github/ollama4j/models/request/OllamaChatEndpointCaller.java +++ b/src/main/java/io/github/ollama4j/models/request/OllamaChatEndpointCaller.java @@ -1,15 +1,21 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.request; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; -import io.github.ollama4j.exceptions.OllamaBaseException; +import io.github.ollama4j.exceptions.OllamaException; +import io.github.ollama4j.metrics.MetricsRecorder; import io.github.ollama4j.models.chat.*; -import io.github.ollama4j.models.generate.OllamaTokenHandler; +import io.github.ollama4j.models.chat.OllamaChatTokenHandler; import io.github.ollama4j.models.response.OllamaErrorResponse; import io.github.ollama4j.utils.Utils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; @@ -20,50 +26,49 @@ import java.net.http.HttpRequest; import java.net.http.HttpResponse; import java.nio.charset.StandardCharsets; import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -/** - * Specialization class for requests - */ +/** Specialization class for requests */ @SuppressWarnings("resource") public class OllamaChatEndpointCaller extends OllamaEndpointCaller { private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class); + public static final String endpoint = "/api/chat"; - private OllamaTokenHandler tokenHandler; + private OllamaChatTokenHandler tokenHandler; public OllamaChatEndpointCaller(String host, Auth auth, long requestTimeoutSeconds) { super(host, auth, requestTimeoutSeconds); } - @Override - protected String getEndpointSuffix() { - return "/api/chat"; - } - /** - * Parses streamed Response line from ollama chat. - * Using {@link com.fasterxml.jackson.databind.ObjectMapper#readValue(String, TypeReference)} should throw - * {@link IllegalArgumentException} in case of null line or {@link com.fasterxml.jackson.core.JsonParseException} - * in case the JSON Object cannot be parsed to a {@link OllamaChatResponseModel}. Thus, the ResponseModel should - * never be null. + * Parses streamed Response line from ollama chat. Using {@link + * com.fasterxml.jackson.databind.ObjectMapper#readValue(String, TypeReference)} should throw + * {@link IllegalArgumentException} in case of null line or {@link + * com.fasterxml.jackson.core.JsonParseException} in case the JSON Object cannot be parsed to a + * {@link OllamaChatResponseModel}. Thus, the ResponseModel should never be null. * - * @param line streamed line of ollama stream response + * @param line streamed line of ollama stream response * @param responseBuffer Stringbuffer to add latest response message part to * @return TRUE, if ollama-Response has 'done' state */ @Override - protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer, StringBuilder thinkingBuffer) { + protected boolean parseResponseAndAddToBuffer( + String line, StringBuilder responseBuffer, StringBuilder thinkingBuffer) { try { - OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class); - // it seems that under heavy load ollama responds with an empty chat message part in the streamed response - // thus, we null check the message and hope that the next streamed response has some message content again + OllamaChatResponseModel ollamaResponseModel = + Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class); + // It seems that under heavy load Ollama responds with an empty chat message part in the + // streamed response. + // Thus, we null check the message and hope that the next streamed response has some + // message content again. OllamaChatMessage message = ollamaResponseModel.getMessage(); if (message != null) { if (message.getThinking() != null) { thinkingBuffer.append(message.getThinking()); - } - else { - responseBuffer.append(message.getContent()); + } else { + responseBuffer.append(message.getResponse()); } if (tokenHandler != null) { tokenHandler.accept(ollamaResponseModel); @@ -76,22 +81,22 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller { } } - public OllamaChatResult call(OllamaChatRequest body, OllamaTokenHandler tokenHandler) - throws OllamaBaseException, IOException, InterruptedException { + public OllamaChatResult call(OllamaChatRequest body, OllamaChatTokenHandler tokenHandler) + throws OllamaException, IOException, InterruptedException { this.tokenHandler = tokenHandler; return callSync(body); } - public OllamaChatResult callSync(OllamaChatRequest body) throws OllamaBaseException, IOException, InterruptedException { - // Create Request + public OllamaChatResult callSync(OllamaChatRequest body) + throws OllamaException, IOException, InterruptedException { + long startTime = System.currentTimeMillis(); HttpClient httpClient = HttpClient.newHttpClient(); - URI uri = URI.create(getHost() + getEndpointSuffix()); + URI uri = URI.create(getHost() + endpoint); HttpRequest.Builder requestBuilder = - getRequestBuilderDefault(uri) - .POST( - body.getBodyPublisher()); + getRequestBuilderDefault(uri).POST(body.getBodyPublisher()); HttpRequest request = requestBuilder.build(); LOG.debug("Asking model: {}", body); + System.out.println("Asking model: " + Utils.toJSON(body)); HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); @@ -101,57 +106,92 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller { StringBuilder thinkingBuffer = new StringBuilder(); OllamaChatResponseModel ollamaChatResponseModel = null; List wantedToolsForStream = null; - try (BufferedReader reader = - new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { + try (BufferedReader reader = + new BufferedReader( + new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { String line; while ((line = reader.readLine()) != null) { - if (statusCode == 404) { - LOG.warn("Status code: 404 (Not Found)"); - OllamaErrorResponse ollamaResponseModel = - Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class); - responseBuffer.append(ollamaResponseModel.getError()); - } else if (statusCode == 401) { - LOG.warn("Status code: 401 (Unauthorized)"); - OllamaErrorResponse ollamaResponseModel = - Utils.getObjectMapper() - .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponse.class); - responseBuffer.append(ollamaResponseModel.getError()); - } else if (statusCode == 400) { - LOG.warn("Status code: 400 (Bad Request)"); - OllamaErrorResponse ollamaResponseModel = Utils.getObjectMapper().readValue(line, - OllamaErrorResponse.class); - responseBuffer.append(ollamaResponseModel.getError()); - } else if (statusCode == 500) { - LOG.warn("Status code: 500 (Internal Server Error)"); - OllamaErrorResponse ollamaResponseModel = Utils.getObjectMapper().readValue(line, - OllamaErrorResponse.class); - responseBuffer.append(ollamaResponseModel.getError()); - } else { - boolean finished = parseResponseAndAddToBuffer(line, responseBuffer, thinkingBuffer); - ollamaChatResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class); - if (body.stream && ollamaChatResponseModel.getMessage().getToolCalls() != null) { - wantedToolsForStream = ollamaChatResponseModel.getMessage().getToolCalls(); - } - if (finished && body.stream) { - ollamaChatResponseModel.getMessage().setContent(responseBuffer.toString()); - ollamaChatResponseModel.getMessage().setThinking(thinkingBuffer.toString()); - break; - } + if (handleErrorStatus(statusCode, line, responseBuffer)) { + continue; + } + boolean finished = + parseResponseAndAddToBuffer(line, responseBuffer, thinkingBuffer); + ollamaChatResponseModel = + Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class); + if (body.stream && ollamaChatResponseModel.getMessage().getToolCalls() != null) { + wantedToolsForStream = ollamaChatResponseModel.getMessage().getToolCalls(); + } + if (finished && body.stream) { + ollamaChatResponseModel.getMessage().setResponse(responseBuffer.toString()); + ollamaChatResponseModel.getMessage().setThinking(thinkingBuffer.toString()); + break; } } } + MetricsRecorder.record( + endpoint, + body.getModel(), + false, + body.isThink(), + body.isStream(), + body.getOptions(), + body.getFormat(), + startTime, + statusCode, + responseBuffer); if (statusCode != 200) { - LOG.error("Status code " + statusCode); - throw new OllamaBaseException(responseBuffer.toString()); - } else { - if (wantedToolsForStream != null) { - ollamaChatResponseModel.getMessage().setToolCalls(wantedToolsForStream); - } - OllamaChatResult ollamaResult = - new OllamaChatResult(ollamaChatResponseModel, body.getMessages()); - LOG.debug("Model response: {}", ollamaResult); - return ollamaResult; + LOG.error("Status code: {}", statusCode); + System.out.println(responseBuffer); + throw new OllamaException(responseBuffer.toString()); + } + if (wantedToolsForStream != null && ollamaChatResponseModel != null) { + ollamaChatResponseModel.getMessage().setToolCalls(wantedToolsForStream); + } + OllamaChatResult ollamaResult = + new OllamaChatResult(ollamaChatResponseModel, body.getMessages()); + LOG.debug("Model response: {}", ollamaResult); + return ollamaResult; + } + + /** + * Handles error status codes and appends error messages to the response buffer. Returns true if + * an error was handled, false otherwise. + */ + private boolean handleErrorStatus(int statusCode, String line, StringBuilder responseBuffer) + throws IOException { + switch (statusCode) { + case 404: + LOG.warn("Status code: 404 (Not Found)"); + responseBuffer.append( + Utils.getObjectMapper() + .readValue(line, OllamaErrorResponse.class) + .getError()); + return true; + case 401: + LOG.warn("Status code: 401 (Unauthorized)"); + responseBuffer.append( + Utils.getObjectMapper() + .readValue( + "{\"error\":\"Unauthorized\"}", OllamaErrorResponse.class) + .getError()); + return true; + case 400: + LOG.warn("Status code: 400 (Bad Request)"); + responseBuffer.append( + Utils.getObjectMapper() + .readValue(line, OllamaErrorResponse.class) + .getError()); + return true; + case 500: + LOG.warn("Status code: 500 (Internal Server Error)"); + responseBuffer.append( + Utils.getObjectMapper() + .readValue(line, OllamaErrorResponse.class) + .getError()); + return true; + default: + return false; } } } diff --git a/src/main/java/io/github/ollama4j/models/request/OllamaCommonRequest.java b/src/main/java/io/github/ollama4j/models/request/OllamaCommonRequest.java index 879d801..d8c996c 100644 --- a/src/main/java/io/github/ollama4j/models/request/OllamaCommonRequest.java +++ b/src/main/java/io/github/ollama4j/models/request/OllamaCommonRequest.java @@ -1,35 +1,49 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.request; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import io.github.ollama4j.utils.BooleanToJsonFormatFlagSerializer; import io.github.ollama4j.utils.Utils; -import lombok.Data; - import java.util.Map; +import lombok.Data; @Data @JsonInclude(JsonInclude.Include.NON_NULL) public abstract class OllamaCommonRequest { - protected String model; - @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class) - @JsonProperty(value = "format") - protected Boolean returnFormatJson; - protected Map options; - protected String template; - protected boolean stream; - @JsonProperty(value = "keep_alive") - protected String keepAlive; + protected String model; + /** + * The value can either be + *
{@code json }
+ * or + *
{@code {"key1": "val1", "key2": "val2"} }
+ */ + @JsonProperty(value = "format", required = false, defaultValue = "json") + protected Object format; - public String toString() { - try { - return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); + protected Map options; + protected String template; + protected boolean stream; + + @JsonProperty(value = "keep_alive") + protected String keepAlive; + + public String toString() { + try { + return Utils.getObjectMapper() + .writerWithDefaultPrettyPrinter() + .writeValueAsString(this); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } } - } } diff --git a/src/main/java/io/github/ollama4j/models/request/OllamaEndpointCaller.java b/src/main/java/io/github/ollama4j/models/request/OllamaEndpointCaller.java index 50247ae..85c5132 100644 --- a/src/main/java/io/github/ollama4j/models/request/OllamaEndpointCaller.java +++ b/src/main/java/io/github/ollama4j/models/request/OllamaEndpointCaller.java @@ -1,14 +1,21 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.request; import io.github.ollama4j.utils.Constants; -import lombok.Getter; - import java.net.URI; import java.net.http.HttpRequest; import java.time.Duration; +import lombok.Getter; /** - * Abstract helperclass to call the ollama api server. + * Abstract helper class to call the ollama api server. */ @Getter public abstract class OllamaEndpointCaller { @@ -17,16 +24,14 @@ public abstract class OllamaEndpointCaller { private final Auth auth; private final long requestTimeoutSeconds; - public OllamaEndpointCaller(String host, Auth auth, long requestTimeoutSeconds) { + protected OllamaEndpointCaller(String host, Auth auth, long requestTimeoutSeconds) { this.host = host; this.auth = auth; this.requestTimeoutSeconds = requestTimeoutSeconds; } - protected abstract String getEndpointSuffix(); - - protected abstract boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer, StringBuilder thinkingBuffer); - + protected abstract boolean parseResponseAndAddToBuffer( + String line, StringBuilder responseBuffer, StringBuilder thinkingBuffer); /** * Get default request builder. @@ -37,7 +42,9 @@ public abstract class OllamaEndpointCaller { protected HttpRequest.Builder getRequestBuilderDefault(URI uri) { HttpRequest.Builder requestBuilder = HttpRequest.newBuilder(uri) - .header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON) + .header( + Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, + Constants.HttpConstants.APPLICATION_JSON) .timeout(Duration.ofSeconds(this.requestTimeoutSeconds)); if (isAuthCredentialsSet()) { requestBuilder.header("Authorization", this.auth.getAuthHeaderValue()); @@ -53,5 +60,4 @@ public abstract class OllamaEndpointCaller { protected boolean isAuthCredentialsSet() { return this.auth != null; } - } diff --git a/src/main/java/io/github/ollama4j/models/request/OllamaGenerateEndpointCaller.java b/src/main/java/io/github/ollama4j/models/request/OllamaGenerateEndpointCaller.java index 2c70f62..fcd16fc 100644 --- a/src/main/java/io/github/ollama4j/models/request/OllamaGenerateEndpointCaller.java +++ b/src/main/java/io/github/ollama4j/models/request/OllamaGenerateEndpointCaller.java @@ -1,17 +1,22 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.request; import com.fasterxml.jackson.core.JsonProcessingException; -import io.github.ollama4j.exceptions.OllamaBaseException; +import io.github.ollama4j.exceptions.OllamaException; import io.github.ollama4j.models.generate.OllamaGenerateResponseModel; import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver; -import io.github.ollama4j.models.generate.OllamaStreamHandler; +import io.github.ollama4j.models.generate.OllamaGenerateTokenHandler; import io.github.ollama4j.models.response.OllamaErrorResponse; import io.github.ollama4j.models.response.OllamaResult; import io.github.ollama4j.utils.OllamaRequestBody; import io.github.ollama4j.utils.Utils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; @@ -21,11 +26,14 @@ import java.net.http.HttpClient; import java.net.http.HttpRequest; import java.net.http.HttpResponse; import java.nio.charset.StandardCharsets; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @SuppressWarnings("resource") public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller { private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class); + public static final String endpoint = "/api/generate"; private OllamaGenerateStreamObserver responseStreamObserver; @@ -34,14 +42,11 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller { } @Override - protected String getEndpointSuffix() { - return "/api/generate"; - } - - @Override - protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer, StringBuilder thinkingBuffer) { + protected boolean parseResponseAndAddToBuffer( + String line, StringBuilder responseBuffer, StringBuilder thinkingBuffer) { try { - OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); + OllamaGenerateResponseModel ollamaResponseModel = + Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); if (ollamaResponseModel.getResponse() != null) { responseBuffer.append(ollamaResponseModel.getResponse()); } @@ -58,55 +63,61 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller { } } - public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler thinkingStreamHandler, OllamaStreamHandler responseStreamHandler) throws OllamaBaseException, IOException, InterruptedException { - responseStreamObserver = new OllamaGenerateStreamObserver(thinkingStreamHandler, responseStreamHandler); + public OllamaResult call( + OllamaRequestBody body, + OllamaGenerateTokenHandler thinkingStreamHandler, + OllamaGenerateTokenHandler responseStreamHandler) + throws OllamaException, IOException, InterruptedException { + responseStreamObserver = + new OllamaGenerateStreamObserver(thinkingStreamHandler, responseStreamHandler); return callSync(body); } /** - * Calls the api server on the given host and endpoint suffix asynchronously, aka waiting for the response. + * Calls the api server on the given host and endpoint suffix asynchronously, aka waiting for + * the response. * * @param body POST body payload * @return result answer given by the assistant - * @throws OllamaBaseException any response code than 200 has been returned - * @throws IOException in case the responseStream can not be read + * @throws OllamaException any response code than 200 has been returned + * @throws IOException in case the responseStream can not be read * @throws InterruptedException in case the server is not reachable or network issues happen */ @SuppressWarnings("DuplicatedCode") - public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException { - // Create Request + public OllamaResult callSync(OllamaRequestBody body) + throws OllamaException, IOException, InterruptedException { long startTime = System.currentTimeMillis(); HttpClient httpClient = HttpClient.newHttpClient(); - URI uri = URI.create(getHost() + getEndpointSuffix()); - HttpRequest.Builder requestBuilder = getRequestBuilderDefault(uri).POST(body.getBodyPublisher()); + URI uri = URI.create(getHost() + endpoint); + HttpRequest.Builder requestBuilder = + getRequestBuilderDefault(uri).POST(body.getBodyPublisher()); HttpRequest request = requestBuilder.build(); LOG.debug("Asking model: {}", body); - HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); + HttpResponse response = + httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); int statusCode = response.statusCode(); InputStream responseBodyStream = response.body(); StringBuilder responseBuffer = new StringBuilder(); StringBuilder thinkingBuffer = new StringBuilder(); OllamaGenerateResponseModel ollamaGenerateResponseModel = null; - try (BufferedReader reader = new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { + try (BufferedReader reader = + new BufferedReader( + new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { String line; while ((line = reader.readLine()) != null) { - if (statusCode == 404) { - LOG.warn("Status code: 404 (Not Found)"); - OllamaErrorResponse ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class); - responseBuffer.append(ollamaResponseModel.getError()); - } else if (statusCode == 401) { - LOG.warn("Status code: 401 (Unauthorized)"); - OllamaErrorResponse ollamaResponseModel = Utils.getObjectMapper().readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponse.class); - responseBuffer.append(ollamaResponseModel.getError()); - } else if (statusCode == 400) { - LOG.warn("Status code: 400 (Bad Request)"); - OllamaErrorResponse ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class); + if (statusCode >= 400) { + LOG.warn("Error code: {}", statusCode); + OllamaErrorResponse ollamaResponseModel = + Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class); responseBuffer.append(ollamaResponseModel.getError()); } else { - boolean finished = parseResponseAndAddToBuffer(line, responseBuffer, thinkingBuffer); + boolean finished = + parseResponseAndAddToBuffer(line, responseBuffer, thinkingBuffer); if (finished) { - ollamaGenerateResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); + ollamaGenerateResponseModel = + Utils.getObjectMapper() + .readValue(line, OllamaGenerateResponseModel.class); break; } } @@ -115,11 +126,16 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller { if (statusCode != 200) { LOG.error("Status code: {}", statusCode); - throw new OllamaBaseException(responseBuffer.toString()); + LOG.error("Response: {}", responseBuffer); + throw new OllamaException(responseBuffer.toString()); } else { long endTime = System.currentTimeMillis(); - OllamaResult ollamaResult = new OllamaResult(responseBuffer.toString(), thinkingBuffer.toString(), endTime - startTime, statusCode); - + OllamaResult ollamaResult = + new OllamaResult( + responseBuffer.toString(), + thinkingBuffer.toString(), + endTime - startTime, + statusCode); ollamaResult.setModel(ollamaGenerateResponseModel.getModel()); ollamaResult.setCreatedAt(ollamaGenerateResponseModel.getCreatedAt()); ollamaResult.setDone(ollamaGenerateResponseModel.isDone()); diff --git a/src/main/java/io/github/ollama4j/models/response/LibraryModel.java b/src/main/java/io/github/ollama4j/models/response/LibraryModel.java deleted file mode 100644 index c5f1627..0000000 --- a/src/main/java/io/github/ollama4j/models/response/LibraryModel.java +++ /dev/null @@ -1,17 +0,0 @@ -package io.github.ollama4j.models.response; - -import lombok.Data; - -import java.util.ArrayList; -import java.util.List; - -@Data -public class LibraryModel { - - private String name; - private String description; - private String pullCount; - private int totalTags; - private List popularTags = new ArrayList<>(); - private String lastUpdated; -} diff --git a/src/main/java/io/github/ollama4j/models/response/LibraryModelDetail.java b/src/main/java/io/github/ollama4j/models/response/LibraryModelDetail.java deleted file mode 100644 index 142873c..0000000 --- a/src/main/java/io/github/ollama4j/models/response/LibraryModelDetail.java +++ /dev/null @@ -1,12 +0,0 @@ -package io.github.ollama4j.models.response; - -import lombok.Data; - -import java.util.List; - -@Data -public class LibraryModelDetail { - - private LibraryModel model; - private List tags; -} diff --git a/src/main/java/io/github/ollama4j/models/response/LibraryModelTag.java b/src/main/java/io/github/ollama4j/models/response/LibraryModelTag.java deleted file mode 100644 index cd65d32..0000000 --- a/src/main/java/io/github/ollama4j/models/response/LibraryModelTag.java +++ /dev/null @@ -1,11 +0,0 @@ -package io.github.ollama4j.models.response; - -import lombok.Data; - -@Data -public class LibraryModelTag { - private String name; - private String tag; - private String size; - private String lastUpdated; -} diff --git a/src/main/java/io/github/ollama4j/models/response/ListModelsResponse.java b/src/main/java/io/github/ollama4j/models/response/ListModelsResponse.java index e22b796..c7e2bdf 100644 --- a/src/main/java/io/github/ollama4j/models/response/ListModelsResponse.java +++ b/src/main/java/io/github/ollama4j/models/response/ListModelsResponse.java @@ -1,8 +1,15 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.response; -import lombok.Data; - import java.util.List; +import lombok.Data; @Data public class ListModelsResponse { diff --git a/src/main/java/io/github/ollama4j/models/response/Model.java b/src/main/java/io/github/ollama4j/models/response/Model.java index a616404..a419f8d 100644 --- a/src/main/java/io/github/ollama4j/models/response/Model.java +++ b/src/main/java/io/github/ollama4j/models/response/Model.java @@ -1,54 +1,65 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.response; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; import io.github.ollama4j.utils.Utils; -import lombok.Data; - import java.time.OffsetDateTime; +import lombok.Data; @Data @JsonIgnoreProperties(ignoreUnknown = true) public class Model { - private String name; - private String model; - @JsonProperty("modified_at") - private OffsetDateTime modifiedAt; - @JsonProperty("expires_at") - private OffsetDateTime expiresAt; - private String digest; - private long size; - @JsonProperty("details") - private ModelMeta modelMeta; + private String name; + private String model; + @JsonProperty("modified_at") + private OffsetDateTime modifiedAt; - /** - * Returns the model name without its version - * - * @return model name - */ - public String getModelName() { - return name.split(":")[0]; - } + @JsonProperty("expires_at") + private OffsetDateTime expiresAt; - /** - * Returns the model version without its name - * - * @return model version - */ - public String getModelVersion() { - return name.split(":")[1]; - } + private String digest; + private long size; + + @JsonProperty("details") + private ModelMeta modelMeta; + + /** + * Returns the model name without its version + * + * @return model name + */ + public String getModelName() { + return name.split(":")[0]; + } + + /** + * Returns the model version without its name + * + * @return model version + */ + public String getModelVersion() { + return name.split(":")[1]; + } @Override - public String toString() { - try { - return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); + public String toString() { + try { + return Utils.getObjectMapper() + .writerWithDefaultPrettyPrinter() + .writeValueAsString(this); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } } - } - } diff --git a/src/main/java/io/github/ollama4j/models/response/ModelDetail.java b/src/main/java/io/github/ollama4j/models/response/ModelDetail.java index cf7e6bb..2140bfd 100644 --- a/src/main/java/io/github/ollama4j/models/response/ModelDetail.java +++ b/src/main/java/io/github/ollama4j/models/response/ModelDetail.java @@ -1,3 +1,11 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.response; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @@ -9,22 +17,24 @@ import lombok.Data; @Data @JsonIgnoreProperties(ignoreUnknown = true) public class ModelDetail { - private String license; + private String license; - @JsonProperty("modelfile") - private String modelFile; + @JsonProperty("modelfile") + private String modelFile; - private String parameters; - private String template; - private String system; - private ModelMeta details; + private String parameters; + private String template; + private String system; + private ModelMeta details; @Override - public String toString() { - try { - return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); + public String toString() { + try { + return Utils.getObjectMapper() + .writerWithDefaultPrettyPrinter() + .writeValueAsString(this); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } } - } } diff --git a/src/main/java/io/github/ollama4j/models/response/ModelMeta.java b/src/main/java/io/github/ollama4j/models/response/ModelMeta.java index eb7f176..3c5a4c4 100644 --- a/src/main/java/io/github/ollama4j/models/response/ModelMeta.java +++ b/src/main/java/io/github/ollama4j/models/response/ModelMeta.java @@ -1,3 +1,11 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.response; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @@ -9,27 +17,29 @@ import lombok.Data; @Data @JsonIgnoreProperties(ignoreUnknown = true) public class ModelMeta { - @JsonProperty("format") - private String format; + @JsonProperty("format") + private String format; - @JsonProperty("family") - private String family; + @JsonProperty("family") + private String family; - @JsonProperty("families") - private String[] families; + @JsonProperty("families") + private String[] families; - @JsonProperty("parameter_size") - private String parameterSize; + @JsonProperty("parameter_size") + private String parameterSize; - @JsonProperty("quantization_level") - private String quantizationLevel; + @JsonProperty("quantization_level") + private String quantizationLevel; @Override - public String toString() { - try { - return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); + public String toString() { + try { + return Utils.getObjectMapper() + .writerWithDefaultPrettyPrinter() + .writeValueAsString(this); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } } - } } diff --git a/src/main/java/io/github/ollama4j/models/response/ModelPullResponse.java b/src/main/java/io/github/ollama4j/models/response/ModelPullResponse.java index eac1870..2078348 100644 --- a/src/main/java/io/github/ollama4j/models/response/ModelPullResponse.java +++ b/src/main/java/io/github/ollama4j/models/response/ModelPullResponse.java @@ -1,3 +1,11 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.response; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; diff --git a/src/main/java/io/github/ollama4j/models/response/OllamaAsyncResultStreamer.java b/src/main/java/io/github/ollama4j/models/response/OllamaAsyncResultStreamer.java index f4a68f7..cb566b6 100644 --- a/src/main/java/io/github/ollama4j/models/response/OllamaAsyncResultStreamer.java +++ b/src/main/java/io/github/ollama4j/models/response/OllamaAsyncResultStreamer.java @@ -1,15 +1,18 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.response; -import io.github.ollama4j.exceptions.OllamaBaseException; +import io.github.ollama4j.exceptions.OllamaException; import io.github.ollama4j.models.generate.OllamaGenerateRequest; import io.github.ollama4j.models.generate.OllamaGenerateResponseModel; import io.github.ollama4j.utils.Constants; import io.github.ollama4j.utils.Utils; -import lombok.Data; -import lombok.EqualsAndHashCode; -import lombok.Getter; -import lombok.Setter; - import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; @@ -19,6 +22,10 @@ import java.net.http.HttpRequest; import java.net.http.HttpResponse; import java.nio.charset.StandardCharsets; import java.time.Duration; +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.Setter; @Data @EqualsAndHashCode(callSuper = true) @@ -31,32 +38,30 @@ public class OllamaAsyncResultStreamer extends Thread { private String completeResponse; private String completeThinkingResponse; - /** * -- GETTER -- Returns the status of the request. Indicates if the request was successful or a * failure. If the request was a failure, the `getResponse()` method will return the error * message. */ - @Getter - private boolean succeeded; + @Getter private boolean succeeded; - @Setter - private long requestTimeoutSeconds; + @Setter private long requestTimeoutSeconds; /** * -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama * server. */ - @Getter - private int httpStatusCode; + @Getter private int httpStatusCode; /** * -- GETTER -- Returns the response time in milliseconds. */ - @Getter - private long responseTime = 0; + @Getter private long responseTime = 0; - public OllamaAsyncResultStreamer(HttpRequest.Builder requestBuilder, OllamaGenerateRequest ollamaRequestModel, long requestTimeoutSeconds) { + public OllamaAsyncResultStreamer( + HttpRequest.Builder requestBuilder, + OllamaGenerateRequest ollamaRequestModel, + long requestTimeoutSeconds) { this.requestBuilder = requestBuilder; this.ollamaRequestModel = ollamaRequestModel; this.completeResponse = ""; @@ -70,25 +75,41 @@ public class OllamaAsyncResultStreamer extends Thread { HttpClient httpClient = HttpClient.newHttpClient(); long startTime = System.currentTimeMillis(); try { - HttpRequest request = requestBuilder.POST(HttpRequest.BodyPublishers.ofString(Utils.getObjectMapper().writeValueAsString(ollamaRequestModel))).header(Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, Constants.HttpConstants.APPLICATION_JSON).timeout(Duration.ofSeconds(requestTimeoutSeconds)).build(); - HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); + HttpRequest request = + requestBuilder + .POST( + HttpRequest.BodyPublishers.ofString( + Utils.getObjectMapper() + .writeValueAsString(ollamaRequestModel))) + .header( + Constants.HttpConstants.HEADER_KEY_CONTENT_TYPE, + Constants.HttpConstants.APPLICATION_JSON) + .timeout(Duration.ofSeconds(requestTimeoutSeconds)) + .build(); + HttpResponse response = + httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); int statusCode = response.statusCode(); this.httpStatusCode = statusCode; InputStream responseBodyStream = response.body(); BufferedReader reader = null; try { - reader = new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8)); + reader = + new BufferedReader( + new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8)); String line; StringBuilder thinkingBuffer = new StringBuilder(); StringBuilder responseBuffer = new StringBuilder(); while ((line = reader.readLine()) != null) { if (statusCode == 404) { - OllamaErrorResponse ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class); + OllamaErrorResponse ollamaResponseModel = + Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class); responseStream.add(ollamaResponseModel.getError()); responseBuffer.append(ollamaResponseModel.getError()); } else { - OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); + OllamaGenerateResponseModel ollamaResponseModel = + Utils.getObjectMapper() + .readValue(line, OllamaGenerateResponseModel.class); String thinkingTokens = ollamaResponseModel.getThinking(); String responseTokens = ollamaResponseModel.getResponse(); if (thinkingTokens == null) { @@ -115,24 +136,27 @@ public class OllamaAsyncResultStreamer extends Thread { try { reader.close(); } catch (IOException e) { - // Optionally log or handle + /* do nothing */ } } if (responseBodyStream != null) { try { responseBodyStream.close(); } catch (IOException e) { - // Optionally log or handle + /* do nothing */ } } } if (statusCode != 200) { - throw new OllamaBaseException(this.completeResponse); + throw new OllamaException(this.completeResponse); } - } catch (IOException | InterruptedException | OllamaBaseException e) { + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + this.succeeded = false; + this.completeResponse = "[FAILED] " + e.getMessage(); + } catch (IOException | OllamaException e) { this.succeeded = false; this.completeResponse = "[FAILED] " + e.getMessage(); } } - } diff --git a/src/main/java/io/github/ollama4j/models/response/OllamaErrorResponse.java b/src/main/java/io/github/ollama4j/models/response/OllamaErrorResponse.java index bbc78c1..74faf2e 100644 --- a/src/main/java/io/github/ollama4j/models/response/OllamaErrorResponse.java +++ b/src/main/java/io/github/ollama4j/models/response/OllamaErrorResponse.java @@ -1,3 +1,11 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.response; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @@ -7,5 +15,5 @@ import lombok.Data; @JsonIgnoreProperties(ignoreUnknown = true) public class OllamaErrorResponse { - private String error; + private String error; } diff --git a/src/main/java/io/github/ollama4j/models/response/OllamaResult.java b/src/main/java/io/github/ollama4j/models/response/OllamaResult.java index ce6d5e3..2edc8e8 100644 --- a/src/main/java/io/github/ollama4j/models/response/OllamaResult.java +++ b/src/main/java/io/github/ollama4j/models/response/OllamaResult.java @@ -1,21 +1,30 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.response; +import static io.github.ollama4j.utils.Utils.getObjectMapper; + import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; -import lombok.Data; -import lombok.Getter; - import java.util.HashMap; import java.util.List; import java.util.Map; - -import static io.github.ollama4j.utils.Utils.getObjectMapper; +import lombok.Data; +import lombok.Getter; +import lombok.Setter; /** * The type Ollama result. */ @Getter +@Setter @SuppressWarnings("unused") @Data @JsonIgnoreProperties(ignoreUnknown = true) @@ -24,14 +33,17 @@ public class OllamaResult { * Get the completion/response text */ private final String response; + /** * Get the thinking text (if available) */ private final String thinking; + /** * Get the response status code. */ private int httpStatusCode; + /** * Get the response time in milliseconds. */ @@ -75,7 +87,9 @@ public class OllamaResult { responseMap.put("promptEvalDuration", this.promptEvalDuration); responseMap.put("evalCount", this.evalCount); responseMap.put("evalDuration", this.evalDuration); - return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(responseMap); + return getObjectMapper() + .writerWithDefaultPrettyPrinter() + .writeValueAsString(responseMap); } catch (JsonProcessingException e) { throw new RuntimeException(e); } @@ -95,17 +109,16 @@ public class OllamaResult { try { // Check if the response is a valid JSON - if ((!responseStr.trim().startsWith("{") && !responseStr.trim().startsWith("[")) || - (!responseStr.trim().endsWith("}") && !responseStr.trim().endsWith("]"))) { + if ((!responseStr.trim().startsWith("{") && !responseStr.trim().startsWith("[")) + || (!responseStr.trim().endsWith("}") && !responseStr.trim().endsWith("]"))) { throw new IllegalArgumentException("Response is not a valid JSON object"); } - Map response = getObjectMapper().readValue(responseStr, - new TypeReference>() { - }); - return response; + return getObjectMapper() + .readValue(responseStr, new TypeReference>() {}); } catch (JsonProcessingException e) { - throw new IllegalArgumentException("Failed to parse response as JSON: " + e.getMessage(), e); + throw new IllegalArgumentException( + "Failed to parse response as JSON: " + e.getMessage(), e); } } @@ -126,13 +139,14 @@ public class OllamaResult { try { // Check if the response is a valid JSON - if ((!responseStr.trim().startsWith("{") && !responseStr.trim().startsWith("[")) || - (!responseStr.trim().endsWith("}") && !responseStr.trim().endsWith("]"))) { + if ((!responseStr.trim().startsWith("{") && !responseStr.trim().startsWith("[")) + || (!responseStr.trim().endsWith("}") && !responseStr.trim().endsWith("]"))) { throw new IllegalArgumentException("Response is not a valid JSON object"); } return getObjectMapper().readValue(responseStr, clazz); } catch (JsonProcessingException e) { - throw new IllegalArgumentException("Failed to parse response as JSON: " + e.getMessage(), e); + throw new IllegalArgumentException( + "Failed to parse response as JSON: " + e.getMessage(), e); } } } diff --git a/src/main/java/io/github/ollama4j/models/response/OllamaResultStream.java b/src/main/java/io/github/ollama4j/models/response/OllamaResultStream.java index de44d63..ace70f1 100644 --- a/src/main/java/io/github/ollama4j/models/response/OllamaResultStream.java +++ b/src/main/java/io/github/ollama4j/models/response/OllamaResultStream.java @@ -1,3 +1,11 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.response; import java.util.Iterator; diff --git a/src/main/java/io/github/ollama4j/models/response/OllamaStructuredResult.java b/src/main/java/io/github/ollama4j/models/response/OllamaStructuredResult.java index 01bf446..17c6ba4 100644 --- a/src/main/java/io/github/ollama4j/models/response/OllamaStructuredResult.java +++ b/src/main/java/io/github/ollama4j/models/response/OllamaStructuredResult.java @@ -1,85 +1,90 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.response; +import static io.github.ollama4j.utils.Utils.getObjectMapper; + import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; +import java.util.List; +import java.util.Map; import lombok.Data; import lombok.Getter; import lombok.NoArgsConstructor; -import java.util.List; -import java.util.Map; - -import static io.github.ollama4j.utils.Utils.getObjectMapper; - @Getter @SuppressWarnings("unused") @Data @NoArgsConstructor @JsonIgnoreProperties(ignoreUnknown = true) public class OllamaStructuredResult { - private String response; - private String thinking; - private int httpStatusCode; - private long responseTime = 0; - private String model; + private String response; + private String thinking; + private int httpStatusCode; + private long responseTime = 0; + private String model; - private @JsonProperty("created_at") String createdAt; - private boolean done; - private @JsonProperty("done_reason") String doneReason; - private List context; - private @JsonProperty("total_duration") Long totalDuration; - private @JsonProperty("load_duration") Long loadDuration; - private @JsonProperty("prompt_eval_count") Integer promptEvalCount; - private @JsonProperty("prompt_eval_duration") Long promptEvalDuration; - private @JsonProperty("eval_count") Integer evalCount; - private @JsonProperty("eval_duration") Long evalDuration; + private @JsonProperty("created_at") String createdAt; + private boolean done; + private @JsonProperty("done_reason") String doneReason; + private List context; + private @JsonProperty("total_duration") Long totalDuration; + private @JsonProperty("load_duration") Long loadDuration; + private @JsonProperty("prompt_eval_count") Integer promptEvalCount; + private @JsonProperty("prompt_eval_duration") Long promptEvalDuration; + private @JsonProperty("eval_count") Integer evalCount; + private @JsonProperty("eval_duration") Long evalDuration; - public OllamaStructuredResult(String response, long responseTime, int httpStatusCode) { - this.response = response; - this.responseTime = responseTime; - this.httpStatusCode = httpStatusCode; - } - - @Override - public String toString() { - try { - return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); + public OllamaStructuredResult(String response, long responseTime, int httpStatusCode) { + this.response = response; + this.responseTime = responseTime; + this.httpStatusCode = httpStatusCode; } - } - /** - * Get the structured response if the response is a JSON object. - * - * @return Map - structured response - */ - public Map getStructuredResponse() { - try { - Map response = getObjectMapper().readValue(this.getResponse(), - new TypeReference>() { - }); - return response; - } catch (JsonProcessingException e) { - throw new RuntimeException(e); + @Override + public String toString() { + try { + return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } } - } - /** - * Get the structured response mapped to a specific class type. - * - * @param The type of class to map the response to - * @param clazz The class to map the response to - * @return An instance of the specified class with the response data - * @throws RuntimeException if there is an error mapping the response - */ - public T getStructuredResponse(Class clazz) { - try { - return getObjectMapper().readValue(this.getResponse(), clazz); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); + /** + * Get the structured response if the response is a JSON object. + * + * @return Map - structured response + */ + public Map getStructuredResponse() { + try { + return getObjectMapper() + .readValue(this.getResponse(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + + /** + * Get the structured response mapped to a specific class type. + * + * @param The type of class to map the response to + * @param clazz The class to map the response to + * @return An instance of the specified class with the response data + * @throws RuntimeException if there is an error mapping the response + */ + public T getStructuredResponse(Class clazz) { + try { + return getObjectMapper().readValue(this.getResponse(), clazz); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } } - } } diff --git a/src/main/java/io/github/ollama4j/models/response/OllamaVersion.java b/src/main/java/io/github/ollama4j/models/response/OllamaVersion.java index 11b7524..a1bd907 100644 --- a/src/main/java/io/github/ollama4j/models/response/OllamaVersion.java +++ b/src/main/java/io/github/ollama4j/models/response/OllamaVersion.java @@ -1,3 +1,11 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.models.response; import lombok.Data; diff --git a/src/main/java/io/github/ollama4j/tools/OllamaToolCallsFunction.java b/src/main/java/io/github/ollama4j/tools/OllamaToolCallsFunction.java index f46f0bb..b7feb79 100644 --- a/src/main/java/io/github/ollama4j/tools/OllamaToolCallsFunction.java +++ b/src/main/java/io/github/ollama4j/tools/OllamaToolCallsFunction.java @@ -1,18 +1,24 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.tools; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import java.util.Map; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; -import java.util.Map; - @Data @NoArgsConstructor @AllArgsConstructor @JsonIgnoreProperties(ignoreUnknown = true) -public class OllamaToolCallsFunction -{ +public class OllamaToolCallsFunction { private String name; - private Map arguments; + private Map arguments; } diff --git a/src/main/java/io/github/ollama4j/tools/OllamaToolsResult.java b/src/main/java/io/github/ollama4j/tools/OllamaToolsResult.java index 35fada3..9854211 100644 --- a/src/main/java/io/github/ollama4j/tools/OllamaToolsResult.java +++ b/src/main/java/io/github/ollama4j/tools/OllamaToolsResult.java @@ -1,13 +1,20 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.tools; import io.github.ollama4j.models.response.OllamaResult; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; - import java.util.ArrayList; import java.util.List; import java.util.Map; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; @Data @NoArgsConstructor @@ -22,7 +29,8 @@ public class OllamaToolsResult { return results; } for (Map.Entry r : this.toolResults.entrySet()) { - results.add(new ToolResult(r.getKey().getName(), r.getKey().getArguments(), r.getValue())); + results.add( + new ToolResult(r.getKey().getName(), r.getKey().getArguments(), r.getValue())); } return results; } diff --git a/src/main/java/io/github/ollama4j/tools/ReflectionalToolFunction.java b/src/main/java/io/github/ollama4j/tools/ReflectionalToolFunction.java index 66d078b..d85793d 100644 --- a/src/main/java/io/github/ollama4j/tools/ReflectionalToolFunction.java +++ b/src/main/java/io/github/ollama4j/tools/ReflectionalToolFunction.java @@ -1,13 +1,20 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.tools; -import lombok.AllArgsConstructor; -import lombok.Getter; -import lombok.Setter; - import java.lang.reflect.Method; import java.math.BigDecimal; import java.util.LinkedHashMap; import java.util.Map; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.Setter; /** * Specification of a {@link ToolFunction} that provides the implementation via java reflection calling. @@ -15,17 +22,18 @@ import java.util.Map; @Setter @Getter @AllArgsConstructor -public class ReflectionalToolFunction implements ToolFunction{ +public class ReflectionalToolFunction implements ToolFunction { private Object functionHolder; private Method function; - private LinkedHashMap propertyDefinition; + private LinkedHashMap propertyDefinition; @Override public Object apply(Map arguments) { LinkedHashMap argumentsCopy = new LinkedHashMap<>(this.propertyDefinition); - for (Map.Entry param : this.propertyDefinition.entrySet()){ - argumentsCopy.replace(param.getKey(),typeCast(arguments.get(param.getKey()),param.getValue())); + for (Map.Entry param : this.propertyDefinition.entrySet()) { + argumentsCopy.replace( + param.getKey(), typeCast(arguments.get(param.getKey()), param.getValue())); } try { return function.invoke(functionHolder, argumentsCopy.values().toArray()); @@ -35,7 +43,7 @@ public class ReflectionalToolFunction implements ToolFunction{ } private Object typeCast(Object inputValue, String className) { - if(className == null || inputValue == null) { + if (className == null || inputValue == null) { return null; } String inputValueString = inputValue.toString(); @@ -50,5 +58,4 @@ public class ReflectionalToolFunction implements ToolFunction{ return inputValueString; } } - } diff --git a/src/main/java/io/github/ollama4j/tools/ToolFunction.java b/src/main/java/io/github/ollama4j/tools/ToolFunction.java index 51ab8c5..c2dc6bf 100644 --- a/src/main/java/io/github/ollama4j/tools/ToolFunction.java +++ b/src/main/java/io/github/ollama4j/tools/ToolFunction.java @@ -1,3 +1,11 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.tools; import java.util.Map; diff --git a/src/main/java/io/github/ollama4j/tools/ToolFunctionCallSpec.java b/src/main/java/io/github/ollama4j/tools/ToolFunctionCallSpec.java index 13d582f..afcefcd 100644 --- a/src/main/java/io/github/ollama4j/tools/ToolFunctionCallSpec.java +++ b/src/main/java/io/github/ollama4j/tools/ToolFunctionCallSpec.java @@ -1,11 +1,18 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.tools; +import java.util.Map; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; -import java.util.Map; - @Data @AllArgsConstructor @NoArgsConstructor diff --git a/src/main/java/io/github/ollama4j/tools/ToolRegistry.java b/src/main/java/io/github/ollama4j/tools/ToolRegistry.java index b106042..273b684 100644 --- a/src/main/java/io/github/ollama4j/tools/ToolRegistry.java +++ b/src/main/java/io/github/ollama4j/tools/ToolRegistry.java @@ -1,28 +1,47 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.tools; -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; +import io.github.ollama4j.exceptions.ToolNotFoundException; +import java.util.*; public class ToolRegistry { - private final Map tools = new HashMap<>(); + private final List tools = new ArrayList<>(); - public ToolFunction getToolFunction(String name) { - final Tools.ToolSpecification toolSpecification = tools.get(name); - return toolSpecification != null ? toolSpecification.getToolFunction() : null; + public ToolFunction getToolFunction(String name) throws ToolNotFoundException { + for (Tools.Tool tool : tools) { + if (tool.getToolSpec().getName().equals(name)) { + return tool.getToolFunction(); + } + } + throw new ToolNotFoundException(String.format("Tool '%s' not found.", name)); } - public void addTool(String name, Tools.ToolSpecification specification) { - tools.put(name, specification); + public void addTool(Tools.Tool tool) { + try { + getToolFunction(tool.getToolSpec().getName()); + } catch (ToolNotFoundException e) { + tools.add(tool); + } } - public Collection getRegisteredSpecs() { - return tools.values(); + public void addTools(List tools) { + for (Tools.Tool tool : tools) { + addTool(tool); + } } - /** - * Removes all registered tools from the registry. - */ + public List getRegisteredTools() { + return tools; + } + + /** Removes all registered tools from the registry. */ public void clear() { tools.clear(); } diff --git a/src/main/java/io/github/ollama4j/tools/Tools.java b/src/main/java/io/github/ollama4j/tools/Tools.java index eb8dcca..79fa8e6 100644 --- a/src/main/java/io/github/ollama4j/tools/Tools.java +++ b/src/main/java/io/github/ollama4j/tools/Tools.java @@ -1,126 +1,119 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.tools; import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.core.JsonProcessingException; -import io.github.ollama4j.utils.Utils; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - public class Tools { - @Data - @Builder - public static class ToolSpecification { - private String functionName; - private String functionDescription; - private PromptFuncDefinition toolPrompt; - private ToolFunction toolFunction; - } + private Tools() {} @Data - @JsonIgnoreProperties(ignoreUnknown = true) @Builder @NoArgsConstructor @AllArgsConstructor - public static class PromptFuncDefinition { - private String type; - private PromptFuncSpec function; + public static class Tool { + @JsonProperty("function") + private ToolSpec toolSpec; - @Data - @Builder - @NoArgsConstructor - @AllArgsConstructor - public static class PromptFuncSpec { - private String name; - private String description; - private Parameters parameters; - } - - @Data - @Builder - @NoArgsConstructor - @AllArgsConstructor - public static class Parameters { - private String type; - private Map properties; - private List required; - } - - @Data - @Builder - @NoArgsConstructor - @AllArgsConstructor - public static class Property { - private String type; - private String description; - @JsonProperty("enum") - @JsonInclude(JsonInclude.Include.NON_NULL) - private List enumValues; - @JsonIgnore - private boolean required; - } + @Builder.Default private String type = "function"; + @JsonIgnore private ToolFunction toolFunction; } - public static class PropsBuilder { - private final Map props = new HashMap<>(); - - public PropsBuilder withProperty(String key, PromptFuncDefinition.Property property) { - props.put(key, property); - return this; - } - - public Map build() { - return props; - } + @Data + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class ToolSpec { + private String name; + private String description; + private Parameters parameters; } - public static class PromptBuilder { - private final List tools = new ArrayList<>(); + @Data + @NoArgsConstructor + @AllArgsConstructor + public static class Parameters { + private Map properties; + private List required = new ArrayList<>(); - private String promptText; - - public String build() throws JsonProcessingException { - return "[AVAILABLE_TOOLS] " + Utils.getObjectMapper().writeValueAsString(tools) + "[/AVAILABLE_TOOLS][INST] " + promptText + " [/INST]"; - } - - public PromptBuilder withPrompt(String prompt) throws JsonProcessingException { - promptText = prompt; - return this; - } - - public PromptBuilder withToolSpecification(ToolSpecification spec) { - PromptFuncDefinition def = new PromptFuncDefinition(); - def.setType("function"); - - PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec(); - functionDetail.setName(spec.getFunctionName()); - functionDetail.setDescription(spec.getFunctionDescription()); - - PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters(); - parameters.setType("object"); - parameters.setProperties(spec.getToolPrompt().getFunction().parameters.getProperties()); - - List requiredValues = new ArrayList<>(); - for (Map.Entry p : spec.getToolPrompt().getFunction().getParameters().getProperties().entrySet()) { - if (p.getValue().isRequired()) { - requiredValues.add(p.getKey()); + public static Parameters of(Map properties) { + Parameters params = new Parameters(); + params.setProperties(properties); + // Optionally, populate required from properties' required flags + if (properties != null) { + for (Map.Entry entry : properties.entrySet()) { + if (entry.getValue() != null && entry.getValue().isRequired()) { + params.getRequired().add(entry.getKey()); + } } } - parameters.setRequired(requiredValues); - functionDetail.setParameters(parameters); - def.setFunction(functionDetail); + return params; + } - tools.add(def); - return this; + @Override + public String toString() { + ObjectNode node = + com.fasterxml.jackson.databind.json.JsonMapper.builder() + .build() + .createObjectNode(); + node.put("type", "object"); + if (properties != null) { + ObjectNode propsNode = node.putObject("properties"); + for (Map.Entry entry : properties.entrySet()) { + ObjectNode propNode = propsNode.putObject(entry.getKey()); + Property prop = entry.getValue(); + propNode.put("type", prop.getType()); + propNode.put("description", prop.getDescription()); + if (prop.getEnumValues() != null) { + propNode.putArray("enum") + .addAll( + prop.getEnumValues().stream() + .map( + com.fasterxml.jackson.databind.node.TextNode + ::new) + .collect(java.util.stream.Collectors.toList())); + } + } + } + if (required != null && !required.isEmpty()) { + node.putArray("required") + .addAll( + required.stream() + .map(com.fasterxml.jackson.databind.node.TextNode::new) + .collect(java.util.stream.Collectors.toList())); + } + return node.toPrettyString(); } } + + @Data + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class Property { + private String type; + private String description; + + @JsonProperty("enum") + @JsonInclude(JsonInclude.Include.NON_NULL) + private List enumValues; + + @JsonIgnore private boolean required; + } } diff --git a/src/main/java/io/github/ollama4j/tools/annotations/OllamaToolService.java b/src/main/java/io/github/ollama4j/tools/annotations/OllamaToolService.java index 5118430..d044fa5 100644 --- a/src/main/java/io/github/ollama4j/tools/annotations/OllamaToolService.java +++ b/src/main/java/io/github/ollama4j/tools/annotations/OllamaToolService.java @@ -1,23 +1,37 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.tools.annotations; import io.github.ollama4j.OllamaAPI; - import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** - * Annotates a class that calls {@link io.github.ollama4j.OllamaAPI} such that the Method - * {@link OllamaAPI#registerAnnotatedTools()} can be used to auto-register all provided classes (resp. all - * contained Methods of the provider classes annotated with {@link ToolSpec}). + * Annotation to mark a class as an Ollama tool service. + *

+ * When a class is annotated with {@code @OllamaToolService}, the method + * {@link OllamaAPI#registerAnnotatedTools()} can be used to automatically register all tool provider + * classes specified in the {@link #providers()} array. All methods in those provider classes that are + * annotated with {@link ToolSpec} will be registered as tools. + *

*/ @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) public @interface OllamaToolService { /** - * @return Classes with no-arg constructor that will be used for tool-registration. + * Specifies the provider classes whose methods annotated with {@link ToolSpec} should be registered as tools. + * Each provider class must have a public no-argument constructor. + * + * @return an array of provider classes to be used for tool registration */ Class[] providers(); } diff --git a/src/main/java/io/github/ollama4j/tools/annotations/ToolProperty.java b/src/main/java/io/github/ollama4j/tools/annotations/ToolProperty.java index 28d9acc..f9721b5 100644 --- a/src/main/java/io/github/ollama4j/tools/annotations/ToolProperty.java +++ b/src/main/java/io/github/ollama4j/tools/annotations/ToolProperty.java @@ -1,3 +1,11 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.tools.annotations; import java.lang.annotation.ElementType; diff --git a/src/main/java/io/github/ollama4j/tools/annotations/ToolSpec.java b/src/main/java/io/github/ollama4j/tools/annotations/ToolSpec.java index 7f99768..04a3efb 100644 --- a/src/main/java/io/github/ollama4j/tools/annotations/ToolSpec.java +++ b/src/main/java/io/github/ollama4j/tools/annotations/ToolSpec.java @@ -1,28 +1,44 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.tools.annotations; import io.github.ollama4j.OllamaAPI; - import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** - * Annotates Methods of classes that should be registered as tools by {@link OllamaAPI#registerAnnotatedTools()} - * automatically. + * Annotation to mark a method as a tool that can be registered automatically by + * {@link OllamaAPI#registerAnnotatedTools()}. + *

+ * Methods annotated with {@code @ToolSpec} will be discovered and registered as tools + * when the containing class is specified as a provider in {@link OllamaToolService}. + *

*/ @Target(ElementType.METHOD) @Retention(RetentionPolicy.RUNTIME) public @interface ToolSpec { /** - * @return tool-name that the method should be used as. Defaults to the methods name. + * Specifies the name of the tool as exposed to the LLM. + * If left empty, the method's name will be used as the tool name. + * + * @return the tool name */ String name() default ""; /** - * @return a detailed description of the method that can be interpreted by the llm, whether it should call the tool - * or not. + * Provides a detailed description of the tool's functionality. + * This description is used by the LLM to determine when to call the tool. + * + * @return the tool description */ String desc(); } diff --git a/src/main/java/io/github/ollama4j/tools/sampletools/WeatherTool.java b/src/main/java/io/github/ollama4j/tools/sampletools/WeatherTool.java deleted file mode 100644 index 7a32ab0..0000000 --- a/src/main/java/io/github/ollama4j/tools/sampletools/WeatherTool.java +++ /dev/null @@ -1,54 +0,0 @@ -package io.github.ollama4j.tools.sampletools; - -import io.github.ollama4j.tools.Tools; - -import java.util.Map; - -@SuppressWarnings("resource") -public class WeatherTool { - private String paramCityName = "cityName"; - - public WeatherTool() { - } - - public String getCurrentWeather(Map arguments) { - String city = (String) arguments.get(paramCityName); - return "It is sunny in " + city; - } - - public Tools.ToolSpecification getSpecification() { - return Tools.ToolSpecification.builder() - .functionName("weather-reporter") - .functionDescription( - "You are a tool who simply finds the city name from the user's message input/query about weather.") - .toolFunction(this::getCurrentWeather) - .toolPrompt( - Tools.PromptFuncDefinition.builder() - .type("prompt") - .function( - Tools.PromptFuncDefinition.PromptFuncSpec - .builder() - .name("get-city-name") - .description("Get the city name") - .parameters( - Tools.PromptFuncDefinition.Parameters - .builder() - .type("object") - .properties( - Map.of( - paramCityName, - Tools.PromptFuncDefinition.Property - .builder() - .type("string") - .description( - "The name of the city. e.g. Bengaluru") - .required(true) - .build())) - .required(java.util.List - .of(paramCityName)) - .build()) - .build()) - .build()) - .build(); - } -} diff --git a/src/main/java/io/github/ollama4j/types/OllamaModelType.java b/src/main/java/io/github/ollama4j/types/OllamaModelType.java deleted file mode 100644 index 8153e84..0000000 --- a/src/main/java/io/github/ollama4j/types/OllamaModelType.java +++ /dev/null @@ -1,87 +0,0 @@ -package io.github.ollama4j.types; - -/** - * A class to provide constants for all the supported models by Ollama. - * - *

Refer to the full list of models and the details here: https://ollama.ai/library - */ -@SuppressWarnings("ALL") -public class OllamaModelType { - public static final String GEMMA = "gemma"; - public static final String GEMMA2 = "gemma2"; - public static final String LLAMA2 = "llama2"; - public static final String LLAMA3 = "llama3"; - public static final String LLAMA3_1 = "llama3.1"; - public static final String MISTRAL = "mistral"; - public static final String MIXTRAL = "mixtral"; - public static final String DEEPSEEK_R1 = "deepseek-r1"; - public static final String LLAVA = "llava"; - public static final String LLAVA_PHI3 = "llava-phi3"; - public static final String NEURAL_CHAT = "neural-chat"; - public static final String CODELLAMA = "codellama"; - public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral"; - public static final String MISTRAL_OPENORCA = "mistral-openorca"; - public static final String LLAMA2_UNCENSORED = "llama2-uncensored"; - public static final String PHI = "phi"; - public static final String PHI3 = "phi3"; - public static final String ORCA_MINI = "orca-mini"; - public static final String DEEPSEEK_CODER = "deepseek-coder"; - public static final String DOLPHIN_MISTRAL = "dolphin-mistral"; - public static final String VICUNA = "vicuna"; - public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored"; - public static final String ZEPHYR = "zephyr"; - public static final String OPENHERMES = "openhermes"; - public static final String QWEN = "qwen"; - public static final String QWEN2 = "qwen2"; - public static final String WIZARDCODER = "wizardcoder"; - public static final String LLAMA2_CHINESE = "llama2-chinese"; - public static final String TINYLLAMA = "tinyllama"; - public static final String PHIND_CODELLAMA = "phind-codellama"; - public static final String OPENCHAT = "openchat"; - public static final String ORCA2 = "orca2"; - public static final String FALCON = "falcon"; - public static final String WIZARD_MATH = "wizard-math"; - public static final String TINYDOLPHIN = "tinydolphin"; - public static final String NOUS_HERMES = "nous-hermes"; - public static final String YI = "yi"; - public static final String DOLPHIN_PHI = "dolphin-phi"; - public static final String STARLING_LM = "starling-lm"; - public static final String STARCODER = "starcoder"; - public static final String CODEUP = "codeup"; - public static final String MEDLLAMA2 = "medllama2"; - public static final String STABLE_CODE = "stable-code"; - public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored"; - public static final String BAKLLAVA = "bakllava"; - public static final String EVERYTHINGLM = "everythinglm"; - public static final String SOLAR = "solar"; - public static final String STABLE_BELUGA = "stable-beluga"; - public static final String SQLCODER = "sqlcoder"; - public static final String YARN_MISTRAL = "yarn-mistral"; - public static final String NOUS_HERMES2_MIXTRAL = "nous-hermes2-mixtral"; - public static final String SAMANTHA_MISTRAL = "samantha-mistral"; - public static final String STABLELM_ZEPHYR = "stablelm-zephyr"; - public static final String MEDITRON = "meditron"; - public static final String WIZARD_VICUNA = "wizard-vicuna"; - public static final String STABLELM2 = "stablelm2"; - public static final String MAGICODER = "magicoder"; - public static final String YARN_LLAMA2 = "yarn-llama2"; - public static final String NOUS_HERMES2 = "nous-hermes2"; - public static final String DEEPSEEK_LLM = "deepseek-llm"; - public static final String LLAMA_PRO = "llama-pro"; - public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2"; - public static final String CODEBOOGA = "codebooga"; - public static final String MISTRALLITE = "mistrallite"; - public static final String NEXUSRAVEN = "nexusraven"; - public static final String GOLIATH = "goliath"; - public static final String NOMIC_EMBED_TEXT = "nomic-embed-text"; - public static final String NOTUX = "notux"; - public static final String ALFRED = "alfred"; - public static final String MEGADOLPHIN = "megadolphin"; - public static final String WIZARDLM = "wizardlm"; - public static final String XWINLM = "xwinlm"; - public static final String NOTUS = "notus"; - public static final String DUCKDB_NSQL = "duckdb-nsql"; - public static final String ALL_MINILM = "all-minilm"; - public static final String CODESTRAL = "codestral"; -} diff --git a/src/main/java/io/github/ollama4j/utils/BooleanToJsonFormatFlagSerializer.java b/src/main/java/io/github/ollama4j/utils/BooleanToJsonFormatFlagSerializer.java index 590b59e..6608097 100644 --- a/src/main/java/io/github/ollama4j/utils/BooleanToJsonFormatFlagSerializer.java +++ b/src/main/java/io/github/ollama4j/utils/BooleanToJsonFormatFlagSerializer.java @@ -1,21 +1,28 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.utils; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.JsonSerializer; import com.fasterxml.jackson.databind.SerializerProvider; - import java.io.IOException; -public class BooleanToJsonFormatFlagSerializer extends JsonSerializer{ +public class BooleanToJsonFormatFlagSerializer extends JsonSerializer { @Override - public void serialize(Boolean value, JsonGenerator gen, SerializerProvider serializers) throws IOException { - gen.writeString("json"); + public void serialize(Boolean value, JsonGenerator gen, SerializerProvider serializers) + throws IOException { + gen.writeString("json"); } @Override - public boolean isEmpty(SerializerProvider provider,Boolean value){ + public boolean isEmpty(SerializerProvider provider, Boolean value) { return !value; } - } diff --git a/src/main/java/io/github/ollama4j/utils/Constants.java b/src/main/java/io/github/ollama4j/utils/Constants.java index dfe5377..fbe0958 100644 --- a/src/main/java/io/github/ollama4j/utils/Constants.java +++ b/src/main/java/io/github/ollama4j/utils/Constants.java @@ -1,9 +1,18 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.utils; public final class Constants { + private Constants() {} + public static final class HttpConstants { - private HttpConstants() { - } + private HttpConstants() {} public static final String APPLICATION_JSON = "application/json"; public static final String APPLICATION_XML = "application/xml"; diff --git a/src/main/java/io/github/ollama4j/utils/FileToBase64Serializer.java b/src/main/java/io/github/ollama4j/utils/FileToBase64Serializer.java index c54d83f..9fe2ece 100644 --- a/src/main/java/io/github/ollama4j/utils/FileToBase64Serializer.java +++ b/src/main/java/io/github/ollama4j/utils/FileToBase64Serializer.java @@ -1,9 +1,16 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.utils; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.JsonSerializer; import com.fasterxml.jackson.databind.SerializerProvider; - import java.io.IOException; import java.util.Base64; import java.util.Collection; @@ -11,7 +18,9 @@ import java.util.Collection; public class FileToBase64Serializer extends JsonSerializer> { @Override - public void serialize(Collection value, JsonGenerator jsonGenerator, SerializerProvider serializers) throws IOException { + public void serialize( + Collection value, JsonGenerator jsonGenerator, SerializerProvider serializers) + throws IOException { jsonGenerator.writeStartArray(); for (byte[] file : value) { jsonGenerator.writeString(Base64.getEncoder().encodeToString(file)); diff --git a/src/main/java/io/github/ollama4j/utils/OllamaRequestBody.java b/src/main/java/io/github/ollama4j/utils/OllamaRequestBody.java index 805cec4..f6abf19 100644 --- a/src/main/java/io/github/ollama4j/utils/OllamaRequestBody.java +++ b/src/main/java/io/github/ollama4j/utils/OllamaRequestBody.java @@ -1,8 +1,15 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.utils; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.core.JsonProcessingException; - import java.net.http.HttpRequest.BodyPublisher; import java.net.http.HttpRequest.BodyPublishers; @@ -17,12 +24,11 @@ public interface OllamaRequestBody { * @return JSON representation of a OllamaRequest */ @JsonIgnore - default BodyPublisher getBodyPublisher(){ - try { - return BodyPublishers.ofString( - Utils.getObjectMapper().writeValueAsString(this)); + default BodyPublisher getBodyPublisher() { + try { + return BodyPublishers.ofString(Utils.getObjectMapper().writeValueAsString(this)); } catch (JsonProcessingException e) { - throw new IllegalArgumentException("Request not Body convertible.",e); + throw new IllegalArgumentException("Request not Body convertible.", e); } } } diff --git a/src/main/java/io/github/ollama4j/utils/Options.java b/src/main/java/io/github/ollama4j/utils/Options.java index c4ea79d..36b5264 100644 --- a/src/main/java/io/github/ollama4j/utils/Options.java +++ b/src/main/java/io/github/ollama4j/utils/Options.java @@ -1,12 +1,23 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.utils; +import java.util.Map; +import lombok.Builder; import lombok.Data; -import java.util.Map; - -/** Class for options for Ollama model. */ +/** + * Class for options for Ollama model. + */ @Data +@Builder public class Options { - private final Map optionsMap; + private final Map optionsMap; } diff --git a/src/main/java/io/github/ollama4j/utils/OptionsBuilder.java b/src/main/java/io/github/ollama4j/utils/OptionsBuilder.java index 6ee8392..21d89a7 100644 --- a/src/main/java/io/github/ollama4j/utils/OptionsBuilder.java +++ b/src/main/java/io/github/ollama4j/utils/OptionsBuilder.java @@ -1,248 +1,259 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.utils; import java.util.HashMap; -/** Builder class for creating options for Ollama model. */ +/** + * Builder class for creating options for Ollama model. + */ public class OptionsBuilder { - private final Options options; + private final Options options; - /** Constructs a new OptionsBuilder with an empty options map. */ - public OptionsBuilder() { - this.options = new Options(new HashMap<>()); - } - - /** - * Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 - * = Mirostat 2.0) - * - * @param value The value for the "mirostat" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setMirostat(int value) { - options.getOptionsMap().put("mirostat", value); - return this; - } - - /** - * Influences how quickly the algorithm responds to feedback from the generated text. A lower - * learning rate will result in slower adjustments, while a higher learning rate will make the - * algorithm more responsive. (Default: 0.1) - * - * @param value The value for the "mirostat_eta" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setMirostatEta(float value) { - options.getOptionsMap().put("mirostat_eta", value); - return this; - } - - /** - * Controls the balance between coherence and diversity of the output. A lower value will result - * in more focused and coherent text. (Default: 5.0) - * - * @param value The value for the "mirostat_tau" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setMirostatTau(float value) { - options.getOptionsMap().put("mirostat_tau", value); - return this; - } - - /** - * Sets the size of the context window used to generate the next token. (Default: 2048) - * - * @param value The value for the "num_ctx" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setNumCtx(int value) { - options.getOptionsMap().put("num_ctx", value); - return this; - } - - /** - * The number of GQA groups in the transformer layer. Required for some models, for example, it is - * 8 for llama2:70b. - * - * @param value The value for the "num_gqa" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setNumGqa(int value) { - options.getOptionsMap().put("num_gqa", value); - return this; - } - - /** - * The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, - * 0 to disable. - * - * @param value The value for the "num_gpu" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setNumGpu(int value) { - options.getOptionsMap().put("num_gpu", value); - return this; - } - - /** - * Sets the number of threads to use during computation. By default, Ollama will detect this for - * optimal performance. It is recommended to set this value to the number of physical CPU cores - * your system has (as opposed to the logical number of cores). - * - * @param value The value for the "num_thread" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setNumThread(int value) { - options.getOptionsMap().put("num_thread", value); - return this; - } - - /** - * Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, - * -1 = num_ctx) - * - * @param value The value for the "repeat_last_n" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setRepeatLastN(int value) { - options.getOptionsMap().put("repeat_last_n", value); - return this; - } - - /** - * Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions - * more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1) - * - * @param value The value for the "repeat_penalty" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setRepeatPenalty(float value) { - options.getOptionsMap().put("repeat_penalty", value); - return this; - } - - /** - * The temperature of the model. Increasing the temperature will make the model answer more - * creatively. (Default: 0.8) - * - * @param value The value for the "temperature" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setTemperature(float value) { - options.getOptionsMap().put("temperature", value); - return this; - } - - /** - * Sets the random number seed to use for generation. Setting this to a specific number will make - * the model generate the same text for the same prompt. (Default: 0) - * - * @param value The value for the "seed" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setSeed(int value) { - options.getOptionsMap().put("seed", value); - return this; - } - - /** - * Sets the stop sequences to use. When this pattern is encountered the LLM will stop generating - * text and return. Multiple stop patterns may be set by specifying multiple separate `stop` - * parameters in a modelfile. - * - * @param value The value for the "stop" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setStop(String value) { - options.getOptionsMap().put("stop", value); - return this; - } - - /** - * Tail free sampling is used to reduce the impact of less probable tokens from the output. A - * higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this - * setting. (default: 1) - * - * @param value The value for the "tfs_z" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setTfsZ(float value) { - options.getOptionsMap().put("tfs_z", value); - return this; - } - - /** - * Maximum number of tokens to predict when generating text. (Default: 128, -1 = infinite - * generation, -2 = fill context) - * - * @param value The value for the "num_predict" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setNumPredict(int value) { - options.getOptionsMap().put("num_predict", value); - return this; - } - - /** - * Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more - * diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40) - * - * @param value The value for the "top_k" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setTopK(int value) { - options.getOptionsMap().put("top_k", value); - return this; - } - - /** - * Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a - * lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9) - * - * @param value The value for the "top_p" parameter. - * @return The updated OptionsBuilder. - */ - public OptionsBuilder setTopP(float value) { - options.getOptionsMap().put("top_p", value); - return this; - } - - /** - * Alternative to the top_p, and aims to ensure a balance of qualityand variety. The parameter p - * represents the minimum probability for a token to be considered, relative to the probability - * of the most likely token. For example, with p=0.05 and the most likely token having a - * probability of 0.9, logits with a value less than 0.045 are filtered out. (Default: 0.0) - */ - public OptionsBuilder setMinP(float value) { - options.getOptionsMap().put("min_p", value); - return this; - } - - /** - * Allows passing an option not formally supported by the library - * @param name The option name for the parameter. - * @param value The value for the "{name}" parameter. - * @return The updated OptionsBuilder. - * @throws IllegalArgumentException if parameter has an unsupported type - */ - public OptionsBuilder setCustomOption(String name, Object value) throws IllegalArgumentException { - if (!(value instanceof Integer || value instanceof Float || value instanceof String)) { - throw new IllegalArgumentException("Invalid type for parameter. Allowed types are: Integer, Float, or String."); + /** + * Constructs a new OptionsBuilder with an empty options map. + */ + public OptionsBuilder() { + this.options = new Options(new HashMap<>()); } - options.getOptionsMap().put(name, value); - return this; - } + /** + * Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 + * = Mirostat 2.0) + * + * @param value The value for the "mirostat" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setMirostat(int value) { + options.getOptionsMap().put("mirostat", value); + return this; + } + /** + * Influences how quickly the algorithm responds to feedback from the generated text. A lower + * learning rate will result in slower adjustments, while a higher learning rate will make the + * algorithm more responsive. (Default: 0.1) + * + * @param value The value for the "mirostat_eta" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setMirostatEta(float value) { + options.getOptionsMap().put("mirostat_eta", value); + return this; + } - /** - * Builds the options map. - * - * @return The populated options map. - */ - public Options build() { - return options; - } + /** + * Controls the balance between coherence and diversity of the output. A lower value will result + * in more focused and coherent text. (Default: 5.0) + * + * @param value The value for the "mirostat_tau" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setMirostatTau(float value) { + options.getOptionsMap().put("mirostat_tau", value); + return this; + } + /** + * Sets the size of the context window used to generate the next token. (Default: 2048) + * + * @param value The value for the "num_ctx" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setNumCtx(int value) { + options.getOptionsMap().put("num_ctx", value); + return this; + } + /** + * The number of GQA groups in the transformer layer. Required for some models, for example, it is + * 8 for llama2:70b. + * + * @param value The value for the "num_gqa" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setNumGqa(int value) { + options.getOptionsMap().put("num_gqa", value); + return this; + } + + /** + * The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, + * 0 to disable. + * + * @param value The value for the "num_gpu" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setNumGpu(int value) { + options.getOptionsMap().put("num_gpu", value); + return this; + } + + /** + * Sets the number of threads to use during computation. By default, Ollama will detect this for + * optimal performance. It is recommended to set this value to the number of physical CPU cores + * your system has (as opposed to the logical number of cores). + * + * @param value The value for the "num_thread" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setNumThread(int value) { + options.getOptionsMap().put("num_thread", value); + return this; + } + + /** + * Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, + * -1 = num_ctx) + * + * @param value The value for the "repeat_last_n" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setRepeatLastN(int value) { + options.getOptionsMap().put("repeat_last_n", value); + return this; + } + + /** + * Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions + * more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1) + * + * @param value The value for the "repeat_penalty" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setRepeatPenalty(float value) { + options.getOptionsMap().put("repeat_penalty", value); + return this; + } + + /** + * The temperature of the model. Increasing the temperature will make the model answer more + * creatively. (Default: 0.8) + * + * @param value The value for the "temperature" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setTemperature(float value) { + options.getOptionsMap().put("temperature", value); + return this; + } + + /** + * Sets the random number seed to use for generation. Setting this to a specific number will make + * the model generate the same text for the same prompt. (Default: 0) + * + * @param value The value for the "seed" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setSeed(int value) { + options.getOptionsMap().put("seed", value); + return this; + } + + /** + * Sets the stop sequences to use. When this pattern is encountered the LLM will stop generating + * text and return. Multiple stop patterns may be set by specifying multiple separate `stop` + * parameters in a modelfile. + * + * @param value The value for the "stop" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setStop(String value) { + options.getOptionsMap().put("stop", value); + return this; + } + + /** + * Tail free sampling is used to reduce the impact of less probable tokens from the output. A + * higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this + * setting. (default: 1) + * + * @param value The value for the "tfs_z" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setTfsZ(float value) { + options.getOptionsMap().put("tfs_z", value); + return this; + } + + /** + * Maximum number of tokens to predict when generating text. (Default: 128, -1 = infinite + * generation, -2 = fill context) + * + * @param value The value for the "num_predict" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setNumPredict(int value) { + options.getOptionsMap().put("num_predict", value); + return this; + } + + /** + * Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more + * diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40) + * + * @param value The value for the "top_k" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setTopK(int value) { + options.getOptionsMap().put("top_k", value); + return this; + } + + /** + * Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a + * lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9) + * + * @param value The value for the "top_p" parameter. + * @return The updated OptionsBuilder. + */ + public OptionsBuilder setTopP(float value) { + options.getOptionsMap().put("top_p", value); + return this; + } + + /** + * Alternative to the top_p, and aims to ensure a balance of qualityand variety. The parameter p + * represents the minimum probability for a token to be considered, relative to the probability + * of the most likely token. For example, with p=0.05 and the most likely token having a + * probability of 0.9, logits with a value less than 0.045 are filtered out. (Default: 0.0) + */ + public OptionsBuilder setMinP(float value) { + options.getOptionsMap().put("min_p", value); + return this; + } + + /** + * Allows passing an option not formally supported by the library + * + * @param name The option name for the parameter. + * @param value The value for the "{name}" parameter. + * @return The updated OptionsBuilder. + * @throws IllegalArgumentException if parameter has an unsupported type + */ + public OptionsBuilder setCustomOption(String name, Object value) + throws IllegalArgumentException { + if (!(value instanceof Integer || value instanceof Float || value instanceof String)) { + throw new IllegalArgumentException( + "Invalid type for parameter. Allowed types are: Integer, Float, or String."); + } + options.getOptionsMap().put(name, value); + return this; + } + + /** + * Builds the options map. + * + * @return The populated options map. + */ + public Options build() { + return options; + } } diff --git a/src/main/java/io/github/ollama4j/utils/PromptBuilder.java b/src/main/java/io/github/ollama4j/utils/PromptBuilder.java index bb24ef8..fbb4fed 100644 --- a/src/main/java/io/github/ollama4j/utils/PromptBuilder.java +++ b/src/main/java/io/github/ollama4j/utils/PromptBuilder.java @@ -1,3 +1,11 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.utils; /** @@ -18,52 +26,54 @@ package io.github.ollama4j.utils; */ public class PromptBuilder { - private final StringBuilder prompt; + private final StringBuilder prompt; - /** Constructs a new {@code PromptBuilder} with an empty prompt. */ - public PromptBuilder() { - this.prompt = new StringBuilder(); - } + /** + * Constructs a new {@code PromptBuilder} with an empty prompt. + */ + public PromptBuilder() { + this.prompt = new StringBuilder(); + } - /** - * Appends the specified text to the prompt. - * - * @param text the text to be added to the prompt - * @return a reference to this {@code PromptBuilder} instance for method chaining - */ - public PromptBuilder add(String text) { - prompt.append(text); - return this; - } + /** + * Appends the specified text to the prompt. + * + * @param text the text to be added to the prompt + * @return a reference to this {@code PromptBuilder} instance for method chaining + */ + public PromptBuilder add(String text) { + prompt.append(text); + return this; + } - /** - * Appends the specified text followed by a newline character to the prompt. - * - * @param text the text to be added as a line to the prompt - * @return a reference to this {@code PromptBuilder} instance for method chaining - */ - public PromptBuilder addLine(String text) { - prompt.append(text).append("\n"); - return this; - } + /** + * Appends the specified text followed by a newline character to the prompt. + * + * @param text the text to be added as a line to the prompt + * @return a reference to this {@code PromptBuilder} instance for method chaining + */ + public PromptBuilder addLine(String text) { + prompt.append(text).append("\n"); + return this; + } - /** - * Appends a separator line to the prompt. The separator is a newline followed by a line of - * dashes. - * - * @return a reference to this {@code PromptBuilder} instance for method chaining - */ - public PromptBuilder addSeparator() { - prompt.append("\n--------------------------------------------------\n"); - return this; - } + /** + * Appends a separator line to the prompt. The separator is a newline followed by a line of + * dashes. + * + * @return a reference to this {@code PromptBuilder} instance for method chaining + */ + public PromptBuilder addSeparator() { + prompt.append("\n--------------------------------------------------\n"); + return this; + } - /** - * Builds and returns the final prompt as a string. - * - * @return the final prompt as a string - */ - public String build() { - return prompt.toString(); - } + /** + * Builds and returns the final prompt as a string. + * + * @return the final prompt as a string + */ + public String build() { + return prompt.toString(); + } } diff --git a/src/main/java/io/github/ollama4j/utils/Utils.java b/src/main/java/io/github/ollama4j/utils/Utils.java index 6d2aa5e..3a24206 100644 --- a/src/main/java/io/github/ollama4j/utils/Utils.java +++ b/src/main/java/io/github/ollama4j/utils/Utils.java @@ -1,18 +1,31 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.utils; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; - -import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; -import java.io.InputStream; import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.time.Duration; import java.util.Objects; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class Utils { + private Utils() {} + + private static final Logger LOG = LoggerFactory.getLogger(Utils.class); private static ObjectMapper objectMapper; @@ -24,22 +37,51 @@ public class Utils { return objectMapper; } - public static byte[] loadImageBytesFromUrl(String imageUrl) - throws IOException, URISyntaxException { - URL url = new URI(imageUrl).toURL(); - try (InputStream in = url.openStream(); - ByteArrayOutputStream out = new ByteArrayOutputStream()) { - byte[] buffer = new byte[1024]; - int bytesRead; - while ((bytesRead = in.read(buffer)) != -1) { - out.write(buffer, 0, bytesRead); - } - return out.toByteArray(); + public static byte[] loadImageBytesFromUrl( + String imageUrl, int connectTimeoutSeconds, int readTimeoutSeconds) + throws IOException, InterruptedException { + LOG.debug( + "Attempting to load image from URL: {} (connectTimeout={}s, readTimeout={}s)", + imageUrl, + connectTimeoutSeconds, + readTimeoutSeconds); + HttpClient client = + HttpClient.newBuilder() + .connectTimeout(Duration.ofSeconds(connectTimeoutSeconds)) + .build(); + HttpRequest request = + HttpRequest.newBuilder() + .uri(URI.create(imageUrl)) + .timeout(Duration.ofSeconds(readTimeoutSeconds)) + .header("User-Agent", "Mozilla/5.0") + .GET() + .build(); + LOG.debug("Sending HTTP GET request to {}", imageUrl); + HttpResponse response = + client.send(request, HttpResponse.BodyHandlers.ofByteArray()); + LOG.debug("Received HTTP response with status code: {}", response.statusCode()); + if (response.statusCode() >= 200 && response.statusCode() < 300) { + LOG.debug( + "Successfully loaded image from URL: {} ({} bytes)", + imageUrl, + response.body().length); + return response.body(); + } else { + LOG.error( + "Failed to load image from URL: {}. HTTP status: {}", + imageUrl, + response.statusCode()); + throw new IOException("Failed to load image: HTTP " + response.statusCode()); } } public static File getFileFromClasspath(String fileName) { + LOG.debug("Trying to load file from classpath: {}", fileName); ClassLoader classLoader = Utils.class.getClassLoader(); return new File(Objects.requireNonNull(classLoader.getResource(fileName)).getFile()); } + + public static String toJSON(Object object) throws JsonProcessingException { + return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(object); + } } diff --git a/src/test/java/io/github/ollama4j/integrationtests/OllamaAPIIntegrationTest.java b/src/test/java/io/github/ollama4j/integrationtests/OllamaAPIIntegrationTest.java index 51d8edf..c86856e 100644 --- a/src/test/java/io/github/ollama4j/integrationtests/OllamaAPIIntegrationTest.java +++ b/src/test/java/io/github/ollama4j/integrationtests/OllamaAPIIntegrationTest.java @@ -1,20 +1,37 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.integrationtests; +import static org.junit.jupiter.api.Assertions.*; + import io.github.ollama4j.OllamaAPI; -import io.github.ollama4j.exceptions.OllamaBaseException; -import io.github.ollama4j.exceptions.ToolInvocationException; +import io.github.ollama4j.exceptions.OllamaException; +import io.github.ollama4j.impl.ConsoleOutputChatTokenHandler; +import io.github.ollama4j.impl.ConsoleOutputGenerateTokenHandler; import io.github.ollama4j.models.chat.*; -import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel; -import io.github.ollama4j.models.response.LibraryModel; +import io.github.ollama4j.models.embed.OllamaEmbedRequest; +import io.github.ollama4j.models.embed.OllamaEmbedResult; +import io.github.ollama4j.models.generate.OllamaGenerateRequest; +import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder; +import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver; import io.github.ollama4j.models.response.Model; import io.github.ollama4j.models.response.ModelDetail; import io.github.ollama4j.models.response.OllamaResult; import io.github.ollama4j.samples.AnnotatedTool; import io.github.ollama4j.tools.OllamaToolCallsFunction; -import io.github.ollama4j.tools.ToolFunction; import io.github.ollama4j.tools.Tools; import io.github.ollama4j.tools.annotations.OllamaToolService; import io.github.ollama4j.utils.OptionsBuilder; +import java.io.File; +import java.io.IOException; +import java.util.*; +import java.util.concurrent.CountDownLatch; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.MethodOrderer.OrderAnnotation; import org.junit.jupiter.api.Order; @@ -24,18 +41,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testcontainers.ollama.OllamaContainer; -import java.io.File; -import java.io.IOException; -import java.net.ConnectException; -import java.net.URISyntaxException; -import java.util.*; - -import static org.junit.jupiter.api.Assertions.*; - @OllamaToolService(providers = {AnnotatedTool.class}) @TestMethodOrder(OrderAnnotation.class) - -@SuppressWarnings({"HttpUrlsUsage", "SpellCheckingInspection"}) +@SuppressWarnings({"HttpUrlsUsage", "SpellCheckingInspection", "FieldCanBeLocal", "ConstantValue"}) class OllamaAPIIntegrationTest { private static final Logger LOG = LoggerFactory.getLogger(OllamaAPIIntegrationTest.class); @@ -44,22 +52,65 @@ class OllamaAPIIntegrationTest { private static final String EMBEDDING_MODEL = "all-minilm"; private static final String VISION_MODEL = "moondream:1.8b"; - private static final String THINKING_TOOL_MODEL = "gpt-oss:20b"; + private static final String THINKING_TOOL_MODEL = "deepseek-r1:1.5b"; + private static final String THINKING_TOOL_MODEL_2 = "qwen3:0.6b"; private static final String GENERAL_PURPOSE_MODEL = "gemma3:270m"; private static final String TOOLS_MODEL = "mistral:7b"; + /** + * Initializes the OllamaAPI instance for integration tests. + * + *

This method sets up the OllamaAPI client, either using an external Ollama host (if + * environment variables are set) or by starting a Testcontainers-based Ollama instance. It also + * configures request timeout and model pull retry settings. + */ @BeforeAll static void setUp() { + // ... (no javadoc needed for private setup logic) + int requestTimeoutSeconds = 60; + int numberOfRetriesForModelPull = 5; + try { - boolean useExternalOllamaHost = Boolean.parseBoolean(System.getenv("USE_EXTERNAL_OLLAMA_HOST")); - String ollamaHost = System.getenv("OLLAMA_HOST"); + String useExternalOllamaHostEnv = System.getenv("USE_EXTERNAL_OLLAMA_HOST"); + String ollamaHostEnv = System.getenv("OLLAMA_HOST"); + + boolean useExternalOllamaHost; + String ollamaHost; + + if (useExternalOllamaHostEnv == null && ollamaHostEnv == null) { + Properties props = new Properties(); + try { + props.load( + OllamaAPIIntegrationTest.class + .getClassLoader() + .getResourceAsStream("test-config.properties")); + } catch (Exception e) { + throw new RuntimeException( + "Could not load test-config.properties from classpath", e); + } + useExternalOllamaHost = + Boolean.parseBoolean( + props.getProperty("USE_EXTERNAL_OLLAMA_HOST", "false")); + ollamaHost = props.getProperty("OLLAMA_HOST"); + requestTimeoutSeconds = + Integer.parseInt(props.getProperty("REQUEST_TIMEOUT_SECONDS")); + numberOfRetriesForModelPull = + Integer.parseInt(props.getProperty("NUMBER_RETRIES_FOR_MODEL_PULL")); + } else { + useExternalOllamaHost = Boolean.parseBoolean(useExternalOllamaHostEnv); + ollamaHost = ollamaHostEnv; + } if (useExternalOllamaHost) { - LOG.info("Using external Ollama host..."); + LOG.info("Using external Ollama host: {}", ollamaHost); api = new OllamaAPI(ollamaHost); } else { throw new RuntimeException( - "USE_EXTERNAL_OLLAMA_HOST is not set so, we will be using Testcontainers Ollama host for the tests now. If you would like to use an external host, please set the env var to USE_EXTERNAL_OLLAMA_HOST=true and set the env var OLLAMA_HOST=http://localhost:11435 or a different host/port."); + "USE_EXTERNAL_OLLAMA_HOST is not set so, we will be using Testcontainers" + + " Ollama host for the tests now. If you would like to use an external" + + " host, please set the env var to USE_EXTERNAL_OLLAMA_HOST=true and" + + " set the env var OLLAMA_HOST=http://localhost:11435 or a different" + + " host/port."); } } catch (Exception e) { String ollamaVersion = "0.6.1"; @@ -72,689 +123,1416 @@ class OllamaAPIIntegrationTest { ollama.setPortBindings(portBindings); ollama.start(); LOG.info("Using Testcontainer Ollama host..."); - api = new OllamaAPI("http://" + ollama.getHost() + ":" + ollama.getMappedPort(internalPort)); + api = + new OllamaAPI( + "http://" + + ollama.getHost() + + ":" + + ollama.getMappedPort(internalPort)); } - api.setRequestTimeoutSeconds(120); - api.setNumberOfRetriesForModelPull(5); + api.setRequestTimeoutSeconds(requestTimeoutSeconds); + api.setNumberOfRetriesForModelPull(numberOfRetriesForModelPull); } + /** + * Verifies that a ConnectException is thrown when attempting to connect to a non-existent + * Ollama endpoint. + * + *

Scenario: Ensures the API client fails gracefully when the Ollama server is unreachable. + */ @Test @Order(1) - void testWrongEndpoint() { + void shouldThrowConnectExceptionForWrongEndpoint() { OllamaAPI ollamaAPI = new OllamaAPI("http://wrong-host:11434"); - assertThrows(ConnectException.class, ollamaAPI::listModels); + assertThrows(OllamaException.class, ollamaAPI::listModels); } + /** + * Tests retrieval of the Ollama server version. + * + *

Scenario: Calls the /api/version endpoint and asserts a non-null version string is + * returned. + */ @Test @Order(1) - void testVersionAPI() throws URISyntaxException, IOException, OllamaBaseException, InterruptedException { - // String expectedVersion = ollama.getDockerImageName().split(":")[1]; - String actualVersion = api.getVersion(); - assertNotNull(actualVersion); - // assertEquals(expectedVersion, actualVersion, "Version should match the Docker - // image version"); + void shouldReturnVersionFromVersionAPI() throws OllamaException { + String version = api.getVersion(); + assertNotNull(version); } + /** + * Tests the /api/ping endpoint for server liveness. + * + *

Scenario: Ensures the Ollama server responds to ping requests. + */ @Test @Order(1) - void testPing() throws URISyntaxException, IOException, OllamaBaseException, InterruptedException { + void shouldPingSuccessfully() throws OllamaException { boolean pingResponse = api.ping(); assertTrue(pingResponse, "Ping should return true"); } + /** + * Tests listing all available models from the Ollama server. + * + *

Scenario: Calls /api/tags and verifies the returned list is not null (may be empty). + */ @Test @Order(2) - void testListModelsAPI() throws URISyntaxException, IOException, OllamaBaseException, InterruptedException { - // Fetch the list of models + void shouldListModels() throws OllamaException { List models = api.listModels(); - // Assert that the models list is not null assertNotNull(models, "Models should not be null"); - // Assert that models list is either empty or contains more than 0 models - assertTrue(models.size() >= 0, "Models list should not be empty"); + assertTrue(models.size() >= 0, "Models list can be empty or contain elements"); } @Test @Order(2) - void testListModelsFromLibrary() - throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { - List models = api.listModelsFromLibrary(); - assertNotNull(models); - assertFalse(models.isEmpty()); + void shouldUnloadModel() { + final String model = GENERAL_PURPOSE_MODEL; + assertDoesNotThrow( + () -> api.unloadModel(model), "unloadModel should not throw any exception"); } + /** + * Tests pulling a model and verifying it appears in the model list. + * + *

Scenario: Pulls an embedding model, then checks that it is present in the list of models. + */ @Test @Order(3) - void testPullModelAPI() throws URISyntaxException, IOException, OllamaBaseException, InterruptedException { + void shouldPullModelAndListModels() throws OllamaException { api.pullModel(EMBEDDING_MODEL); List models = api.listModels(); assertNotNull(models, "Models should not be null"); assertFalse(models.isEmpty(), "Models list should contain elements"); } + /** + * Tests fetching detailed information for a specific model. + * + *

Scenario: Pulls a model and retrieves its details, asserting the model file contains the + * model name. + */ @Test @Order(4) - void testListModelDetails() throws IOException, OllamaBaseException, URISyntaxException, InterruptedException { + void shouldGetModelDetails() throws OllamaException { api.pullModel(EMBEDDING_MODEL); ModelDetail modelDetails = api.getModelDetails(EMBEDDING_MODEL); assertNotNull(modelDetails); assertTrue(modelDetails.getModelFile().contains(EMBEDDING_MODEL)); } + /** + * Tests generating embeddings for a batch of input texts. + * + *

Scenario: Uses the embedding model to generate vector embeddings for two input sentences. + */ @Test @Order(5) - void testEmbeddings() throws Exception { + void shouldReturnEmbeddings() throws Exception { api.pullModel(EMBEDDING_MODEL); - OllamaEmbedResponseModel embeddings = api.embed(EMBEDDING_MODEL, - Arrays.asList("Why is the sky blue?", "Why is the grass green?")); + OllamaEmbedRequest m = new OllamaEmbedRequest(); + m.setModel(EMBEDDING_MODEL); + m.setInput(Arrays.asList("Why is the sky blue?", "Why is the grass green?")); + OllamaEmbedResult embeddings = api.embed(m); assertNotNull(embeddings, "Embeddings should not be null"); assertFalse(embeddings.getEmbeddings().isEmpty(), "Embeddings should not be empty"); } + /** + * Tests generating structured output using the 'format' parameter. + * + *

Scenario: Calls generateWithFormat with a prompt and a JSON schema, expecting a structured + * response. Usage: generate with format, no thinking, no streaming. + */ @Test @Order(6) - void testGenerateWithStructuredOutput() - throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { + void shouldGenerateWithStructuredOutput() throws OllamaException { api.pullModel(TOOLS_MODEL); - String prompt = "The sun is shining brightly and is directly overhead at the zenith, casting my shadow over my foot, so it must be noon."; + String prompt = + "The sun is shining brightly and is directly overhead at the zenith, casting my" + + " shadow over my foot, so it must be noon."; Map format = new HashMap<>(); format.put("type", "object"); - format.put("properties", new HashMap() { - { - put("isNoon", new HashMap() { + format.put( + "properties", + new HashMap() { { - put("type", "boolean"); + put( + "isNoon", + new HashMap() { + { + put("type", "boolean"); + } + }); } }); - } - }); format.put("required", List.of("isNoon")); - OllamaResult result = api.generate(TOOLS_MODEL, prompt, format); + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(TOOLS_MODEL) + .withPrompt(prompt) + .withFormat(format) + .build(); + OllamaGenerateStreamObserver handler = null; + OllamaResult result = api.generate(request, handler); assertNotNull(result); assertNotNull(result.getResponse()); assertFalse(result.getResponse().isEmpty()); - - assertEquals(true, result.getStructuredResponse().get("isNoon")); + assertNotNull(result.getStructuredResponse().get("isNoon")); } + /** + * Tests basic text generation with default options. + * + *

Scenario: Calls generate with a general-purpose model, no thinking, no streaming, no + * format. Usage: generate, raw=false, think=false, no streaming. + */ @Test @Order(6) - void testGennerateModelWithDefaultOptions() - throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { + void shouldGenerateWithDefaultOptions() throws OllamaException { api.pullModel(GENERAL_PURPOSE_MODEL); boolean raw = false; boolean thinking = false; - OllamaResult result = api.generate(GENERAL_PURPOSE_MODEL, - "What is the capital of France? And what's France's connection with Mona Lisa?", raw, - thinking, new OptionsBuilder().build()); + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(GENERAL_PURPOSE_MODEL) + .withPrompt( + "What is the capital of France? And what's France's connection with" + + " Mona Lisa?") + .withRaw(raw) + .withThink(thinking) + .withOptions(new OptionsBuilder().build()) + .build(); + OllamaGenerateStreamObserver handler = null; + OllamaResult result = api.generate(request, handler); assertNotNull(result); assertNotNull(result.getResponse()); assertFalse(result.getResponse().isEmpty()); } + /** + * Tests text generation with streaming enabled. + * + *

Scenario: Calls generate with a general-purpose model, streaming the response tokens. + * Usage: generate, raw=false, think=false, streaming enabled. + */ @Test @Order(7) - void testGenerateWithDefaultOptionsStreamed() - throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { + void shouldGenerateWithDefaultOptionsStreamed() throws OllamaException { api.pullModel(GENERAL_PURPOSE_MODEL); boolean raw = false; - StringBuffer sb = new StringBuffer(); - OllamaResult result = api.generate(GENERAL_PURPOSE_MODEL, - "What is the capital of France? And what's France's connection with Mona Lisa?", raw, - new OptionsBuilder().build(), (s) -> { - LOG.info(s); - sb.append(s); - }); - + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(GENERAL_PURPOSE_MODEL) + .withPrompt( + "What is the capital of France? And what's France's connection with" + + " Mona Lisa?") + .withRaw(raw) + .withThink(false) + .withOptions(new OptionsBuilder().build()) + .build(); + OllamaResult result = + api.generate( + request, + new OllamaGenerateStreamObserver( + null, new ConsoleOutputGenerateTokenHandler())); assertNotNull(result); assertNotNull(result.getResponse()); assertFalse(result.getResponse().isEmpty()); - assertEquals(sb.toString(), result.getResponse()); } + /** + * Tests chat API with custom options (e.g., temperature). + * + *

Scenario: Builds a chat request with system and user messages, sets a custom temperature, + * and verifies the response. Usage: chat, no tools, no thinking, no streaming, custom options. + */ @Test @Order(8) - void testGenerateWithOptions() throws OllamaBaseException, IOException, URISyntaxException, - InterruptedException, ToolInvocationException { + void shouldGenerateWithCustomOptions() throws OllamaException { api.pullModel(GENERAL_PURPOSE_MODEL); - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(GENERAL_PURPOSE_MODEL); - OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, - "You are a helpful assistant who can generate random person's first and last names in the format [First name, Last name].") - .build(); - requestModel = builder.withMessages(requestModel.getMessages()) - .withMessage(OllamaChatMessageRole.USER, "Give me a cool name") - .withOptions(new OptionsBuilder().setTemperature(0.5f).build()).build(); - OllamaChatResult chatResult = api.chat(requestModel); + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(GENERAL_PURPOSE_MODEL); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.SYSTEM, + "You are a helpful assistant who can generate random person's first" + + " and last names in the format [First name, Last name].") + .build(); + requestModel = + builder.withMessages(requestModel.getMessages()) + .withMessage(OllamaChatMessageRole.USER, "Give me a cool name") + .withOptions(new OptionsBuilder().setTemperature(0.5f).build()) + .build(); + OllamaChatResult chatResult = api.chat(requestModel, null); assertNotNull(chatResult); assertNotNull(chatResult.getResponseModel()); - assertFalse(chatResult.getResponseModel().getMessage().getContent().isEmpty()); + assertFalse(chatResult.getResponseModel().getMessage().getResponse().isEmpty()); } + /** + * Tests chat API with a system prompt and verifies the assistant's response. + * + *

Scenario: Sends a system prompt instructing the assistant to reply with a specific word, + * then checks the response. Usage: chat, no tools, no thinking, no streaming, system prompt. + */ @Test @Order(9) - void testChatWithSystemPrompt() throws OllamaBaseException, IOException, URISyntaxException, - InterruptedException, ToolInvocationException { + void shouldChatWithSystemPrompt() throws OllamaException { api.pullModel(GENERAL_PURPOSE_MODEL); String expectedResponse = "Bhai"; - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(GENERAL_PURPOSE_MODEL); - OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, String.format( - "[INSTRUCTION-START] You are an obidient and helpful bot named %s. You always answer with only one word and that word is your name. [INSTRUCTION-END]", - expectedResponse)).withMessage(OllamaChatMessageRole.USER, "Who are you?") - .withOptions(new OptionsBuilder().setTemperature(0.0f).build()).build(); + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(GENERAL_PURPOSE_MODEL); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.SYSTEM, + String.format( + "[INSTRUCTION-START] You are an obidient and helpful bot" + + " named %s. You always answer with only one word and" + + " that word is your name. [INSTRUCTION-END]", + expectedResponse)) + .withMessage(OllamaChatMessageRole.USER, "Who are you?") + .withOptions(new OptionsBuilder().setTemperature(0.0f).build()) + .build(); - OllamaChatResult chatResult = api.chat(requestModel); + OllamaChatResult chatResult = api.chat(requestModel, null); assertNotNull(chatResult); assertNotNull(chatResult.getResponseModel()); assertNotNull(chatResult.getResponseModel().getMessage()); - assertFalse(chatResult.getResponseModel().getMessage().getContent().isBlank()); - assertTrue(chatResult.getResponseModel().getMessage().getContent().contains(expectedResponse)); + assertFalse(chatResult.getResponseModel().getMessage().getResponse().isBlank()); + assertTrue( + chatResult + .getResponseModel() + .getMessage() + .getResponse() + .contains(expectedResponse)); assertEquals(3, chatResult.getChatHistory().size()); } + /** + * Tests chat API with multi-turn conversation (chat history). + * + *

Scenario: Sends a sequence of user messages, each time including the chat history, and + * verifies the assistant's responses. Usage: chat, no tools, no thinking, no streaming, + * multi-turn. + */ @Test @Order(10) - void testChat() throws Exception { + void shouldChatWithHistory() throws Exception { api.pullModel(THINKING_TOOL_MODEL); - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(THINKING_TOOL_MODEL); + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(THINKING_TOOL_MODEL); - // Create the initial user question - OllamaChatRequest requestModel = builder - .withMessage(OllamaChatMessageRole.USER, "What is 1+1? Answer only in numbers.") - .build(); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.USER, "What is 1+1? Answer only in numbers.") + .build(); - // Start conversation with model - OllamaChatResult chatResult = api.chat(requestModel); + OllamaChatResult chatResult = api.chat(requestModel, null); - assertTrue(chatResult.getChatHistory().stream().anyMatch(chat -> chat.getContent().contains("2")), - "Expected chat history to contain '2'"); + assertNotNull(chatResult); + assertNotNull(chatResult.getChatHistory()); + assertNotNull(chatResult.getChatHistory().stream()); - requestModel = builder.withMessages(chatResult.getChatHistory()) - .withMessage(OllamaChatMessageRole.USER, "And what is its squared value?").build(); + requestModel = + builder.withMessages(chatResult.getChatHistory()) + .withMessage(OllamaChatMessageRole.USER, "And what is its squared value?") + .build(); - // Continue conversation with model - chatResult = api.chat(requestModel); + chatResult = api.chat(requestModel, null); - assertTrue(chatResult.getChatHistory().stream().anyMatch(chat -> chat.getContent().contains("4")), - "Expected chat history to contain '4'"); + assertNotNull(chatResult); + assertNotNull(chatResult.getChatHistory()); + assertNotNull(chatResult.getChatHistory().stream()); - // Create the next user question: the third question - requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, - "What is the largest value between 2, 4 and 6?").build(); + requestModel = + builder.withMessages(chatResult.getChatHistory()) + .withMessage( + OllamaChatMessageRole.USER, + "What is the largest value between 2, 4 and 6?") + .build(); - // Continue conversation with the model for the third question - chatResult = api.chat(requestModel); + chatResult = api.chat(requestModel, null); - // verify the result assertNotNull(chatResult, "Chat result should not be null"); - assertTrue(chatResult.getChatHistory().size() > 2, + assertTrue( + chatResult.getChatHistory().size() > 2, "Chat history should contain more than two messages"); - assertTrue(chatResult.getChatHistory().get(chatResult.getChatHistory().size() - 1).getContent() - .contains("6"), "Response should contain '6'"); } + /** + * Tests chat API with explicit tool invocation (client does not handle tools). + * + *

Scenario: Registers a tool, sends a user message that triggers a tool call, and verifies + * the tool call and arguments. Usage: chat, explicit tool, useTools=false, no thinking, no + * streaming. + */ @Test @Order(11) - void testChatWithExplicitToolDefinition() throws OllamaBaseException, IOException, URISyntaxException, - InterruptedException, ToolInvocationException { - // Ensure default behavior (library handles tools) for baseline assertions - api.setClientHandlesTools(false); + void shouldChatWithExplicitTool() throws OllamaException { String theToolModel = TOOLS_MODEL; api.pullModel(theToolModel); - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(theToolModel); + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(theToolModel); - api.registerTool(employeeFinderTool()); + api.registerTool(EmployeeFinderToolSpec.getSpecification()); - OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, - "Give me the ID and address of the employee Rahul Kumar.").build(); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.USER, + "Give me the ID and address of the employee Rahul Kumar.") + .build(); requestModel.setOptions(new OptionsBuilder().setTemperature(0.9f).build().getOptionsMap()); - - OllamaChatResult chatResult = api.chat(requestModel); + requestModel.setUseTools(true); + OllamaChatResult chatResult = api.chat(requestModel, null); assertNotNull(chatResult, "chatResult should not be null"); assertNotNull(chatResult.getResponseModel(), "Response model should not be null"); - assertNotNull(chatResult.getResponseModel().getMessage(), "Response message should not be null"); + assertNotNull( + chatResult.getResponseModel().getMessage(), "Response message should not be null"); assertEquals( OllamaChatMessageRole.ASSISTANT.getRoleName(), chatResult.getResponseModel().getMessage().getRole().getRoleName(), - "Role of the response message should be ASSISTANT" - ); + "Role of the response message should be ASSISTANT"); List toolCalls = chatResult.getChatHistory().get(1).getToolCalls(); - assertEquals(1, toolCalls.size(), "There should be exactly one tool call in the second chat history message"); + assert (!toolCalls.isEmpty()); OllamaToolCallsFunction function = toolCalls.get(0).getFunction(); - assertEquals("get-employee-details", function.getName(), "Tool function name should be 'get-employee-details'"); - assertFalse(function.getArguments().isEmpty(), "Tool function arguments should not be empty"); + assertEquals( + "get-employee-details", + function.getName(), + "Tool function name should be 'get-employee-details'"); + assertFalse( + function.getArguments().isEmpty(), "Tool function arguments should not be empty"); Object employeeName = function.getArguments().get("employee-name"); assertNotNull(employeeName, "Employee name argument should not be null"); assertEquals("Rahul Kumar", employeeName, "Employee name argument should be 'Rahul Kumar'"); - assertTrue(chatResult.getChatHistory().size() > 2, "Chat history should have more than 2 messages"); - List finalToolCalls = chatResult.getResponseModel().getMessage().getToolCalls(); + assertTrue( + chatResult.getChatHistory().size() > 2, + "Chat history should have more than 2 messages"); + List finalToolCalls = + chatResult.getResponseModel().getMessage().getToolCalls(); assertNull(finalToolCalls, "Final tool calls in the response message should be null"); } + /** + * Tests chat API with explicit tool invocation and useTools=true. + * + *

Scenario: Registers a tool, enables useTools, sends a user message, and verifies the + * assistant's tool call. Usage: chat, explicit tool, useTools=true, no thinking, no streaming. + */ @Test @Order(13) - void testChatWithExplicitToolDefinitionWithClientHandlesTools() throws OllamaBaseException, IOException, URISyntaxException, - InterruptedException, ToolInvocationException { + void shouldChatWithExplicitToolAndUseTools() throws OllamaException { String theToolModel = TOOLS_MODEL; api.pullModel(theToolModel); - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(theToolModel); + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(theToolModel); - api.registerTool(employeeFinderTool()); + api.registerTool(EmployeeFinderToolSpec.getSpecification()); - try { - // enable client-handled tools so the library does not auto-execute tool calls - api.setClientHandlesTools(true); - - OllamaChatRequest requestModel = builder - .withMessage(OllamaChatMessageRole.USER, "Give me the ID and address of the employee Rahul Kumar.") - .build(); - requestModel.setOptions(new OptionsBuilder().setTemperature(0.9f).build().getOptionsMap()); - - OllamaChatResult chatResult = api.chat(requestModel); - - assertNotNull(chatResult, "chatResult should not be null"); - assertNotNull(chatResult.getResponseModel(), "Response model should not be null"); - assertNotNull(chatResult.getResponseModel().getMessage(), "Response message should not be null"); - assertEquals( - OllamaChatMessageRole.ASSISTANT.getRoleName(), - chatResult.getResponseModel().getMessage().getRole().getRoleName(), - "Role of the response message should be ASSISTANT" - ); - - // When clientHandlesTools is true, the assistant message should contain tool calls - List toolCalls = chatResult.getResponseModel().getMessage().getToolCalls(); - assertNotNull(toolCalls, "Assistant message should contain tool calls when clientHandlesTools is true"); - assertFalse(toolCalls.isEmpty(), "Tool calls should not be empty"); - OllamaToolCallsFunction function = toolCalls.get(0).getFunction(); - assertEquals("get-employee-details", function.getName(), "Tool function name should be 'get-employee-details'"); - Object employeeName = function.getArguments().get("employee-name"); - assertNotNull(employeeName, "Employee name argument should not be null"); - assertEquals("Rahul Kumar", employeeName, "Employee name argument should be 'Rahul Kumar'"); - - // Since tools were not auto-executed, chat history should contain only the user and assistant messages - assertEquals(2, chatResult.getChatHistory().size(), - "Chat history should contain only user and assistant (tool call) messages when clientHandlesTools is true"); - } finally { - // reset to default to avoid affecting other tests - api.setClientHandlesTools(false); - } - } - - @Test - @Order(14) - void testChatWithToolsAndStream() throws OllamaBaseException, IOException, URISyntaxException, - InterruptedException, ToolInvocationException { - // Ensure default behavior (library handles tools) for streamed test - api.setClientHandlesTools(false); - String theToolModel = TOOLS_MODEL; - api.pullModel(theToolModel); - - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(theToolModel); - - api.registerTool(employeeFinderTool()); - - OllamaChatRequest requestModel = builder - .withMessage(OllamaChatMessageRole.USER, "Give me the ID and address of employee Rahul Kumar") - .withKeepAlive("0m").withOptions(new OptionsBuilder().setTemperature(0.9f).build()) - .build(); - - OllamaChatResult chatResult = api.chat(requestModel, (s) -> { - LOG.info(s.toUpperCase()); - }, (s) -> { - LOG.info(s.toLowerCase()); - }); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.USER, + "Give me the ID and address of the employee Rahul Kumar.") + .build(); + requestModel.setOptions(new OptionsBuilder().setTemperature(0.9f).build().getOptionsMap()); + requestModel.setUseTools(true); + OllamaChatResult chatResult = api.chat(requestModel, null); assertNotNull(chatResult, "chatResult should not be null"); assertNotNull(chatResult.getResponseModel(), "Response model should not be null"); - assertNotNull(chatResult.getResponseModel().getMessage(), "Response message should not be null"); + assertNotNull( + chatResult.getResponseModel().getMessage(), "Response message should not be null"); assertEquals( OllamaChatMessageRole.ASSISTANT.getRoleName(), chatResult.getResponseModel().getMessage().getRole().getRoleName(), - "Role of the response message should be ASSISTANT" - ); + "Role of the response message should be ASSISTANT"); + + boolean toolCalled = false; + List msgs = chatResult.getChatHistory(); + for (OllamaChatMessage msg : msgs) { + if (msg.getRole().equals(OllamaChatMessageRole.TOOL)) { + toolCalled = true; + } + } + assertTrue(toolCalled, "Assistant message should contain tool calls when useTools is true"); + } + + /** + * Tests chat API with explicit tool invocation and streaming enabled. + * + *

Scenario: Registers a tool, sends a user message, and streams the assistant's response + * (with tool call). Usage: chat, explicit tool, useTools=false, streaming enabled. + */ + @Test + @Order(14) + void shouldChatWithToolsAndStream() throws OllamaException { + String theToolModel = TOOLS_MODEL; + api.pullModel(theToolModel); + + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(theToolModel); + + api.registerTool(EmployeeFinderToolSpec.getSpecification()); + + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.USER, + "Give me the ID and address of employee Rahul Kumar") + .withKeepAlive("0m") + .withOptions(new OptionsBuilder().setTemperature(0.9f).build()) + .build(); + requestModel.setUseTools(true); + OllamaChatResult chatResult = api.chat(requestModel, new ConsoleOutputChatTokenHandler()); + + assertNotNull(chatResult, "chatResult should not be null"); + assertNotNull(chatResult.getResponseModel(), "Response model should not be null"); + assertNotNull( + chatResult.getResponseModel().getMessage(), "Response message should not be null"); + assertEquals( + OllamaChatMessageRole.ASSISTANT.getRoleName(), + chatResult.getResponseModel().getMessage().getRole().getRoleName(), + "Role of the response message should be ASSISTANT"); List toolCalls = chatResult.getChatHistory().get(1).getToolCalls(); - assertEquals(1, toolCalls.size(), "There should be exactly one tool call in the second chat history message"); + assertEquals( + 1, + toolCalls.size(), + "There should be exactly one tool call in the second chat history message"); OllamaToolCallsFunction function = toolCalls.get(0).getFunction(); - assertEquals("get-employee-details", function.getName(), "Tool function name should be 'get-employee-details'"); - assertFalse(function.getArguments().isEmpty(), "Tool function arguments should not be empty"); - Object employeeName = function.getArguments().get("employee-name"); - assertNotNull(employeeName, "Employee name argument should not be null"); - assertEquals("Rahul Kumar", employeeName, "Employee name argument should be 'Rahul Kumar'"); - assertTrue(chatResult.getChatHistory().size() > 2, "Chat history should have more than 2 messages"); - List finalToolCalls = chatResult.getResponseModel().getMessage().getToolCalls(); + assertEquals( + "get-employee-details", + function.getName(), + "Tool function name should be 'get-employee-details'"); + assertFalse( + function.getArguments().isEmpty(), "Tool function arguments should not be empty"); + assertTrue( + chatResult.getChatHistory().size() > 2, + "Chat history should have more than 2 messages"); + List finalToolCalls = + chatResult.getResponseModel().getMessage().getToolCalls(); assertNull(finalToolCalls, "Final tool calls in the response message should be null"); } + /** + * Tests chat API with an annotated tool (single parameter). + * + *

Scenario: Registers annotated tools, sends a user message that triggers a tool call, and + * verifies the tool call and arguments. Usage: chat, annotated tool, no thinking, no streaming. + */ @Test @Order(12) - void testChatWithAnnotatedToolsAndSingleParam() throws OllamaBaseException, IOException, InterruptedException, - URISyntaxException, ToolInvocationException { + void shouldChatWithAnnotatedToolSingleParam() throws OllamaException { String theToolModel = TOOLS_MODEL; api.pullModel(theToolModel); - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(theToolModel); + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(theToolModel); api.registerAnnotatedTools(); - OllamaChatRequest requestModel = builder - .withMessage(OllamaChatMessageRole.USER, - "Compute the most important constant in the world using 5 digits") - .build(); - - OllamaChatResult chatResult = api.chat(requestModel); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.USER, + "Compute the most important constant in the world using 5 digits") + .build(); + requestModel.setUseTools(true); + OllamaChatResult chatResult = api.chat(requestModel, null); assertNotNull(chatResult); assertNotNull(chatResult.getResponseModel()); assertNotNull(chatResult.getResponseModel().getMessage()); - assertEquals(OllamaChatMessageRole.ASSISTANT.getRoleName(), + assertEquals( + OllamaChatMessageRole.ASSISTANT.getRoleName(), chatResult.getResponseModel().getMessage().getRole().getRoleName()); List toolCalls = chatResult.getChatHistory().get(1).getToolCalls(); - assertEquals(1, toolCalls.size()); + assert (!toolCalls.isEmpty()); OllamaToolCallsFunction function = toolCalls.get(0).getFunction(); assertEquals("computeImportantConstant", function.getName()); - assertEquals(1, function.getArguments().size()); + assert (!function.getArguments().isEmpty()); Object noOfDigits = function.getArguments().get("noOfDigits"); assertNotNull(noOfDigits); assertEquals("5", noOfDigits.toString()); assertTrue(chatResult.getChatHistory().size() > 2); - List finalToolCalls = chatResult.getResponseModel().getMessage().getToolCalls(); + List finalToolCalls = + chatResult.getResponseModel().getMessage().getToolCalls(); assertNull(finalToolCalls); } + /** + * Tests chat API with an annotated tool (multiple parameters). + * + *

Scenario: Registers annotated tools, sends a user message that may trigger a tool call + * with multiple arguments. Usage: chat, annotated tool, no thinking, no streaming, multiple + * parameters. + * + *

Note: This test is non-deterministic due to model variability; some assertions are + * commented out. + */ @Test @Order(13) - void testChatWithAnnotatedToolsAndMultipleParams() throws OllamaBaseException, IOException, URISyntaxException, - InterruptedException, ToolInvocationException { + void shouldChatWithAnnotatedToolMultipleParams() throws OllamaException { String theToolModel = TOOLS_MODEL; api.pullModel(theToolModel); - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(theToolModel); + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(theToolModel); api.registerAnnotatedTools(new AnnotatedTool()); - OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, - "Greet Rahul with a lot of hearts and respond to me with count of emojis that have been in used in the greeting") - .build(); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.USER, + "Greet Rahul with a lot of hearts and respond to me with count of" + + " emojis that have been in used in the greeting") + .build(); - OllamaChatResult chatResult = api.chat(requestModel); + OllamaChatResult chatResult = api.chat(requestModel, null); assertNotNull(chatResult); assertNotNull(chatResult.getResponseModel()); assertNotNull(chatResult.getResponseModel().getMessage()); - assertEquals(OllamaChatMessageRole.ASSISTANT.getRoleName(), + assertEquals( + OllamaChatMessageRole.ASSISTANT.getRoleName(), chatResult.getResponseModel().getMessage().getRole().getRoleName()); - List toolCalls = chatResult.getChatHistory().get(1).getToolCalls(); - assertEquals(1, toolCalls.size()); - OllamaToolCallsFunction function = toolCalls.get(0).getFunction(); - assertEquals("sayHello", function.getName()); - assertEquals(2, function.getArguments().size()); - Object name = function.getArguments().get("name"); - assertNotNull(name); - assertEquals("Rahul", name); - Object numberOfHearts = function.getArguments().get("numberOfHearts"); - assertNotNull(numberOfHearts); - assertTrue(Integer.parseInt(numberOfHearts.toString()) > 1); - assertTrue(chatResult.getChatHistory().size() > 2); - List finalToolCalls = chatResult.getResponseModel().getMessage().getToolCalls(); - assertNull(finalToolCalls); } + /** + * Tests chat API with streaming enabled (no tools, no thinking). + * + *

Scenario: Sends a user message and streams the assistant's response. Usage: chat, no + * tools, no thinking, streaming enabled. + */ @Test @Order(15) - void testChatWithStream() throws OllamaBaseException, IOException, URISyntaxException, InterruptedException, - ToolInvocationException { + void shouldChatWithStream() throws OllamaException { api.deregisterTools(); api.pullModel(GENERAL_PURPOSE_MODEL); - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(GENERAL_PURPOSE_MODEL); - OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, - "What is the capital of France? And what's France's connection with Mona Lisa?") - .build(); + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(GENERAL_PURPOSE_MODEL); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.USER, + "What is the capital of France? And what's France's connection with" + + " Mona Lisa?") + .build(); requestModel.setThink(false); - StringBuffer sb = new StringBuffer(); - OllamaChatResult chatResult = api.chat(requestModel, (s) -> { - LOG.info(s.toUpperCase()); - sb.append(s); - }, (s) -> { - LOG.info(s.toLowerCase()); - sb.append(s); - }); + OllamaChatResult chatResult = api.chat(requestModel, new ConsoleOutputChatTokenHandler()); assertNotNull(chatResult); assertNotNull(chatResult.getResponseModel()); assertNotNull(chatResult.getResponseModel().getMessage()); - assertNotNull(chatResult.getResponseModel().getMessage().getContent()); - assertEquals(sb.toString(), chatResult.getResponseModel().getMessage().getContent()); + assertNotNull(chatResult.getResponseModel().getMessage().getResponse()); } + /** + * Tests chat API with thinking and streaming enabled. + * + *

Scenario: Sends a user message with thinking enabled and streams the assistant's response. + * Usage: chat, no tools, thinking enabled, streaming enabled. + */ @Test @Order(15) - void testChatWithThinkingAndStream() throws OllamaBaseException, IOException, URISyntaxException, - InterruptedException, ToolInvocationException { - api.pullModel(THINKING_TOOL_MODEL); - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(THINKING_TOOL_MODEL); - OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, - "What is the capital of France? And what's France's connection with Mona Lisa?") - .withThinking(true).withKeepAlive("0m").build(); - StringBuffer sb = new StringBuffer(); + void shouldChatWithThinkingAndStream() throws OllamaException { + api.pullModel(THINKING_TOOL_MODEL_2); + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(THINKING_TOOL_MODEL_2); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.USER, + "What is the capital of France? And what's France's connection with" + + " Mona Lisa?") + .withThinking(true) + .withKeepAlive("0m") + .build(); - OllamaChatResult chatResult = api.chat(requestModel, (s) -> { - sb.append(s); - LOG.info(s.toUpperCase()); - }, (s) -> { - sb.append(s); - LOG.info(s.toLowerCase()); - }); + OllamaChatResult chatResult = api.chat(requestModel, new ConsoleOutputChatTokenHandler()); assertNotNull(chatResult); assertNotNull(chatResult.getResponseModel()); assertNotNull(chatResult.getResponseModel().getMessage()); - assertNotNull(chatResult.getResponseModel().getMessage().getContent()); - assertEquals(sb.toString(), chatResult.getResponseModel().getMessage().getThinking() - + chatResult.getResponseModel().getMessage().getContent()); + assertNotNull(chatResult.getResponseModel().getMessage().getResponse()); } + /** + * Tests chat API with an image input from a URL. + * + *

Scenario: Sends a user message with an image URL and verifies the assistant's response. + * Usage: chat, vision model, image from URL, no tools, no thinking, no streaming. + */ @Test @Order(10) - void testChatWithImageFromURL() throws OllamaBaseException, IOException, InterruptedException, - URISyntaxException, ToolInvocationException { + void shouldChatWithImageFromURL() throws OllamaException, IOException, InterruptedException { api.pullModel(VISION_MODEL); - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(VISION_MODEL); - OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, - "What's in the picture?", Collections.emptyList(), - "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg") - .build(); + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(VISION_MODEL); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.USER, + "What's in the picture?", + Collections.emptyList(), + "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg") + .build(); api.registerAnnotatedTools(new OllamaAPIIntegrationTest()); - OllamaChatResult chatResult = api.chat(requestModel); + OllamaChatResult chatResult = api.chat(requestModel, null); assertNotNull(chatResult); } + /** + * Tests chat API with an image input from a file and multi-turn history. + * + *

Scenario: Sends a user message with an image file, then continues the conversation with + * chat history. Usage: chat, vision model, image from file, multi-turn, no tools, no thinking, + * no streaming. + */ @Test @Order(10) - void testChatWithImageFromFileWithHistoryRecognition() throws OllamaBaseException, IOException, - URISyntaxException, InterruptedException, ToolInvocationException { + void shouldChatWithImageFromFileAndHistory() throws OllamaException { api.pullModel(VISION_MODEL); - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(VISION_MODEL); - OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, - "What's in the picture?", Collections.emptyList(), - List.of(getImageFileFromClasspath("emoji-smile.jpeg"))).build(); + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(VISION_MODEL); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.USER, + "What's in the picture?", + Collections.emptyList(), + List.of(getImageFileFromClasspath("emoji-smile.jpeg"))) + .build(); - OllamaChatResult chatResult = api.chat(requestModel); + OllamaChatResult chatResult = api.chat(requestModel, null); assertNotNull(chatResult); assertNotNull(chatResult.getResponseModel()); builder.reset(); - requestModel = builder.withMessages(chatResult.getChatHistory()) - .withMessage(OllamaChatMessageRole.USER, "What's the color?").build(); + requestModel = + builder.withMessages(chatResult.getChatHistory()) + .withMessage(OllamaChatMessageRole.USER, "What's the color?") + .build(); - chatResult = api.chat(requestModel); + chatResult = api.chat(requestModel, null); assertNotNull(chatResult); assertNotNull(chatResult.getResponseModel()); } - @Test - @Order(17) - void testGenerateWithOptionsAndImageURLs() - throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { - api.pullModel(VISION_MODEL); - - OllamaResult result = api.generateWithImageURLs(VISION_MODEL, "What is in this image?", - List.of("https://i.pinimg.com/736x/f9/4e/cb/f94ecba040696a3a20b484d2e15159ec.jpg"), - new OptionsBuilder().build()); - assertNotNull(result); - assertNotNull(result.getResponse()); - assertFalse(result.getResponse().isEmpty()); - } - + /** + * Tests generateWithImages using an image file as input. + * + *

Scenario: Calls generateWithImages with a vision model and an image file, expecting a + * non-empty response. Usage: generateWithImages, image from file, no streaming. + */ @Test @Order(18) - void testGenerateWithOptionsAndImageFiles() - throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { + void shouldGenerateWithImageFiles() throws OllamaException { api.pullModel(VISION_MODEL); - File imageFile = getImageFileFromClasspath("roses.jpg"); try { - OllamaResult result = api.generateWithImageFiles(VISION_MODEL, "What is in this image?", - List.of(imageFile), new OptionsBuilder().build()); + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(VISION_MODEL) + .withPrompt("What is in this image?") + .withRaw(false) + .withThink(false) + .withOptions(new OptionsBuilder().build()) + .withImages(List.of(getImageFileFromClasspath("roses.jpg"))) + .withFormat(null) + .withKeepAlive("0m") + .build(); + OllamaGenerateStreamObserver handler = null; + OllamaResult result = api.generate(request, handler); assertNotNull(result); assertNotNull(result.getResponse()); assertFalse(result.getResponse().isEmpty()); - } catch (IOException | OllamaBaseException | InterruptedException e) { + } catch (OllamaException e) { fail(e); + } catch (IOException e) { + throw new RuntimeException(e); } } + /** + * Tests generateWithImages with image file input and streaming enabled. + * + *

Scenario: Calls generateWithImages with a vision model, an image file, and a streaming + * handler for the response. Usage: generateWithImages, image from file, streaming enabled. + */ @Test @Order(20) - void testGenerateWithOptionsAndImageFilesStreamed() - throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { + void shouldGenerateWithImageFilesAndResponseStreamed() throws OllamaException, IOException { api.pullModel(VISION_MODEL); - - File imageFile = getImageFileFromClasspath("roses.jpg"); - - StringBuffer sb = new StringBuffer(); - - OllamaResult result = api.generateWithImageFiles(VISION_MODEL, "What is in this image?", - List.of(imageFile), new OptionsBuilder().build(), (s) -> { - LOG.info(s); - sb.append(s); - }); + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(VISION_MODEL) + .withPrompt("What is in this image?") + .withRaw(false) + .withThink(false) + .withOptions(new OptionsBuilder().build()) + .withImages(List.of(getImageFileFromClasspath("roses.jpg"))) + .withFormat(null) + .withKeepAlive("0m") + .build(); + OllamaGenerateStreamObserver handler = + new OllamaGenerateStreamObserver( + new ConsoleOutputGenerateTokenHandler(), + new ConsoleOutputGenerateTokenHandler()); + OllamaResult result = api.generate(request, handler); assertNotNull(result); assertNotNull(result.getResponse()); assertFalse(result.getResponse().isEmpty()); - assertEquals(sb.toString(), result.getResponse()); } + /** + * Tests generate with thinking enabled (no streaming). + * + *

Scenario: Calls generate with think=true, expecting both response and thinking fields to + * be populated. Usage: generate, think=true, no streaming. + */ @Test @Order(20) - void testGenerateWithThinking() - throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { + void shouldGenerateWithThinking() throws OllamaException { api.pullModel(THINKING_TOOL_MODEL); boolean raw = false; boolean think = true; - OllamaResult result = api.generate(THINKING_TOOL_MODEL, "Who are you?", raw, think, - new OptionsBuilder().build()); + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(THINKING_TOOL_MODEL) + .withPrompt("Who are you?") + .withRaw(raw) + .withThink(think) + .withOptions(new OptionsBuilder().build()) + .withFormat(null) + .withKeepAlive("0m") + .build(); + OllamaGenerateStreamObserver handler = new OllamaGenerateStreamObserver(null, null); + + OllamaResult result = api.generate(request, handler); assertNotNull(result); assertNotNull(result.getResponse()); - assertFalse(result.getResponse().isEmpty()); assertNotNull(result.getThinking()); - assertFalse(result.getThinking().isEmpty()); } + /** + * Tests generate with thinking and streaming enabled. + * + *

Scenario: Calls generate with think=true and a stream handler for both thinking and + * response tokens. Usage: generate, think=true, streaming enabled. + */ @Test @Order(20) - void testGenerateWithThinkingAndStreamHandler() - throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { + void shouldGenerateWithThinkingAndStreamHandler() throws OllamaException { api.pullModel(THINKING_TOOL_MODEL); - boolean raw = false; + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(THINKING_TOOL_MODEL) + .withPrompt("Who are you?") + .withRaw(raw) + .withThink(true) + .withOptions(new OptionsBuilder().build()) + .withFormat(null) + .withKeepAlive("0m") + .build(); + OllamaGenerateStreamObserver handler = + new OllamaGenerateStreamObserver( + thinkingToken -> { + LOG.info(thinkingToken.toUpperCase()); + }, + resToken -> { + LOG.info(resToken.toLowerCase()); + }); - StringBuffer sb = new StringBuffer(); - OllamaResult result = api.generate(THINKING_TOOL_MODEL, "Who are you?", raw, - new OptionsBuilder().build(), - (thinkingToken) -> { - sb.append(thinkingToken); - LOG.info(thinkingToken); - }, - (resToken) -> { - sb.append(resToken); - LOG.info(resToken); - } - ); + OllamaResult result = api.generate(request, handler); + assertNotNull(result); + assertNotNull(result.getResponse()); + assertNotNull(result.getThinking()); + } + + /** + * Tests generate with raw=true parameter. + * + *

Scenario: Calls generate with raw=true, which sends the prompt as-is without any + * formatting. Usage: generate, raw=true, no thinking, no streaming. + */ + @Test + @Order(21) + void shouldGenerateWithRawMode() throws OllamaException { + api.pullModel(GENERAL_PURPOSE_MODEL); + api.unloadModel(GENERAL_PURPOSE_MODEL); + boolean raw = true; + boolean thinking = false; + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(GENERAL_PURPOSE_MODEL) + .withPrompt("What is 2+2?") + .withRaw(raw) + .withThink(thinking) + .withOptions(new OptionsBuilder().build()) + .withFormat(null) + .withKeepAlive("0m") + .build(); + OllamaGenerateStreamObserver handler = new OllamaGenerateStreamObserver(null, null); + OllamaResult result = api.generate(request, handler); assertNotNull(result); assertNotNull(result.getResponse()); assertFalse(result.getResponse().isEmpty()); - assertNotNull(result.getThinking()); - assertFalse(result.getThinking().isEmpty()); - assertEquals(sb.toString(), result.getThinking() + result.getResponse()); } + /** + * Tests generate with raw=true and streaming enabled. + * + *

Scenario: Calls generate with raw=true and streams the response. Usage: generate, + * raw=true, no thinking, streaming enabled. + */ + @Test + @Order(22) + void shouldGenerateWithRawModeAndStreaming() throws OllamaException { + api.pullModel(GENERAL_PURPOSE_MODEL); + boolean raw = true; + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(GENERAL_PURPOSE_MODEL) + .withPrompt("What is the largest planet in our solar system?") + .withRaw(raw) + .withThink(false) + .withOptions(new OptionsBuilder().build()) + .withFormat(null) + .withKeepAlive("0m") + .build(); + OllamaGenerateStreamObserver handler = + new OllamaGenerateStreamObserver(null, new ConsoleOutputGenerateTokenHandler()); + OllamaResult result = api.generate(request, handler); + + assertNotNull(result); + assertNotNull(result.getResponse()); + assertFalse(result.getResponse().isEmpty()); + } + + /** + * Tests generate with all parameters enabled: raw=true, thinking=true, and streaming. + * + *

Scenario: Calls generate with all possible parameters enabled. Usage: generate, raw=true, + * thinking enabled, streaming enabled. + */ + @Test + @Order(24) + void shouldGenerateWithAllParametersEnabled() throws OllamaException { + api.pullModel(THINKING_TOOL_MODEL); + // Settinng raw here instructs to keep the response raw. Even if the model generates + // 'thinking' tokens, they will not be received as separate tokens and will be mised with + // 'response' tokens + boolean raw = true; + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(THINKING_TOOL_MODEL) + .withPrompt( + "Count 1 to 5. Just give me the numbers and do not give any other" + + " details or information.") + .withRaw(raw) + .withThink(true) + .withOptions(new OptionsBuilder().setTemperature(0.1f).build()) + .withFormat(null) + .withKeepAlive("0m") + .build(); + OllamaGenerateStreamObserver handler = + new OllamaGenerateStreamObserver( + thinkingToken -> LOG.info("Thinking token: {}", thinkingToken), + responseToken -> LOG.info("Response token: {}", responseToken)); + OllamaResult result = api.generate(request, handler); + assertNotNull(result); + assertNotNull(result.getResponse()); + assertNotNull(result.getThinking()); + } + + /** + * Tests generateWithFormat with complex nested JSON schema. + * + *

Scenario: Uses a more complex JSON schema with nested objects and arrays. Usage: + * generateWithFormat with complex schema. + */ + @Test + @Order(25) + void shouldGenerateWithComplexStructuredOutput() throws OllamaException { + api.pullModel(TOOLS_MODEL); + + String prompt = + "Generate information about three major cities: their names, populations, and top" + + " attractions."; + + Map format = new HashMap<>(); + format.put("type", "object"); + Map properties = new HashMap<>(); + + Map citiesProperty = new HashMap<>(); + citiesProperty.put("type", "array"); + + Map cityItem = new HashMap<>(); + cityItem.put("type", "object"); + + Map cityProperties = new HashMap<>(); + cityProperties.put("name", Map.of("type", "string")); + cityProperties.put("population", Map.of("type", "number")); + + Map attractionsProperty = new HashMap<>(); + attractionsProperty.put("type", "array"); + attractionsProperty.put("items", Map.of("type", "string")); + cityProperties.put("attractions", attractionsProperty); + + cityItem.put("properties", cityProperties); + cityItem.put("required", List.of("name", "population", "attractions")); + + citiesProperty.put("items", cityItem); + properties.put("cities", citiesProperty); + + format.put("properties", properties); + format.put("required", List.of("cities")); + + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(TOOLS_MODEL) + .withPrompt(prompt) + .withFormat(format) + .withKeepAlive("0m") + .build(); + OllamaGenerateStreamObserver handler = null; + + OllamaResult result = api.generate(request, handler); + + assertNotNull(result); + assertNotNull(result.getResponse()); + assertNotNull(result.getStructuredResponse()); + assertTrue(result.getStructuredResponse().containsKey("cities")); + } + + /** + * Tests chat with thinking enabled but no streaming. + * + *

Scenario: Enables thinking in chat mode without streaming. Usage: chat, thinking enabled, + * no streaming, no tools. + */ + @Test + @Order(26) + void shouldChatWithThinkingNoStream() throws OllamaException { + api.pullModel(THINKING_TOOL_MODEL); + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(THINKING_TOOL_MODEL); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.USER, + "What is the meaning of life? Think deeply about this.") + .withThinking(true) + .build(); + + OllamaChatResult chatResult = api.chat(requestModel, null); + + assertNotNull(chatResult); + assertNotNull(chatResult.getResponseModel()); + assertNotNull(chatResult.getResponseModel().getMessage()); + assertNotNull(chatResult.getResponseModel().getMessage().getResponse()); + // Note: Thinking content might be in the message or separate field depending on + // implementation + } + + /** + * Tests chat with custom options and streaming. + * + *

Scenario: Combines custom options (temperature, top_p, etc.) with streaming. Usage: chat, + * custom options, streaming enabled, no tools, no thinking. + */ + @Test + @Order(27) + void shouldChatWithCustomOptionsAndStreaming() throws OllamaException { + api.pullModel(GENERAL_PURPOSE_MODEL); + + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(GENERAL_PURPOSE_MODEL); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.USER, + "Tell me a creative story about a time traveler") + .withOptions( + new OptionsBuilder() + .setTemperature(0.9f) + .setTopP(0.9f) + .setTopK(40) + .build()) + .build(); + + OllamaChatResult chatResult = api.chat(requestModel, new ConsoleOutputChatTokenHandler()); + + assertNotNull(chatResult); + assertNotNull(chatResult.getResponseModel()); + assertNotNull(chatResult.getResponseModel().getMessage().getResponse()); + assertFalse(chatResult.getResponseModel().getMessage().getResponse().isEmpty()); + } + + /** + * Tests chat with tools, thinking, and streaming all enabled. + * + *

Scenario: The most complex chat scenario with all features enabled. Usage: chat, tools, + * thinking enabled, streaming enabled. + */ + @Test + @Order(28) + void shouldChatWithToolsThinkingAndStreaming() throws OllamaException { + api.pullModel(THINKING_TOOL_MODEL_2); + + api.registerTool(EmployeeFinderToolSpec.getSpecification()); + + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(THINKING_TOOL_MODEL_2); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.USER, + "I need to find information about employee John Smith. Think" + + " carefully about what details to retrieve.") + .withThinking(true) + .withOptions(new OptionsBuilder().setTemperature(0.1f).build()) + .build(); + requestModel.setUseTools(false); + OllamaChatResult chatResult = api.chat(requestModel, new ConsoleOutputChatTokenHandler()); + + assertNotNull(chatResult); + assertNotNull(chatResult.getResponseModel()); + // Verify that either tools were called or a response was generated + assertTrue(chatResult.getChatHistory().size() >= 2); + } + + /** + * Tests chat with multiple images in a single message. + * + *

Scenario: Sends multiple images in one chat message. Usage: chat, vision model, multiple + * images, no tools, no thinking, no streaming. + */ + @Test + @Order(31) + void shouldChatWithMultipleImages() throws OllamaException { + api.pullModel(VISION_MODEL); + + List tools = Collections.emptyList(); + + File image1 = getImageFileFromClasspath("emoji-smile.jpeg"); + File image2 = getImageFileFromClasspath("roses.jpg"); + + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(VISION_MODEL); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.USER, + "Compare these images and tell me what you see", + tools, + Arrays.asList(image1, image2)) + .build(); + requestModel.setUseTools(false); + OllamaChatResult chatResult = api.chat(requestModel, null); + + assertNotNull(chatResult); + assertNotNull(chatResult.getResponseModel()); + assertNotNull(chatResult.getResponseModel().getMessage().getResponse()); + assertFalse(chatResult.getResponseModel().getMessage().getResponse().isEmpty()); + } + + /** + * Tests error handling when model doesn't exist. + * + *

Scenario: Attempts to use a non-existent model and verifies proper error handling. + */ + @Test + @Order(32) + void shouldHandleNonExistentModel() { + String nonExistentModel = "this-model-does-not-exist:latest"; + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(nonExistentModel) + .withPrompt("Hello") + .withRaw(false) + .withThink(false) + .withOptions(new OptionsBuilder().build()) + .withKeepAlive("0m") + .build(); + OllamaGenerateStreamObserver handler = new OllamaGenerateStreamObserver(null, null); + assertThrows( + OllamaException.class, + () -> { + api.generate(request, handler); + }); + } + + /** + * Tests chat with empty message (edge case). + * + *

Scenario: Sends an empty or whitespace-only message. Usage: chat, edge case testing. + */ + @Test + @Order(33) + void shouldHandleEmptyMessage() throws OllamaException { + api.pullModel(GENERAL_PURPOSE_MODEL); + + List tools = Collections.emptyList(); + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(GENERAL_PURPOSE_MODEL); + OllamaChatRequest requestModel = + builder.withMessage(OllamaChatMessageRole.USER, " ", tools) // whitespace only + .build(); + requestModel.setUseTools(false); + OllamaChatResult chatResult = api.chat(requestModel, null); + + assertNotNull(chatResult); + assertNotNull(chatResult.getResponseModel()); + // Should handle gracefully even with empty input + } + + /** + * Tests generate with very high temperature setting. + * + *

Scenario: Tests extreme parameter values for robustness. Usage: generate, extreme + * parameters, edge case testing. + */ + @Test + @Order(34) + void shouldGenerateWithExtremeParameters() throws OllamaException { + api.pullModel(GENERAL_PURPOSE_MODEL); + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(GENERAL_PURPOSE_MODEL) + .withPrompt("Generate a random word") + .withRaw(false) + .withThink(false) + .withOptions( + new OptionsBuilder() + .setTemperature(2.0f) // Very high temperature + .setTopP(1.0f) + .setTopK(1) + .build()) + .withKeepAlive("0m") + .build(); + OllamaGenerateStreamObserver handler = new OllamaGenerateStreamObserver(null, null); + OllamaResult result = api.generate(request, handler); + assertNotNull(result); + assertNotNull(result.getResponse()); + } + + /** + * Tests embeddings with single input string. + * + *

Scenario: Tests embedding generation with a single string instead of array. Usage: embed, + * single input. + */ + @Test + @Order(35) + void shouldReturnEmbeddingsForSingleInput() throws Exception { + api.pullModel(EMBEDDING_MODEL); + + OllamaEmbedRequest requestModel = new OllamaEmbedRequest(); + requestModel.setModel(EMBEDDING_MODEL); + requestModel.setInput( + Collections.singletonList("This is a single test sentence for embedding.")); + + OllamaEmbedResult embeddings = api.embed(requestModel); + + assertNotNull(embeddings); + assertFalse(embeddings.getEmbeddings().isEmpty()); + assertEquals(1, embeddings.getEmbeddings().size()); + } + + /** + * Tests chat with keep-alive parameter. + * + *

Scenario: Tests the keep-alive parameter which controls model unloading. Usage: chat, + * keep-alive parameter, model lifecycle management. + */ + @Test + @Order(36) + void shouldChatWithKeepAlive() throws OllamaException { + api.pullModel(GENERAL_PURPOSE_MODEL); + + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder().withModel(GENERAL_PURPOSE_MODEL); + OllamaChatRequest requestModel = + builder.withMessage(OllamaChatMessageRole.USER, "Hello, how are you?") + .withKeepAlive("5m") // Keep model loaded for 5 minutes + .build(); + requestModel.setUseTools(false); + OllamaChatResult chatResult = api.chat(requestModel, null); + + assertNotNull(chatResult); + assertNotNull(chatResult.getResponseModel()); + assertNotNull(chatResult.getResponseModel().getMessage().getResponse()); + } + + /** + * Tests generate with custom context window options. + * + *

Scenario: Tests generation with custom context length and other advanced options. Usage: + * generate, advanced options, context management. + */ + @Test + @Order(37) + void shouldGenerateWithAdvancedOptions() throws OllamaException { + api.pullModel(GENERAL_PURPOSE_MODEL); + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(GENERAL_PURPOSE_MODEL) + .withPrompt("Write a detailed explanation of machine learning") + .withRaw(false) + .withThink(false) + .withOptions( + new OptionsBuilder() + .setTemperature(0.7f) + .setTopP(0.9f) + .setTopK(40) + .setNumCtx(4096) // Context window size + .setRepeatPenalty(1.1f) + .build()) + .withKeepAlive("0m") + .build(); + OllamaGenerateStreamObserver handler = new OllamaGenerateStreamObserver(null, null); + OllamaResult result = api.generate(request, handler); + + assertNotNull(result); + assertNotNull(result.getResponse()); + assertFalse(result.getResponse().isEmpty()); + } + + /** + * Tests concurrent chat requests to verify thread safety. + * + *

Scenario: Sends multiple chat requests concurrently to test thread safety. Usage: chat, + * concurrency testing, thread safety. + */ + @Test + @Order(38) + void shouldHandleConcurrentChatRequests() throws OllamaException, InterruptedException { + api.pullModel(GENERAL_PURPOSE_MODEL); + + int numThreads = 3; + CountDownLatch latch = new CountDownLatch(numThreads); + List results = Collections.synchronizedList(new ArrayList<>()); + List exceptions = Collections.synchronizedList(new ArrayList<>()); + + for (int i = 0; i < numThreads; i++) { + final int threadId = i; + Thread thread = + new Thread( + () -> { + try { + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder() + .withModel(GENERAL_PURPOSE_MODEL); + OllamaChatRequest requestModel = + builder.withMessage( + OllamaChatMessageRole.USER, + "Hello from thread " + + threadId + + ". What is 2+2?") + .build(); + requestModel.setUseTools(false); + OllamaChatResult result = api.chat(requestModel, null); + results.add(result); + } catch (Exception e) { + exceptions.add(e); + } finally { + latch.countDown(); + } + }); + thread.start(); + } + + latch.await(60, java.util.concurrent.TimeUnit.SECONDS); + + assertTrue(exceptions.isEmpty(), "No exceptions should occur during concurrent requests"); + assertEquals(numThreads, results.size(), "All requests should complete successfully"); + + for (OllamaChatResult result : results) { + assertNotNull(result); + assertNotNull(result.getResponseModel()); + assertNotNull(result.getResponseModel().getMessage().getResponse()); + } + } + + /** + * Utility method to retrieve an image file from the classpath. + * + *

+ * + * @param fileName the name of the image file + * @return the File object for the image + */ private File getImageFileFromClasspath(String fileName) { ClassLoader classLoader = getClass().getClassLoader(); return new File(Objects.requireNonNull(classLoader.getResource(fileName)).getFile()); } +} - private Tools.ToolSpecification employeeFinderTool() { - return Tools.ToolSpecification.builder() - .functionName("get-employee-details") - .functionDescription("Get details for a person or an employee") - .toolPrompt(Tools.PromptFuncDefinition.builder().type("function") - .function(Tools.PromptFuncDefinition.PromptFuncSpec.builder() +class EmployeeFinderToolSpec { + private EmployeeFinderToolSpec() { + /* empty constructor */ + } + + public static Tools.Tool getSpecification() { + return Tools.Tool.builder() + .toolSpec( + Tools.ToolSpec.builder() .name("get-employee-details") - .description("Get details for a person or an employee") - .parameters(Tools.PromptFuncDefinition.Parameters - .builder().type("object") - .properties(new Tools.PropsBuilder() - .withProperty("employee-name", - Tools.PromptFuncDefinition.Property - .builder() + .description("Get employee details from the company database") + .parameters( + Tools.Parameters.of( + Map.of( + "employee-name", + Tools.Property.builder() .type("string") - .description("The name of the employee, e.g. John Doe") + .description( + "The name of the employee.") .required(true) - .build()) - .withProperty("employee-address", - Tools.PromptFuncDefinition.Property - .builder() + .build(), + "employee-address", + Tools.Property.builder() .type("string") - .description("The address of the employee, Always eturns a random address. For example, Church St, Bengaluru, India") + .description( + "The address of the" + + " employee.") .required(true) - .build()) - .withProperty("employee-phone", - Tools.PromptFuncDefinition.Property - .builder() + .build(), + "employee-phone", + Tools.Property.builder() .type("string") - .description("The phone number of the employee. Always returns a random phone number. For example, 9911002233") + .description( + "The phone number of the" + + " employee.") .required(true) - .build()) - .build()) - .required(List.of("employee-name")) - .build()) + .build()))) .build()) - .build()) - .toolFunction(new ToolFunction() { - @Override - public Object apply(Map arguments) { - LOG.info("Invoking employee finder tool with arguments: {}", arguments); - String employeeName = arguments.get("employee-name").toString(); - String address = null; - String phone = null; - if (employeeName.equalsIgnoreCase("Rahul Kumar")) { - address = "Pune, Maharashtra, India"; - phone = "9911223344"; - } else { - address = "Karol Bagh, Delhi, India"; - phone = "9911002233"; - } - // perform DB operations here - return String.format( - "Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", - UUID.randomUUID(), employeeName, address, phone); - } - }).build(); + .toolFunction( + arguments -> { + String address = null; + String employeeName = null; + try { + employeeName = arguments.get("employee-name").toString(); + } catch (Exception e) { + employeeName = "Mr. LLoyd Llama"; + } + try { + address = arguments.get("employee-address").toString(); + } catch (Exception e) { + address = "Somewhere on earth."; + } + + Random random = new Random(); + long min = 1_000_000_000L; + long max = 9_999_999_999L; + String phone = + String.valueOf( + min + ((long) (random.nextDouble() * (max - min)))); + + return String.format( + "Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", + UUID.randomUUID(), employeeName, address, phone); + }) + .build(); } } diff --git a/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java b/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java index 821a23e..e4a5fee 100644 --- a/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java +++ b/src/test/java/io/github/ollama4j/integrationtests/WithAuth.java @@ -1,10 +1,33 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.integrationtests; +import static org.junit.jupiter.api.Assertions.*; + import io.github.ollama4j.OllamaAPI; -import io.github.ollama4j.exceptions.OllamaBaseException; +import io.github.ollama4j.exceptions.OllamaException; +import io.github.ollama4j.models.generate.OllamaGenerateRequest; +import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder; +import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver; import io.github.ollama4j.models.response.OllamaResult; import io.github.ollama4j.samples.AnnotatedTool; import io.github.ollama4j.tools.annotations.OllamaToolService; +import io.github.ollama4j.utils.OptionsBuilder; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.net.URISyntaxException; +import java.time.Duration; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.MethodOrderer.OrderAnnotation; import org.junit.jupiter.api.Order; @@ -19,20 +42,14 @@ import org.testcontainers.ollama.OllamaContainer; import org.testcontainers.utility.DockerImageName; import org.testcontainers.utility.MountableFile; -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.net.URISyntaxException; -import java.time.Duration; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.junit.jupiter.api.Assertions.*; - @OllamaToolService(providers = {AnnotatedTool.class}) @TestMethodOrder(OrderAnnotation.class) -@SuppressWarnings({"HttpUrlsUsage", "SpellCheckingInspection", "resource", "ResultOfMethodCallIgnored"}) +@SuppressWarnings({ + "HttpUrlsUsage", + "SpellCheckingInspection", + "resource", + "ResultOfMethodCallIgnored" +}) public class WithAuth { private static final Logger LOG = LoggerFactory.getLogger(WithAuth.class); @@ -42,8 +59,6 @@ public class WithAuth { private static final String NGINX_VERSION = "nginx:1.23.4-alpine"; private static final String BEARER_AUTH_TOKEN = "secret-token"; private static final String GENERAL_PURPOSE_MODEL = "gemma3:270m"; -// private static final String THINKING_MODEL = "gpt-oss:20b"; - private static OllamaContainer ollama; private static GenericContainer nginx; @@ -63,43 +78,48 @@ public class WithAuth { api.setRequestTimeoutSeconds(120); api.setNumberOfRetriesForModelPull(3); - String ollamaUrl = "http://" + ollama.getHost() + ":" + ollama.getMappedPort(OLLAMA_INTERNAL_PORT); + String ollamaUrl = + "http://" + ollama.getHost() + ":" + ollama.getMappedPort(OLLAMA_INTERNAL_PORT); String nginxUrl = "http://" + nginx.getHost() + ":" + nginx.getMappedPort(NGINX_PORT); LOG.info( - "The Ollama service is now accessible via the Nginx proxy with bearer-auth authentication mode.\n" + - "→ Ollama URL: {}\n" + - "→ Proxy URL: {}", - ollamaUrl, nginxUrl - ); + "The Ollama service is now accessible via the Nginx proxy with bearer-auth" + + " authentication mode.\n" + + "→ Ollama URL: {}\n" + + "→ Proxy URL: {}", + ollamaUrl, + nginxUrl); LOG.info("OllamaAPI initialized with bearer auth token: {}", BEARER_AUTH_TOKEN); } private static OllamaContainer createOllamaContainer() { - return new OllamaContainer("ollama/ollama:" + OLLAMA_VERSION).withExposedPorts(OLLAMA_INTERNAL_PORT); + return new OllamaContainer("ollama/ollama:" + OLLAMA_VERSION) + .withExposedPorts(OLLAMA_INTERNAL_PORT); } private static String generateNginxConfig(int ollamaPort) { - return String.format("events {}\n" + - "\n" + - "http {\n" + - " server {\n" + - " listen 80;\n" + - "\n" + - " location / {\n" + - " set $auth_header $http_authorization;\n" + - "\n" + - " if ($auth_header != \"Bearer secret-token\") {\n" + - " return 401;\n" + - " }\n" + - "\n" + - " proxy_pass http://host.docker.internal:%s/;\n" + - " proxy_set_header Host $host;\n" + - " proxy_set_header X-Real-IP $remote_addr;\n" + - " proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;\n" + - " proxy_set_header X-Forwarded-Proto $scheme;\n" + - " }\n" + - " }\n" + - "}\n", ollamaPort); + return String.format( + "events {}\n" + + "\n" + + "http {\n" + + " server {\n" + + " listen 80;\n" + + "\n" + + " location / {\n" + + " set $auth_header $http_authorization;\n" + + "\n" + + " if ($auth_header != \"Bearer secret-token\") {\n" + + " return 401;\n" + + " }\n" + + "\n" + + " proxy_pass http://host.docker.internal:%s/;\n" + + " proxy_set_header Host $host;\n" + + " proxy_set_header X-Real-IP $remote_addr;\n" + + " proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;\n" + + " proxy_set_header X-Forwarded-Proto $scheme;\n" + + " }\n" + + " }\n" + + "}\n", + ollamaPort); } public static GenericContainer createNginxContainer(int ollamaPort) { @@ -117,14 +137,12 @@ public class WithAuth { .withExposedPorts(NGINX_PORT) .withCopyFileToContainer( MountableFile.forHostPath(nginxConf.getAbsolutePath()), - "/etc/nginx/nginx.conf" - ) + "/etc/nginx/nginx.conf") .withExtraHost("host.docker.internal", "host-gateway") .waitingFor( Wait.forHttp("/") .forStatusCode(401) - .withStartupTimeout(Duration.ofSeconds(30)) - ); + .withStartupTimeout(Duration.ofSeconds(30))); } catch (IOException e) { throw new RuntimeException("Failed to create nginx.conf", e); } @@ -134,45 +152,79 @@ public class WithAuth { @Order(1) void testOllamaBehindProxy() { api.setBearerAuth(BEARER_AUTH_TOKEN); - assertTrue(api.ping(), "Expected OllamaAPI to successfully ping through NGINX with valid auth token."); + try { + assertTrue( + api.ping(), + "Expected OllamaAPI to successfully ping through NGINX with valid auth token."); + } catch (Exception e) { + fail("Exception occurred while pinging OllamaAPI through NGINX: " + e.getMessage(), e); + } } @Test @Order(1) void testWithWrongToken() { api.setBearerAuth("wrong-token"); - assertFalse(api.ping(), "Expected OllamaAPI ping to fail through NGINX with an invalid auth token."); + try { + assertFalse( + api.ping(), + "Expected OllamaAPI ping to fail through NGINX with an invalid auth token."); + } catch (Exception e) { + // If an exception is thrown, that's also an expected failure for a wrong token + // (e.g., OllamaBaseException or IOException) + // Optionally, you can assert the type/message of the exception if needed + // For now, we treat any exception as a pass for this negative test + return; + } } @Test @Order(2) void testAskModelWithStructuredOutput() - throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { + throws OllamaException, IOException, InterruptedException, URISyntaxException { api.setBearerAuth(BEARER_AUTH_TOKEN); String model = GENERAL_PURPOSE_MODEL; api.pullModel(model); - String prompt = "The sun is shining brightly and is directly overhead at the zenith, casting my shadow over my foot, so it must be noon."; + String prompt = + "The sun is shining brightly and is directly overhead at the zenith, casting my" + + " shadow over my foot, so it must be noon."; Map format = new HashMap<>(); format.put("type", "object"); - format.put("properties", new HashMap() { - { - put("isNoon", new HashMap() { + format.put( + "properties", + new HashMap() { { - put("type", "boolean"); + put( + "isNoon", + new HashMap() { + { + put("type", "boolean"); + } + }); } }); - } - }); format.put("required", List.of("isNoon")); - OllamaResult result = api.generate(model, prompt, format); + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(model) + .withPrompt(prompt) + .withRaw(false) + .withThink(false) + .withStreaming(false) + .withImages(Collections.emptyList()) + .withOptions(new OptionsBuilder().build()) + .withFormat(format) + .build(); + OllamaGenerateStreamObserver handler = null; + OllamaResult result = api.generate(request, handler); assertNotNull(result); assertNotNull(result.getResponse()); assertFalse(result.getResponse().isEmpty()); - assertEquals(true, result.getStructuredResponse().get("isNoon")); + assertNotNull(result.getStructuredResponse().get("isNoon")); } } diff --git a/src/test/java/io/github/ollama4j/samples/AnnotatedTool.java b/src/test/java/io/github/ollama4j/samples/AnnotatedTool.java index 243a9fe..34f56b2 100644 --- a/src/test/java/io/github/ollama4j/samples/AnnotatedTool.java +++ b/src/test/java/io/github/ollama4j/samples/AnnotatedTool.java @@ -1,21 +1,37 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.samples; import io.github.ollama4j.tools.annotations.ToolProperty; import io.github.ollama4j.tools.annotations.ToolSpec; - import java.math.BigDecimal; +import java.util.Random; public class AnnotatedTool { @ToolSpec(desc = "Computes the most important constant all around the globe!") - public String computeImportantConstant(@ToolProperty(name = "noOfDigits", desc = "Number of digits that shall be returned") Integer noOfDigits) { - return BigDecimal.valueOf((long) (Math.random() * 1000000L), noOfDigits).toString(); + public String computeImportantConstant( + @ToolProperty(name = "noOfDigits", desc = "Number of digits that shall be returned") + Integer noOfDigits) { + return BigDecimal.valueOf((long) (new Random().nextLong() * 1000000L), noOfDigits) + .toString(); } @ToolSpec(desc = "Says hello to a friend!") - public String sayHello(@ToolProperty(name = "name", desc = "Name of the friend") String name, @ToolProperty(name = "numberOfHearts", desc = "number of heart emojis that should be used", required = false) Integer numberOfHearts) { + public String sayHello( + @ToolProperty(name = "name", desc = "Name of the friend") String name, + @ToolProperty( + name = "numberOfHearts", + desc = "number of heart emojis that should be used", + required = false) + Integer numberOfHearts) { String hearts = numberOfHearts != null ? "♡".repeat(numberOfHearts) : ""; return "Hello, " + name + "! " + hearts; } - } diff --git a/src/test/java/io/github/ollama4j/unittests/TestAnnotations.java b/src/test/java/io/github/ollama4j/unittests/TestAnnotations.java index 6f2d18c..4401253 100644 --- a/src/test/java/io/github/ollama4j/unittests/TestAnnotations.java +++ b/src/test/java/io/github/ollama4j/unittests/TestAnnotations.java @@ -1,25 +1,32 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests; +import static org.junit.jupiter.api.Assertions.*; + import io.github.ollama4j.tools.annotations.OllamaToolService; import io.github.ollama4j.tools.annotations.ToolProperty; import io.github.ollama4j.tools.annotations.ToolSpec; -import org.junit.jupiter.api.Test; - import java.lang.reflect.Method; import java.lang.reflect.Parameter; - -import static org.junit.jupiter.api.Assertions.*; +import org.junit.jupiter.api.Test; class TestAnnotations { @OllamaToolService(providers = {SampleProvider.class}) - static class SampleToolService { - } + static class SampleToolService {} static class SampleProvider { @ToolSpec(name = "sum", desc = "adds two numbers") - public int sum(@ToolProperty(name = "a", desc = "first addend") int a, - @ToolProperty(name = "b", desc = "second addend", required = false) int b) { + public int sum( + @ToolProperty(name = "a", desc = "first addend") int a, + @ToolProperty(name = "b", desc = "second addend", required = false) int b) { return a + b; } } @@ -28,7 +35,7 @@ class TestAnnotations { void testOllamaToolServiceProvidersPresent() throws Exception { OllamaToolService ann = SampleToolService.class.getAnnotation(OllamaToolService.class); assertNotNull(ann); - assertArrayEquals(new Class[]{SampleProvider.class}, ann.providers()); + assertArrayEquals(new Class[] {SampleProvider.class}, ann.providers()); } @Test diff --git a/src/test/java/io/github/ollama4j/unittests/TestAuth.java b/src/test/java/io/github/ollama4j/unittests/TestAuth.java index b618b51..0078509 100644 --- a/src/test/java/io/github/ollama4j/unittests/TestAuth.java +++ b/src/test/java/io/github/ollama4j/unittests/TestAuth.java @@ -1,11 +1,20 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + import io.github.ollama4j.models.request.BasicAuth; import io.github.ollama4j.models.request.BearerAuth; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; - class TestAuth { @Test diff --git a/src/test/java/io/github/ollama4j/unittests/TestBooleanToJsonFormatFlagSerializer.java b/src/test/java/io/github/ollama4j/unittests/TestBooleanToJsonFormatFlagSerializer.java index 7aeb915..cb1643a 100644 --- a/src/test/java/io/github/ollama4j/unittests/TestBooleanToJsonFormatFlagSerializer.java +++ b/src/test/java/io/github/ollama4j/unittests/TestBooleanToJsonFormatFlagSerializer.java @@ -1,5 +1,15 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @@ -8,8 +18,6 @@ import io.github.ollama4j.utils.BooleanToJsonFormatFlagSerializer; import io.github.ollama4j.utils.Utils; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.assertEquals; - class TestBooleanToJsonFormatFlagSerializer { static class Holder { diff --git a/src/test/java/io/github/ollama4j/unittests/TestFileToBase64Serializer.java b/src/test/java/io/github/ollama4j/unittests/TestFileToBase64Serializer.java index 15b2298..e8a16f1 100644 --- a/src/test/java/io/github/ollama4j/unittests/TestFileToBase64Serializer.java +++ b/src/test/java/io/github/ollama4j/unittests/TestFileToBase64Serializer.java @@ -1,15 +1,22 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import io.github.ollama4j.utils.FileToBase64Serializer; import io.github.ollama4j.utils.Utils; -import org.junit.jupiter.api.Test; - import java.util.List; - -import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.api.Test; public class TestFileToBase64Serializer { diff --git a/src/test/java/io/github/ollama4j/unittests/TestMockedAPIs.java b/src/test/java/io/github/ollama4j/unittests/TestMockedAPIs.java index f95a2dc..eaeb30b 100644 --- a/src/test/java/io/github/ollama4j/unittests/TestMockedAPIs.java +++ b/src/test/java/io/github/ollama4j/unittests/TestMockedAPIs.java @@ -1,40 +1,48 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests; -import io.github.ollama4j.OllamaAPI; -import io.github.ollama4j.exceptions.OllamaBaseException; -import io.github.ollama4j.exceptions.RoleNotFoundException; -import io.github.ollama4j.models.chat.OllamaChatMessageRole; -import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel; -import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel; -import io.github.ollama4j.models.request.CustomModelRequest; -import io.github.ollama4j.models.response.ModelDetail; -import io.github.ollama4j.models.response.OllamaAsyncResultStreamer; -import io.github.ollama4j.models.response.OllamaResult; -import io.github.ollama4j.types.OllamaModelType; -import io.github.ollama4j.utils.OptionsBuilder; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -import java.io.IOException; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.*; +import io.github.ollama4j.OllamaAPI; +import io.github.ollama4j.exceptions.OllamaException; +import io.github.ollama4j.exceptions.RoleNotFoundException; +import io.github.ollama4j.models.chat.OllamaChatMessageRole; +import io.github.ollama4j.models.embed.OllamaEmbedRequest; +import io.github.ollama4j.models.embed.OllamaEmbedResult; +import io.github.ollama4j.models.generate.OllamaGenerateRequest; +import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder; +import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver; +import io.github.ollama4j.models.request.CustomModelRequest; +import io.github.ollama4j.models.response.ModelDetail; +import io.github.ollama4j.models.response.OllamaAsyncResultStreamer; +import io.github.ollama4j.models.response.OllamaResult; +import io.github.ollama4j.utils.OptionsBuilder; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + class TestMockedAPIs { @Test void testPullModel() { OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; + String model = "llama2"; try { doNothing().when(ollamaAPI).pullModel(model); ollamaAPI.pullModel(model); verify(ollamaAPI, times(1)).pullModel(model); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + } catch (OllamaException e) { throw new RuntimeException(e); } } @@ -46,7 +54,7 @@ class TestMockedAPIs { when(ollamaAPI.listModels()).thenReturn(new ArrayList<>()); ollamaAPI.listModels(); verify(ollamaAPI, times(1)).listModels(); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + } catch (OllamaException e) { throw new RuntimeException(e); } } @@ -54,12 +62,17 @@ class TestMockedAPIs { @Test void testCreateModel() { OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - CustomModelRequest customModelRequest = CustomModelRequest.builder().model("mario").from("llama3.2:latest").system("You are Mario from Super Mario Bros.").build(); + CustomModelRequest customModelRequest = + CustomModelRequest.builder() + .model("mario") + .from("llama3.2:latest") + .system("You are Mario from Super Mario Bros.") + .build(); try { doNothing().when(ollamaAPI).createModel(customModelRequest); ollamaAPI.createModel(customModelRequest); verify(ollamaAPI, times(1)).createModel(customModelRequest); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + } catch (OllamaException e) { throw new RuntimeException(e); } } @@ -67,12 +80,12 @@ class TestMockedAPIs { @Test void testDeleteModel() { OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; + String model = "llama2"; try { doNothing().when(ollamaAPI).deleteModel(model, true); ollamaAPI.deleteModel(model, true); verify(ollamaAPI, times(1)).deleteModel(model, true); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + } catch (OllamaException e) { throw new RuntimeException(e); } } @@ -80,12 +93,12 @@ class TestMockedAPIs { @Test void testGetModelDetails() { OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; + String model = "llama2"; try { when(ollamaAPI.getModelDetails(model)).thenReturn(new ModelDetail()); ollamaAPI.getModelDetails(model); verify(ollamaAPI, times(1)).getModelDetails(model); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + } catch (OllamaException e) { throw new RuntimeException(e); } } @@ -93,13 +106,16 @@ class TestMockedAPIs { @Test void testGenerateEmbeddings() { OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; + String model = "llama2"; String prompt = "some prompt text"; try { - when(ollamaAPI.generateEmbeddings(model, prompt)).thenReturn(new ArrayList<>()); - ollamaAPI.generateEmbeddings(model, prompt); - verify(ollamaAPI, times(1)).generateEmbeddings(model, prompt); - } catch (IOException | OllamaBaseException | InterruptedException e) { + OllamaEmbedRequest m = new OllamaEmbedRequest(); + m.setModel(model); + m.setInput(List.of(prompt)); + when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResult()); + ollamaAPI.embed(m); + verify(ollamaAPI, times(1)).embed(m); + } catch (OllamaException e) { throw new RuntimeException(e); } } @@ -107,13 +123,14 @@ class TestMockedAPIs { @Test void testEmbed() { OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; + String model = "llama2"; List inputs = List.of("some prompt text"); try { - when(ollamaAPI.embed(model, inputs)).thenReturn(new OllamaEmbedResponseModel()); - ollamaAPI.embed(model, inputs); - verify(ollamaAPI, times(1)).embed(model, inputs); - } catch (IOException | OllamaBaseException | InterruptedException e) { + OllamaEmbedRequest m = new OllamaEmbedRequest(model, inputs); + when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResult()); + ollamaAPI.embed(m); + verify(ollamaAPI, times(1)).embed(m); + } catch (OllamaException e) { throw new RuntimeException(e); } } @@ -121,13 +138,14 @@ class TestMockedAPIs { @Test void testEmbedWithEmbedRequestModel() { OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; + String model = "llama2"; List inputs = List.of("some prompt text"); try { - when(ollamaAPI.embed(new OllamaEmbedRequestModel(model, inputs))).thenReturn(new OllamaEmbedResponseModel()); - ollamaAPI.embed(new OllamaEmbedRequestModel(model, inputs)); - verify(ollamaAPI, times(1)).embed(new OllamaEmbedRequestModel(model, inputs)); - } catch (IOException | OllamaBaseException | InterruptedException e) { + when(ollamaAPI.embed(new OllamaEmbedRequest(model, inputs))) + .thenReturn(new OllamaEmbedResult()); + ollamaAPI.embed(new OllamaEmbedRequest(model, inputs)); + verify(ollamaAPI, times(1)).embed(new OllamaEmbedRequest(model, inputs)); + } catch (OllamaException e) { throw new RuntimeException(e); } } @@ -135,15 +153,23 @@ class TestMockedAPIs { @Test void testAsk() { OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; + String model = "llama2"; String prompt = "some prompt text"; - OptionsBuilder optionsBuilder = new OptionsBuilder(); + OllamaGenerateStreamObserver observer = new OllamaGenerateStreamObserver(null, null); try { - when(ollamaAPI.generate(model, prompt, false, false, optionsBuilder.build())) + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(model) + .withPrompt(prompt) + .withRaw(false) + .withThink(false) + .withStreaming(false) + .build(); + when(ollamaAPI.generate(request, observer)) .thenReturn(new OllamaResult("", "", 0, 200)); - ollamaAPI.generate(model, prompt, false, false, optionsBuilder.build()); - verify(ollamaAPI, times(1)).generate(model, prompt, false, false, optionsBuilder.build()); - } catch (IOException | OllamaBaseException | InterruptedException e) { + ollamaAPI.generate(request, observer); + verify(ollamaAPI, times(1)).generate(request, observer); + } catch (OllamaException e) { throw new RuntimeException(e); } } @@ -151,18 +177,25 @@ class TestMockedAPIs { @Test void testAskWithImageFiles() { OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; + String model = "llama2"; String prompt = "some prompt text"; try { - when(ollamaAPI.generateWithImageFiles( - model, prompt, Collections.emptyList(), new OptionsBuilder().build())) - .thenReturn(new OllamaResult("", "", 0, 200)); - ollamaAPI.generateWithImageFiles( - model, prompt, Collections.emptyList(), new OptionsBuilder().build()); - verify(ollamaAPI, times(1)) - .generateWithImageFiles( - model, prompt, Collections.emptyList(), new OptionsBuilder().build()); - } catch (IOException | OllamaBaseException | InterruptedException e) { + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(model) + .withPrompt(prompt) + .withRaw(false) + .withThink(false) + .withStreaming(false) + .withImages(Collections.emptyList()) + .withOptions(new OptionsBuilder().build()) + .withFormat(null) + .build(); + OllamaGenerateStreamObserver handler = null; + when(ollamaAPI.generate(request, handler)).thenReturn(new OllamaResult("", "", 0, 200)); + ollamaAPI.generate(request, handler); + verify(ollamaAPI, times(1)).generate(request, handler); + } catch (Exception e) { throw new RuntimeException(e); } } @@ -170,26 +203,35 @@ class TestMockedAPIs { @Test void testAskWithImageURLs() { OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; + String model = "llama2"; String prompt = "some prompt text"; try { - when(ollamaAPI.generateWithImageURLs( - model, prompt, Collections.emptyList(), new OptionsBuilder().build())) - .thenReturn(new OllamaResult("", "", 0, 200)); - ollamaAPI.generateWithImageURLs( - model, prompt, Collections.emptyList(), new OptionsBuilder().build()); - verify(ollamaAPI, times(1)) - .generateWithImageURLs( - model, prompt, Collections.emptyList(), new OptionsBuilder().build()); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + OllamaGenerateRequest request = + OllamaGenerateRequestBuilder.builder() + .withModel(model) + .withPrompt(prompt) + .withRaw(false) + .withThink(false) + .withStreaming(false) + .withImages(Collections.emptyList()) + .withOptions(new OptionsBuilder().build()) + .withFormat(null) + .build(); + OllamaGenerateStreamObserver handler = null; + when(ollamaAPI.generate(request, handler)).thenReturn(new OllamaResult("", "", 0, 200)); + ollamaAPI.generate(request, handler); + verify(ollamaAPI, times(1)).generate(request, handler); + } catch (OllamaException e) { + throw new RuntimeException(e); + } catch (IOException e) { throw new RuntimeException(e); } } @Test - void testAskAsync() { + void testAskAsync() throws OllamaException { OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; + String model = "llama2"; String prompt = "some prompt text"; when(ollamaAPI.generateAsync(model, prompt, false, false)) .thenReturn(new OllamaAsyncResultStreamer(null, null, 3)); @@ -225,7 +267,8 @@ class TestMockedAPIs { OllamaAPI ollamaAPI = mock(OllamaAPI.class); String roleName = "non-existing-role"; try { - when(ollamaAPI.getRole(roleName)).thenThrow(new RoleNotFoundException("Role not found")); + when(ollamaAPI.getRole(roleName)) + .thenThrow(new RoleNotFoundException("Role not found")); } catch (RoleNotFoundException exception) { throw new RuntimeException("Failed to run test: testGetRoleNotFound"); } diff --git a/src/test/java/io/github/ollama4j/unittests/TestOllamaChatMessage.java b/src/test/java/io/github/ollama4j/unittests/TestOllamaChatMessage.java index 8e2bab6..b2b7925 100644 --- a/src/test/java/io/github/ollama4j/unittests/TestOllamaChatMessage.java +++ b/src/test/java/io/github/ollama4j/unittests/TestOllamaChatMessage.java @@ -1,22 +1,33 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + import io.github.ollama4j.models.chat.OllamaChatMessage; import io.github.ollama4j.models.chat.OllamaChatMessageRole; import org.json.JSONObject; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; - class TestOllamaChatMessage { @Test void testToStringProducesJson() { - OllamaChatMessage msg = new OllamaChatMessage(OllamaChatMessageRole.USER, "hello", null, null, null); + OllamaChatMessage msg = + new OllamaChatMessage(OllamaChatMessageRole.USER, "hello", null, null, null); String json = msg.toString(); JSONObject obj = new JSONObject(json); assertEquals("user", obj.getString("role")); assertEquals("hello", obj.getString("content")); assertTrue(obj.has("tool_calls")); - // thinking and images may or may not be present depending on null handling, just ensure no exception + // thinking and images may or may not be present depending on null handling, just ensure no + // exception } } diff --git a/src/test/java/io/github/ollama4j/unittests/TestOllamaChatMessageRole.java b/src/test/java/io/github/ollama4j/unittests/TestOllamaChatMessageRole.java index 6bdbc03..e53179b 100644 --- a/src/test/java/io/github/ollama4j/unittests/TestOllamaChatMessageRole.java +++ b/src/test/java/io/github/ollama4j/unittests/TestOllamaChatMessageRole.java @@ -1,12 +1,19 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests; +import static org.junit.jupiter.api.Assertions.*; + import io.github.ollama4j.exceptions.RoleNotFoundException; import io.github.ollama4j.models.chat.OllamaChatMessageRole; -import org.junit.jupiter.api.Test; - import java.util.List; - -import static org.junit.jupiter.api.Assertions.*; +import org.junit.jupiter.api.Test; class TestOllamaChatMessageRole { @@ -33,12 +40,14 @@ class TestOllamaChatMessageRole { void testCustomRoleCreationAndLookup() throws Exception { OllamaChatMessageRole custom = OllamaChatMessageRole.newCustomRole("myrole"); assertEquals("myrole", custom.toString()); - // custom roles are registered globally (per current implementation), so lookup should succeed + // custom roles are registered globally (per current implementation), so lookup should + // succeed assertSame(custom, OllamaChatMessageRole.getRole("myrole")); } @Test void testGetRoleThrowsOnUnknown() { - assertThrows(RoleNotFoundException.class, () -> OllamaChatMessageRole.getRole("does-not-exist")); + assertThrows( + RoleNotFoundException.class, () -> OllamaChatMessageRole.getRole("does-not-exist")); } } diff --git a/src/test/java/io/github/ollama4j/unittests/TestOllamaChatRequestBuilder.java b/src/test/java/io/github/ollama4j/unittests/TestOllamaChatRequestBuilder.java index 20ab81c..7b069a6 100644 --- a/src/test/java/io/github/ollama4j/unittests/TestOllamaChatRequestBuilder.java +++ b/src/test/java/io/github/ollama4j/unittests/TestOllamaChatRequestBuilder.java @@ -1,22 +1,29 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests; -import io.github.ollama4j.models.chat.OllamaChatMessage; +import static org.junit.jupiter.api.Assertions.*; + import io.github.ollama4j.models.chat.OllamaChatMessageRole; import io.github.ollama4j.models.chat.OllamaChatRequest; import io.github.ollama4j.models.chat.OllamaChatRequestBuilder; import org.junit.jupiter.api.Test; -import java.util.Collections; - -import static org.junit.jupiter.api.Assertions.*; - class TestOllamaChatRequestBuilder { @Test void testResetClearsMessagesButKeepsModelAndThink() { - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("my-model") - .withThinking(true) - .withMessage(OllamaChatMessageRole.USER, "first"); + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.builder() + .withModel("my-model") + .withThinking(true) + .withMessage(OllamaChatMessageRole.USER, "first"); OllamaChatRequest beforeReset = builder.build(); assertEquals("my-model", beforeReset.getModel()); @@ -30,20 +37,4 @@ class TestOllamaChatRequestBuilder { assertNotNull(afterReset.getMessages()); assertEquals(0, afterReset.getMessages().size()); } - - @Test - void testImageUrlFailuresAreIgnoredAndDoNotBreakBuild() { - // Provide clearly invalid URL, builder logs a warning and continues - OllamaChatRequest req = OllamaChatRequestBuilder.getInstance("m") - .withMessage(OllamaChatMessageRole.USER, "hi", Collections.emptyList(), - "ht!tp://invalid url \n not a uri") - .build(); - - assertNotNull(req.getMessages()); - assertEquals(1, req.getMessages().size()); - OllamaChatMessage msg = req.getMessages().get(0); - // images list will be initialized only if any valid URL was added; for invalid URL list can be null - // We just assert that builder didn't crash and message is present with content - assertEquals("hi", msg.getContent()); - } } diff --git a/src/test/java/io/github/ollama4j/unittests/TestOllamaRequestBody.java b/src/test/java/io/github/ollama4j/unittests/TestOllamaRequestBody.java index 204e1bc..38ac661 100644 --- a/src/test/java/io/github/ollama4j/unittests/TestOllamaRequestBody.java +++ b/src/test/java/io/github/ollama4j/unittests/TestOllamaRequestBody.java @@ -1,15 +1,22 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests; +import static org.junit.jupiter.api.Assertions.assertEquals; + import io.github.ollama4j.utils.OllamaRequestBody; import io.github.ollama4j.utils.Utils; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.concurrent.Flow; - -import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.api.Test; class TestOllamaRequestBody { @@ -30,29 +37,39 @@ class TestOllamaRequestBody { var publisher = req.getBodyPublisher(); StringBuilder data = new StringBuilder(); - publisher.subscribe(new Flow.Subscriber<>() { - @Override - public void onSubscribe(Flow.Subscription subscription) { - subscription.request(Long.MAX_VALUE); - } + publisher.subscribe( + new Flow.Subscriber<>() { + @Override + public void onSubscribe(Flow.Subscription subscription) { + subscription.request(Long.MAX_VALUE); + } - @Override - public void onNext(ByteBuffer item) { - data.append(StandardCharsets.UTF_8.decode(item)); - } + @Override + public void onNext(ByteBuffer item) { + data.append(StandardCharsets.UTF_8.decode(item)); + } - @Override - public void onError(Throwable throwable) { - } + @Override + // This method is intentionally left empty because, for this test, + // we do not expect any errors to occur during synchronous publishing. + // If an error does occur, the test will fail elsewhere. + public void onError(Throwable throwable) { + // No action needed for this test + } - @Override - public void onComplete() { - } - }); + @Override + public void onComplete() { + // This method is intentionally left empty because, for this test, + // we do not need to perform any action when the publishing completes. + // The assertion is performed after subscription, and no cleanup or + // further processing is required here. + } + }); // Trigger the publishing by converting it to a string via the same mapper for determinism String expected = Utils.getObjectMapper().writeValueAsString(req); - // Due to asynchronous nature, expected content already delivered synchronously by StringPublisher + // Due to asynchronous nature, expected content already delivered synchronously by + // StringPublisher assertEquals(expected, data.toString()); } } diff --git a/src/test/java/io/github/ollama4j/unittests/TestOllamaToolsResult.java b/src/test/java/io/github/ollama4j/unittests/TestOllamaToolsResult.java index 5ff36be..9e7f451 100644 --- a/src/test/java/io/github/ollama4j/unittests/TestOllamaToolsResult.java +++ b/src/test/java/io/github/ollama4j/unittests/TestOllamaToolsResult.java @@ -1,15 +1,22 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests; +import static org.junit.jupiter.api.Assertions.*; + import io.github.ollama4j.models.response.OllamaResult; import io.github.ollama4j.tools.OllamaToolsResult; import io.github.ollama4j.tools.ToolFunctionCallSpec; -import org.junit.jupiter.api.Test; - import java.util.LinkedHashMap; import java.util.List; import java.util.Map; - -import static org.junit.jupiter.api.Assertions.*; +import org.junit.jupiter.api.Test; public class TestOllamaToolsResult { diff --git a/src/test/java/io/github/ollama4j/unittests/TestOptionsAndUtils.java b/src/test/java/io/github/ollama4j/unittests/TestOptionsAndUtils.java index 63efc71..3973a08 100644 --- a/src/test/java/io/github/ollama4j/unittests/TestOptionsAndUtils.java +++ b/src/test/java/io/github/ollama4j/unittests/TestOptionsAndUtils.java @@ -1,40 +1,48 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests; +import static org.junit.jupiter.api.Assertions.*; + import io.github.ollama4j.utils.Options; import io.github.ollama4j.utils.OptionsBuilder; import io.github.ollama4j.utils.PromptBuilder; import io.github.ollama4j.utils.Utils; -import org.junit.jupiter.api.Test; - import java.io.File; import java.util.Map; - -import static org.junit.jupiter.api.Assertions.*; +import org.junit.jupiter.api.Test; class TestOptionsAndUtils { @Test void testOptionsBuilderSetsValues() { - Options options = new OptionsBuilder() - .setMirostat(1) - .setMirostatEta(0.2f) - .setMirostatTau(4.5f) - .setNumCtx(1024) - .setNumGqa(8) - .setNumGpu(2) - .setNumThread(6) - .setRepeatLastN(32) - .setRepeatPenalty(1.2f) - .setTemperature(0.7f) - .setSeed(42) - .setStop("STOP") - .setTfsZ(1.5f) - .setNumPredict(256) - .setTopK(50) - .setTopP(0.95f) - .setMinP(0.05f) - .setCustomOption("custom_param", 123) - .build(); + Options options = + new OptionsBuilder() + .setMirostat(1) + .setMirostatEta(0.2f) + .setMirostatTau(4.5f) + .setNumCtx(1024) + .setNumGqa(8) + .setNumGpu(2) + .setNumThread(6) + .setRepeatLastN(32) + .setRepeatPenalty(1.2f) + .setTemperature(0.7f) + .setSeed(42) + .setStop("STOP") + .setTfsZ(1.5f) + .setNumPredict(256) + .setTopK(50) + .setTopP(0.95f) + .setMinP(0.05f) + .setCustomOption("custom_param", 123) + .build(); Map map = options.getOptionsMap(); assertEquals(1, map.get("mirostat")); @@ -59,20 +67,26 @@ class TestOptionsAndUtils { @Test void testOptionsBuilderRejectsUnsupportedCustomType() { - OptionsBuilder builder = new OptionsBuilder(); - assertThrows(IllegalArgumentException.class, () -> builder.setCustomOption("bad", new Object())); + assertThrows( + IllegalArgumentException.class, + () -> { + OptionsBuilder builder = new OptionsBuilder(); + builder.setCustomOption("bad", new Object()); + }); } @Test void testPromptBuilderBuildsExpectedString() { - String prompt = new PromptBuilder() - .add("Hello") - .addLine(", world!") - .addSeparator() - .add("Continue.") - .build(); + String prompt = + new PromptBuilder() + .add("Hello") + .addLine(", world!") + .addSeparator() + .add("Continue.") + .build(); - String expected = "Hello, world!\n\n--------------------------------------------------\nContinue."; + String expected = + "Hello, world!\n\n--------------------------------------------------\nContinue."; assertEquals(expected, prompt); } @@ -80,7 +94,8 @@ class TestOptionsAndUtils { void testUtilsGetObjectMapperSingletonAndModule() { assertSame(Utils.getObjectMapper(), Utils.getObjectMapper()); // Basic serialization sanity check with JavaTimeModule registered - assertDoesNotThrow(() -> Utils.getObjectMapper().writeValueAsString(java.time.OffsetDateTime.now())); + assertDoesNotThrow( + () -> Utils.getObjectMapper().writeValueAsString(java.time.OffsetDateTime.now())); } @Test diff --git a/src/test/java/io/github/ollama4j/unittests/TestReflectionalToolFunction.java b/src/test/java/io/github/ollama4j/unittests/TestReflectionalToolFunction.java index 9bd47a7..ca75691 100644 --- a/src/test/java/io/github/ollama4j/unittests/TestReflectionalToolFunction.java +++ b/src/test/java/io/github/ollama4j/unittests/TestReflectionalToolFunction.java @@ -1,14 +1,21 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests; -import io.github.ollama4j.tools.ReflectionalToolFunction; -import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; +import io.github.ollama4j.tools.ReflectionalToolFunction; import java.lang.reflect.Method; import java.math.BigDecimal; import java.util.LinkedHashMap; import java.util.Map; - -import static org.junit.jupiter.api.Assertions.*; +import org.junit.jupiter.api.Test; class TestReflectionalToolFunction { @@ -25,7 +32,9 @@ class TestReflectionalToolFunction { @Test void testApplyInvokesMethodWithTypeCasting() throws Exception { SampleToolHolder holder = new SampleToolHolder(); - Method method = SampleToolHolder.class.getMethod("combine", Integer.class, Boolean.class, BigDecimal.class, String.class); + Method method = + SampleToolHolder.class.getMethod( + "combine", Integer.class, Boolean.class, BigDecimal.class, String.class); LinkedHashMap propDef = new LinkedHashMap<>(); // preserve order to match method parameters @@ -36,12 +45,13 @@ class TestReflectionalToolFunction { ReflectionalToolFunction fn = new ReflectionalToolFunction(holder, method, propDef); - Map args = Map.of( - "i", "42", - "b", "true", - "d", "3.14", - "s", 123 // not a string; should be toString()'d by implementation - ); + Map args = + Map.of( + "i", "42", + "b", "true", + "d", "3.14", + "s", 123 // not a string; should be toString()'d by implementation + ); Object result = fn.apply(args); assertEquals("i=42,b=true,d=3.14,s=123", result); @@ -50,7 +60,9 @@ class TestReflectionalToolFunction { @Test void testTypeCastNullsWhenClassOrValueIsNull() throws Exception { SampleToolHolder holder = new SampleToolHolder(); - Method method = SampleToolHolder.class.getMethod("combine", Integer.class, Boolean.class, BigDecimal.class, String.class); + Method method = + SampleToolHolder.class.getMethod( + "combine", Integer.class, Boolean.class, BigDecimal.class, String.class); LinkedHashMap propDef = new LinkedHashMap<>(); propDef.put("i", null); // className null -> expect null passed diff --git a/src/test/java/io/github/ollama4j/unittests/TestToolRegistry.java b/src/test/java/io/github/ollama4j/unittests/TestToolRegistry.java deleted file mode 100644 index b4d20e1..0000000 --- a/src/test/java/io/github/ollama4j/unittests/TestToolRegistry.java +++ /dev/null @@ -1,48 +0,0 @@ -package io.github.ollama4j.unittests; - -import io.github.ollama4j.tools.ToolFunction; -import io.github.ollama4j.tools.ToolRegistry; -import io.github.ollama4j.tools.Tools; -import org.junit.jupiter.api.Test; - -import java.util.Map; - -import static org.junit.jupiter.api.Assertions.*; - -class TestToolRegistry { - - @Test - void testAddAndGetToolFunction() { - ToolRegistry registry = new ToolRegistry(); - ToolFunction fn = args -> "ok:" + args.get("x"); - - Tools.ToolSpecification spec = Tools.ToolSpecification.builder() - .functionName("test") - .functionDescription("desc") - .toolFunction(fn) - .build(); - - registry.addTool("test", spec); - ToolFunction retrieved = registry.getToolFunction("test"); - assertNotNull(retrieved); - assertEquals("ok:42", retrieved.apply(Map.of("x", 42))); - } - - @Test - void testGetUnknownReturnsNull() { - ToolRegistry registry = new ToolRegistry(); - assertNull(registry.getToolFunction("nope")); - } - - @Test - void testClearRemovesAll() { - ToolRegistry registry = new ToolRegistry(); - registry.addTool("a", Tools.ToolSpecification.builder().toolFunction(args -> 1).build()); - registry.addTool("b", Tools.ToolSpecification.builder().toolFunction(args -> 2).build()); - assertFalse(registry.getRegisteredSpecs().isEmpty()); - registry.clear(); - assertTrue(registry.getRegisteredSpecs().isEmpty()); - assertNull(registry.getToolFunction("a")); - assertNull(registry.getToolFunction("b")); - } -} diff --git a/src/test/java/io/github/ollama4j/unittests/TestToolsPromptBuilder.java b/src/test/java/io/github/ollama4j/unittests/TestToolsPromptBuilder.java deleted file mode 100644 index 3b273e7..0000000 --- a/src/test/java/io/github/ollama4j/unittests/TestToolsPromptBuilder.java +++ /dev/null @@ -1,64 +0,0 @@ -package io.github.ollama4j.unittests; - -import com.fasterxml.jackson.core.JsonProcessingException; -import io.github.ollama4j.tools.Tools; -import org.junit.jupiter.api.Test; - -import java.util.List; -import java.util.Map; - -import static org.junit.jupiter.api.Assertions.*; - -class TestToolsPromptBuilder { - - @Test - void testPromptBuilderIncludesToolsAndPrompt() throws JsonProcessingException { - Tools.PromptFuncDefinition.Property cityProp = Tools.PromptFuncDefinition.Property.builder() - .type("string") - .description("city name") - .required(true) - .build(); - - Tools.PromptFuncDefinition.Property unitsProp = Tools.PromptFuncDefinition.Property.builder() - .type("string") - .description("units") - .enumValues(List.of("metric", "imperial")) - .required(false) - .build(); - - Tools.PromptFuncDefinition.Parameters params = Tools.PromptFuncDefinition.Parameters.builder() - .type("object") - .properties(Map.of("city", cityProp, "units", unitsProp)) - .build(); - - Tools.PromptFuncDefinition.PromptFuncSpec spec = Tools.PromptFuncDefinition.PromptFuncSpec.builder() - .name("getWeather") - .description("Get weather for a city") - .parameters(params) - .build(); - - Tools.PromptFuncDefinition def = Tools.PromptFuncDefinition.builder() - .type("function") - .function(spec) - .build(); - - Tools.ToolSpecification toolSpec = Tools.ToolSpecification.builder() - .functionName("getWeather") - .functionDescription("Get weather for a city") - .toolPrompt(def) - .build(); - - Tools.PromptBuilder pb = new Tools.PromptBuilder() - .withToolSpecification(toolSpec) - .withPrompt("Tell me the weather."); - - String built = pb.build(); - assertTrue(built.contains("[AVAILABLE_TOOLS]")); - assertTrue(built.contains("[/AVAILABLE_TOOLS]")); - assertTrue(built.contains("[INST]")); - assertTrue(built.contains("Tell me the weather.")); - assertTrue(built.contains("\"name\":\"getWeather\"")); - assertTrue(built.contains("\"required\":[\"city\"]")); - assertTrue(built.contains("\"enum\":[\"metric\",\"imperial\"]")); - } -} diff --git a/src/test/java/io/github/ollama4j/unittests/jackson/AbstractSerializationTest.java b/src/test/java/io/github/ollama4j/unittests/jackson/AbstractSerializationTest.java index 8476ca0..904b78e 100644 --- a/src/test/java/io/github/ollama4j/unittests/jackson/AbstractSerializationTest.java +++ b/src/test/java/io/github/ollama4j/unittests/jackson/AbstractSerializationTest.java @@ -1,12 +1,20 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests.jackson; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import io.github.ollama4j.utils.Utils; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.fail; - public abstract class AbstractSerializationTest { protected ObjectMapper mapper = Utils.getObjectMapper(); @@ -29,8 +37,7 @@ public abstract class AbstractSerializationTest { } } - protected void assertEqualsAfterUnmarshalling(T unmarshalledObject, - T req) { + protected void assertEqualsAfterUnmarshalling(T unmarshalledObject, T req) { assertEquals(req, unmarshalledObject); } } diff --git a/src/test/java/io/github/ollama4j/unittests/jackson/TestChatRequestSerialization.java b/src/test/java/io/github/ollama4j/unittests/jackson/TestChatRequestSerialization.java index 984bc22..ec6721b 100644 --- a/src/test/java/io/github/ollama4j/unittests/jackson/TestChatRequestSerialization.java +++ b/src/test/java/io/github/ollama4j/unittests/jackson/TestChatRequestSerialization.java @@ -1,19 +1,26 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests.jackson; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrowsExactly; + import io.github.ollama4j.models.chat.OllamaChatMessageRole; import io.github.ollama4j.models.chat.OllamaChatRequest; import io.github.ollama4j.models.chat.OllamaChatRequestBuilder; import io.github.ollama4j.utils.OptionsBuilder; -import org.json.JSONObject; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - import java.io.File; import java.util.Collections; import java.util.List; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrowsExactly; +import org.json.JSONObject; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class TestChatRequestSerialization extends AbstractSerializationTest { @@ -21,29 +28,36 @@ public class TestChatRequestSerialization extends AbstractSerializationTest { - OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") - .withOptions(b.setCustomOption("cust_obj", new Object()).build()) - .build(); - }); + assertThrowsExactly( + IllegalArgumentException.class, + () -> { + builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") + .withOptions(b.setCustomOption("cust_obj", new Object()).build()) + .build(); + }); } @Test public void testWithJsonFormat() { - OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") - .withGetJsonResponse().build(); + OllamaChatRequest req = + builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") + .withGetJsonResponse() + .build(); String jsonRequest = serialize(req); - // no jackson deserialization as format property is not boolean ==> omit as deserialization + // no jackson deserialization as format property is not boolean ==> omit as + // deserialization // of request is never used in real code anyways JSONObject jsonObject = new JSONObject(jsonRequest); String requestFormatProperty = jsonObject.getString("format"); @@ -108,8 +128,7 @@ public class TestChatRequestSerialization extends AbstractSerializationTest { +class TestEmbedRequestSerialization extends AbstractSerializationTest { private OllamaEmbedRequestBuilder builder; @@ -19,19 +27,18 @@ public class TestEmbedRequestSerialization extends AbstractSerializationTest { +class TestGenerateRequestSerialization extends AbstractSerializationTest { private OllamaGenerateRequestBuilder builder; @BeforeEach public void init() { - builder = OllamaGenerateRequestBuilder.getInstance("DummyModel"); + builder = OllamaGenerateRequestBuilder.builder().withModel("Dummy Model"); } @Test @@ -33,22 +41,22 @@ public class TestGenerateRequestSerialization extends AbstractSerializationTest< builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build(); String jsonRequest = serialize(req); - OllamaGenerateRequest deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequest.class); + OllamaGenerateRequest deserializeRequest = + deserialize(jsonRequest, OllamaGenerateRequest.class); assertEqualsAfterUnmarshalling(deserializeRequest, req); assertEquals(1, deserializeRequest.getOptions().get("mirostat")); } @Test public void testWithJsonFormat() { - OllamaGenerateRequest req = - builder.withPrompt("Some prompt").withGetJsonResponse().build(); + OllamaGenerateRequest req = builder.withPrompt("Some prompt").withGetJsonResponse().build(); String jsonRequest = serialize(req); + System.out.printf(jsonRequest); // no jackson deserialization as format property is not boolean ==> omit as deserialization // of request is never used in real code anyways JSONObject jsonObject = new JSONObject(jsonRequest); String requestFormatProperty = jsonObject.getString("format"); assertEquals("json", requestFormatProperty); } - } diff --git a/src/test/java/io/github/ollama4j/unittests/jackson/TestModelPullResponseSerialization.java b/src/test/java/io/github/ollama4j/unittests/jackson/TestModelPullResponseSerialization.java index 59d601d..c981bf1 100644 --- a/src/test/java/io/github/ollama4j/unittests/jackson/TestModelPullResponseSerialization.java +++ b/src/test/java/io/github/ollama4j/unittests/jackson/TestModelPullResponseSerialization.java @@ -1,17 +1,25 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests.jackson; +import static org.junit.jupiter.api.Assertions.*; + import io.github.ollama4j.models.response.ModelPullResponse; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; - /** * Test serialization and deserialization of ModelPullResponse, * This test verifies that the ModelPullResponse class can properly parse * error responses from Ollama server that return HTTP 200 with error messages * in the JSON body. */ -public class TestModelPullResponseSerialization extends AbstractSerializationTest { +class TestModelPullResponseSerialization extends AbstractSerializationTest { /** * Test the specific error case reported in GitHub issue #138. @@ -20,7 +28,16 @@ public class TestModelPullResponseSerialization extends AbstractSerializationTes @Test public void testDeserializationWithErrorFromGitHubIssue138() { // This is the exact error JSON from GitHub issue #138 - String errorJson = "{\"error\":\"pull model manifest: 412: \\n\\nThe model you are attempting to pull requires a newer version of Ollama.\\n\\nPlease download the latest version at:\\n\\n\\thttps://ollama.com/download\\n\\n\"}"; + String errorJson = + "{\"error\":\"pull model manifest: 412: \\n" + + "\\n" + + "The model you are attempting to pull requires a newer version of Ollama.\\n" + + "\\n" + + "Please download the latest version at:\\n" + + "\\n" + + "\\thttps://ollama.com/download\\n" + + "\\n" + + "\"}"; ModelPullResponse response = deserialize(errorJson, ModelPullResponse.class); @@ -59,7 +76,9 @@ public class TestModelPullResponseSerialization extends AbstractSerializationTes */ @Test public void testDeserializationWithProgressFields() { - String progressJson = "{\"status\":\"pulling digestname\",\"digest\":\"sha256:abc123\",\"total\":2142590208,\"completed\":241970}"; + String progressJson = + "{\"status\":\"pulling" + + " digestname\",\"digest\":\"sha256:abc123\",\"total\":2142590208,\"completed\":241970}"; ModelPullResponse response = deserialize(progressJson, ModelPullResponse.class); @@ -95,7 +114,8 @@ public class TestModelPullResponseSerialization extends AbstractSerializationTes */ @Test public void testDeserializationWithAllFields() { - String completeJson = "{\"status\":\"downloading\",\"digest\":\"sha256:def456\",\"total\":1000000,\"completed\":500000,\"error\":null}"; + String completeJson = + "{\"status\":\"downloading\",\"digest\":\"sha256:def456\",\"total\":1000000,\"completed\":500000,\"error\":null}"; ModelPullResponse response = deserialize(completeJson, ModelPullResponse.class); @@ -115,7 +135,9 @@ public class TestModelPullResponseSerialization extends AbstractSerializationTes @Test public void testDeserializationWithUnknownFields() { // Test that unknown fields are ignored due to @JsonIgnoreProperties(ignoreUnknown = true) - String jsonWithUnknownFields = "{\"status\":\"pulling\",\"unknown_field\":\"should_be_ignored\",\"error\":\"test error\",\"another_unknown\":123,\"nested_unknown\":{\"key\":\"value\"}}"; + String jsonWithUnknownFields = + "{\"status\":\"pulling\",\"unknown_field\":\"should_be_ignored\",\"error\":\"test" + + " error\",\"another_unknown\":123,\"nested_unknown\":{\"key\":\"value\"}}"; ModelPullResponse response = deserialize(jsonWithUnknownFields, ModelPullResponse.class); @@ -227,21 +249,25 @@ public class TestModelPullResponseSerialization extends AbstractSerializationTes String errorJson = "{\"error\":\"test error\"}"; ModelPullResponse errorResponse = deserialize(errorJson, ModelPullResponse.class); - assertTrue(errorResponse.getError() != null && !errorResponse.getError().trim().isEmpty(), + assertTrue( + errorResponse.getError() != null && !errorResponse.getError().trim().isEmpty(), "Error response should trigger error handling logic"); // Normal case - should not trigger error handling String normalJson = "{\"status\":\"pulling\"}"; ModelPullResponse normalResponse = deserialize(normalJson, ModelPullResponse.class); - assertFalse(normalResponse.getError() != null && !normalResponse.getError().trim().isEmpty(), + assertFalse( + normalResponse.getError() != null && !normalResponse.getError().trim().isEmpty(), "Normal response should not trigger error handling logic"); // Empty error case - should not trigger error handling String emptyErrorJson = "{\"error\":\"\",\"status\":\"pulling\"}"; ModelPullResponse emptyErrorResponse = deserialize(emptyErrorJson, ModelPullResponse.class); - assertFalse(emptyErrorResponse.getError() != null && !emptyErrorResponse.getError().trim().isEmpty(), + assertFalse( + emptyErrorResponse.getError() != null + && !emptyErrorResponse.getError().trim().isEmpty(), "Empty error response should not trigger error handling logic"); } } diff --git a/src/test/java/io/github/ollama4j/unittests/jackson/TestModelRequestSerialization.java b/src/test/java/io/github/ollama4j/unittests/jackson/TestModelRequestSerialization.java index 961dd43..a48dc33 100644 --- a/src/test/java/io/github/ollama4j/unittests/jackson/TestModelRequestSerialization.java +++ b/src/test/java/io/github/ollama4j/unittests/jackson/TestModelRequestSerialization.java @@ -1,33 +1,45 @@ +/* + * Ollama4j - Java library for interacting with Ollama server. + * Copyright (c) 2025 Amith Koujalgi and contributors. + * + * Licensed under the MIT License (the "License"); + * you may not use this file except in compliance with the License. + * +*/ package io.github.ollama4j.unittests.jackson; +import static org.junit.jupiter.api.Assertions.*; + import io.github.ollama4j.models.response.Model; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; - public class TestModelRequestSerialization extends AbstractSerializationTest { @Test public void testDeserializationOfModelResponseWithOffsetTime() { - String serializedTestStringWithOffsetTime = "{\n" + - " \"name\": \"codellama:13b\",\n" + - " \"modified_at\": \"2023-11-04T14:56:49.277302595-07:00\",\n" + - " \"size\": 7365960935,\n" + - " \"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n" + - " \"details\": {\n" + - " \"format\": \"gguf\",\n" + - " \"family\": \"llama\",\n" + - " \"families\": null,\n" + - " \"parameter_size\": \"13B\",\n" + - " \"quantization_level\": \"Q4_0\"\n" + - " }\n" + - "}"; + String serializedTestStringWithOffsetTime = + "{\n" + + " \"name\": \"codellama:13b\",\n" + + " \"modified_at\": \"2023-11-04T14:56:49.277302595-07:00\",\n" + + " \"size\": 7365960935,\n" + + " \"digest\":" + + " \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n" + + " \"details\": {\n" + + " \"format\": \"gguf\",\n" + + " \"family\": \"llama\",\n" + + " \"families\": null,\n" + + " \"parameter_size\": \"13B\",\n" + + " \"quantization_level\": \"Q4_0\"\n" + + " }\n" + + "}"; Model model = deserialize(serializedTestStringWithOffsetTime, Model.class); assertNotNull(model); assertEquals("codellama:13b", model.getName()); assertEquals("2023-11-04T21:56:49.277302595Z", model.getModifiedAt().toString()); assertEquals(7365960935L, model.getSize()); - assertEquals("9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697", model.getDigest()); + assertEquals( + "9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697", + model.getDigest()); assertNotNull(model.getModelMeta()); assertEquals("gguf", model.getModelMeta().getFormat()); assertEquals("llama", model.getModelMeta().getFamily()); @@ -38,25 +50,29 @@ public class TestModelRequestSerialization extends AbstractSerializationTest - %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} %msg%n + %d{HH:mm:ss.SSS} [%thread] %-5level %logger %msg%n - + + + + + + + + + diff --git a/src/test/resources/test-config.properties b/src/test/resources/test-config.properties index bfa0251..62f46dd 100644 --- a/src/test/resources/test-config.properties +++ b/src/test/resources/test-config.properties @@ -1,4 +1,4 @@ -ollama.url=http://localhost:11434 -ollama.model=llama3.2:1b -ollama.model.image=llava:latest -ollama.request-timeout-seconds=120 \ No newline at end of file +USE_EXTERNAL_OLLAMA_HOST=true +OLLAMA_HOST=http://192.168.29.229:11434/ +REQUEST_TIMEOUT_SECONDS=120 +NUMBER_RETRIES_FOR_MODEL_PULL=3 \ No newline at end of file