forked from Mirror/ollama4j

Compare commits

48 commits (author and date columns were not captured)

| SHA1 |
|---|
| a05052e095 |
| 10eb803e26 |
| bd2da8fdda |
| b0bb082bec |
| 81f564ef7f |
| 006b52f3db |
| 16634e60e4 |
| db8b73075b |
| dc9f79959a |
| 88f6d00763 |
| fd3a989a49 |
| 7580c6a549 |
| 9e6503d84b |
| ee21f7fdd8 |
| ecc295f484 |
| c528fef5fc |
| 38f1bda105 |
| d8a703503a |
| dd9ba7c937 |
| cf52c9610c |
| e8d709e99a |
| 51fbedad69 |
| 953605fa73 |
| 30bfdd9c6d |
| 91ee6cb4c1 |
| 8ef6fac28e |
| d9e3860123 |
| 515d1f0399 |
| be549430c5 |
| 4744315d45 |
| 8eea19a539 |
| b5801d84e0 |
| 165d04b1bb |
| 16d2160b52 |
| e39c47b8e1 |
| bb0785140b |
| e33ad1a1e3 |
| cd60c506cb |
| b55925df28 |
| 3a9b8c309d |
| bf07159522 |
| f8ca4d041d |
| 9c6a55f7b0 |
| 2866d83a2f |
| 45e5d07581 |
| 3a264cb6bb |
| 54edba144c |
| 9224d2da06 |
.github/workflows/maven-publish.yml (vendored): 67 changes

@@ -1,68 +1,41 @@
 # This workflow will build a package using Maven and then publish it to GitHub packages when a release is created
 # For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path
 
-name: Test and Publish Package
+name: Release Artifacts
 
-#on:
-#  release:
-#    types: [ "created" ]
-
 on:
-  push:
-    branches: [ "main" ]
-  workflow_dispatch:
+  release:
+    types: [ created ]
 
 jobs:
   build:
 
     runs-on: ubuntu-latest
     permissions:
-      contents: write
+      contents: read
       packages: write
 
     steps:
      - uses: actions/checkout@v3
-     - name: Set up JDK 11
+     - name: Set up JDK 17
       uses: actions/setup-java@v3
       with:
-        java-version: '11'
-        distribution: 'adopt-hotspot'
+        java-version: '17'
+        distribution: 'temurin'
         server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
         settings-path: ${{ github.workspace }} # location for the settings.xml file
 
+     - name: Find and Replace
+       uses: jacobtomlinson/gha-find-replace@v3
+       with:
+         find: "ollama4j-revision"
+         replace: ${{ github.ref_name }}
+         regex: false
+
      - name: Build with Maven
        run: mvn --file pom.xml -U clean package -Punit-tests
-     - name: Set up Apache Maven Central (Overwrite settings.xml)
-       uses: actions/setup-java@v3
-       with: # running setup-java again overwrites the settings.xml
-         java-version: '11'
-         distribution: 'adopt-hotspot'
-         cache: 'maven'
-         server-id: ossrh
-         server-username: MAVEN_USERNAME
-         server-password: MAVEN_PASSWORD
-         gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
-         gpg-passphrase: MAVEN_GPG_PASSPHRASE
-     - name: Set up Maven cache
-       uses: actions/cache@v3
-       with:
-         path: ~/.m2/repository
-         key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
-         restore-keys: |
-           ${{ runner.os }}-maven-
-     - name: Build
-       run: mvn -B -ntp clean install
-     - name: Upload coverage reports to Codecov
-       uses: codecov/codecov-action@v3
-       env:
-         CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      - name: Publish to GitHub Packages Apache Maven
-       # if: >
-       #   github.event_name != 'pull_request' &&
-       #   github.ref_name == 'main' &&
-       #   contains(github.event.head_commit.message, 'release')
-       run: |
-         git config --global user.email "koujalgi.amith@gmail.com"
-         git config --global user.name "amithkoujalgi"
-         mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform
+       run: mvn deploy -s $GITHUB_WORKSPACE/settings.xml --file pom.xml
       env:
-        MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
-        MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
-        MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
+        GITHUB_TOKEN: ${{ github.token }}
.github/workflows/publish-docs.yml (vendored): 5 changes

@@ -2,9 +2,8 @@
 name: Deploy Docs to GH Pages
 
 on:
-  # Runs on pushes targeting the default branch
-  push:
-    branches: [ "main" ]
+  release:
+    types: [ created ]
 
   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:
.github/workflows/publish-javadoc.yml (vendored): 52 changes (file deleted)

@@ -1,52 +0,0 @@
# Simple workflow for deploying static content to GitHub Pages
name: Deploy Javadoc content to Pages

on:
  # Runs on pushes targeting the default branch
  push:
    branches: [ "none" ]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write
  packages: write
# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  # Single deploy job since we're just deploying
  deploy:
    runs-on: ubuntu-latest

    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 11
        uses: actions/setup-java@v3
        with:
          java-version: '11'
          distribution: 'adopt-hotspot'
          server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
          settings-path: ${{ github.workspace }} # location for the settings.xml file
      - name: Build with Maven
        run: mvn --file pom.xml -U clean package
      - name: Setup Pages
        uses: actions/configure-pages@v3
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v2
        with:
          # Upload entire repository
          path: './target/apidocs/.'
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v2
README.md: 125 changes

@@ -1,3 +1,5 @@
+<div style="text-align: center">
+
 ### Ollama4j
 
 <img src='https://raw.githubusercontent.com/amithkoujalgi/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon">
@@ -9,23 +11,36 @@ Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/).
 
 
 
+
+
 
 
-
 
 
-
 
 [![codecov](https://codecov.io/gh/amithkoujalgi/ollama4j/graph/badge.svg?token=U0TE7BGP8L)](https://codecov.io/gh/amithkoujalgi/ollama4j)
 
+
+
+
+
+
 
 
 
+</div>
+
+[//]: # ()
+
+[//]: # ()
 
 ## Table of Contents
 
 - [How does it work?](#how-does-it-work)
 - [Requirements](#requirements)
 - [Installation](#installation)
-- [API Spec](#api-spec)
-- [Demo APIs](#try-out-the-apis-with-ollama-server)
+- [API Spec](https://amithkoujalgi.github.io/ollama4j/docs/category/apis---model-management)
+- [Javadoc Spec](https://amithkoujalgi.github.io/ollama4j/apidocs/)
 - [Development](#development)
 - [Contributions](#get-involved)
 - [References](#references)
@@ -60,22 +75,88 @@ Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/).
 
 #### Installation
 
-In your Maven project, add this dependency:
+Check the releases [here](https://github.com/amithkoujalgi/ollama4j/releases).
+
+
+
+##### For Maven
+
+1. In your Maven project, add this dependency:
 
 ```xml
 
 <dependency>
     <groupId>io.github.amithkoujalgi</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.57</version>
+    <version>v1.0.74</version>
 </dependency>
 ```
 
-Latest release:
-
-![GitHub release (with filter)](https://img.shields.io/github/v/release/amithkoujalgi/ollama4j)
-
-[![][lib-shield]][lib]
+2. Add repository to your project's pom.xml:
+
+```xml
+
+<repositories>
+    <repository>
+        <id>github</id>
+        <name>GitHub Apache Maven Packages</name>
+        <url>https://maven.pkg.github.com/amithkoujalgi/ollama4j</url>
+        <releases>
+            <enabled>true</enabled>
+        </releases>
+        <snapshots>
+            <enabled>true</enabled>
+        </snapshots>
+    </repository>
+</repositories>
+```
+
+3. Add GitHub server to settings.xml. (Usually available at ~/.m2/settings.xml)
+
+```xml
+
+<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
+                      http://maven.apache.org/xsd/settings-1.0.0.xsd">
+    <servers>
+        <server>
+            <id>github</id>
+            <username>YOUR-USERNAME</username>
+            <password>YOUR-TOKEN</password>
+        </server>
+    </servers>
+</settings>
+```
+
+##### For Gradle
+
+In your Gradle project, add the dependency using the Kotlin DSL or the Groovy DSL:
+
+```kotlin
+dependencies {
+
+    val ollama4jVersion = "1.0.74"
+
+    implementation("io.github.amithkoujalgi:ollama4j:$ollama4jVersion")
+}
+```
+
+```groovy
+dependencies {
+    implementation("io.github.amithkoujalgi:ollama4j:1.0.74")
+}
+```
+
+[//]: # (Latest release:)
+
+[//]: # ()
+
+[//]: # ()
+
+[//]: # ()
+
+[//]: # ([![][lib-shield]][lib])
 
 [lib]: https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j
 
@@ -134,6 +215,9 @@ Actions CI workflow.
 - [x] Use lombok
 - [x] Update request body creation with Java objects
 - [ ] Async APIs for images
+- [ ] Support for function calling with models like Mistral
+    - [x] generate in sync mode
+    - [ ] generate in async mode
 - [ ] Add custom headers to requests
 - [x] Add additional params for `ask` APIs such as:
     - [x] `options`: additional model parameters for the Modelfile such as `temperature` -
@@ -153,11 +237,28 @@ Contributions are most welcome! Whether it's reporting a bug, proposing an enhan
 with code - any sort
 of contribution is much appreciated.
 
+### References
+
+- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)
+
 ### Credits
 
 The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/)
 project.
 
-### References
-
-- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)
+<div style="text-align: center">
+
+**Thanks to the amazing contributors**
+
+<a href="https://github.com/amithkoujalgi/ollama4j/graphs/contributors">
+  <img src="https://contrib.rocks/image?repo=amithkoujalgi/ollama4j" />
+</a>
+
+### Appreciate my work?
+
+<a href="https://www.buymeacoffee.com/amithkoujalgi" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a>
+
+</div>
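As context for the installation instructions changed above, here is a minimal first-call sketch using the library. It is not part of this change set; it assumes a local Ollama server on the default port, and it uses the `generate(model, prompt, raw, options)` signature introduced later in this diff, so treat package names and parameters as illustrative.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; // package assumed; adjust to your version
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class QuickStart {
    public static void main(String[] args) throws Exception {
        // Assumes Ollama is running locally; change the host if yours differs.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        ollamaAPI.setRequestTimeoutSeconds(60);

        // 'raw = false' keeps Ollama's normal prompt templating; the raw flag is new in this change set.
        OllamaResult result = ollamaAPI.generate("mistral",
                "Why is the sky blue?", false, new OptionsBuilder().build());
        System.out.println(result.getResponse());
    }
}
```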
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 2
+sidebar_position: 3
 ---
 
 # Generate - Async
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 3
+sidebar_position: 4
 ---
 
 # Generate - With Image Files
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 4
+sidebar_position: 5
 ---
 
 # Generate - With Image URLs
docs/docs/apis-generate/generate-with-tools.md (new file): 271 lines

@@ -0,0 +1,271 @@
---
sidebar_position: 2
---

# Generate - With Tools

This API lets you perform [function calling](https://docs.mistral.ai/capabilities/function_calling/) using LLMs in a
synchronous way.
This API correlates to
the [generate](https://github.com/ollama/ollama/blob/main/docs/api.md#request-raw-mode) API with `raw` mode.

:::note

This is only an experimental implementation and has a very basic design.

Currently, built and tested for [Mistral's latest model](https://ollama.com/library/mistral) only. We could redesign
this in the future if tooling is supported for more models with a generic interaction standard from Ollama.

:::

### Function Calling/Tools

Assume you want to call a method in your code based on the response generated from the model.
For instance, let's say that based on a user's question, you'd want to identify a transaction and get the details of
the transaction from your database and respond to the user with the transaction details.

You could do that with ease with the `function calling` capabilities of the models by registering your `tools`.

### Create Functions

This function takes the arguments `location` and `fuelType`, performs an operation with these arguments, and returns a
value.

```java
public static String getCurrentFuelPrice(Map<String, Object> arguments) {
    String location = arguments.get("location").toString();
    String fuelType = arguments.get("fuelType").toString();
    return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
}
```

This function takes the argument `city`, performs an operation with the argument, and returns a value.

```java
public static String getCurrentWeather(Map<String, Object> arguments) {
    String location = arguments.get("city").toString();
    return "Currently " + location + "'s weather is nice.";
}
```

### Define Tool Specifications

Let's define a sample tool specification called **Fuel Price Tool** for getting the current fuel price.

- Specify the function `name`, `description`, and `required` properties (`location` and `fuelType`).
- Associate the `getCurrentFuelPrice` function you defined earlier with `SampleTools::getCurrentFuelPrice`.

```java
MistralTools.ToolSpecification fuelPriceToolSpecification = MistralTools.ToolSpecification.builder()
        .functionName("current-fuel-price")
        .functionDesc("Get current fuel price")
        .props(
                new MistralTools.PropsBuilder()
                        .withProperty("location", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
                        .withProperty("fuelType", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
                        .build()
        )
        .toolDefinition(SampleTools::getCurrentFuelPrice)
        .build();
```

Let's also define a sample tool specification called **Weather Tool** for getting the current weather.

- Specify the function `name`, `description`, and `required` property (`city`).
- Associate the `getCurrentWeather` function you defined earlier with `SampleTools::getCurrentWeather`.

```java
MistralTools.ToolSpecification weatherToolSpecification = MistralTools.ToolSpecification.builder()
        .functionName("current-weather")
        .functionDesc("Get current weather")
        .props(
                new MistralTools.PropsBuilder()
                        .withProperty("city", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
                        .build()
        )
        .toolDefinition(SampleTools::getCurrentWeather)
        .build();
```

### Register the Tools

Register the defined tools (`fuel price` and `weather`) with the OllamaAPI.

```java
ollamaAPI.registerTool(fuelPriceToolSpecification);
ollamaAPI.registerTool(weatherToolSpecification);
```

### Create prompt with Tools

`Prompt 1`: Create a prompt asking for the petrol price in Bengaluru using the defined fuel price and weather tools.

```java
String prompt1 = new MistralTools.PromptBuilder()
        .withToolSpecification(fuelPriceToolSpecification)
        .withToolSpecification(weatherToolSpecification)
        .withPrompt("What is the petrol price in Bengaluru?")
        .build();
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, false, new OptionsBuilder().build());
for (Map.Entry<ToolDef, Object> r : toolsResult.getToolResults().entrySet()) {
    System.out.printf("[Response from tool '%s']: %s%n", r.getKey().getName(), r.getValue().toString());
}
```

Now, fire away your question to the model.

You will get a response similar to:

::::tip[LLM Response]

[Response from tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L

::::

`Prompt 2`: Create a prompt asking for the current weather in Bengaluru using the same tools.

```java
String prompt2 = new MistralTools.PromptBuilder()
        .withToolSpecification(fuelPriceToolSpecification)
        .withToolSpecification(weatherToolSpecification)
        .withPrompt("What is the current weather in Bengaluru?")
        .build();
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, false, new OptionsBuilder().build());
for (Map.Entry<ToolDef, Object> r : toolsResult.getToolResults().entrySet()) {
    System.out.printf("[Response from tool '%s']: %s%n", r.getKey().getName(), r.getValue().toString());
}
```

Again, fire away your question to the model.

You will get a response similar to:

::::tip[LLM Response]

[Response from tool 'current-weather']: Currently Bengaluru's weather is nice
::::

### Full Example

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.tools.ToolDef;
import io.github.amithkoujalgi.ollama4j.core.tools.MistralTools;
import io.github.amithkoujalgi.ollama4j.core.tools.OllamaToolsResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

import java.io.IOException;
import java.util.Arrays;
import java.util.Map;

public class FunctionCallingWithMistral {
    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);
        ollamaAPI.setRequestTimeoutSeconds(60);

        String model = "mistral";

        MistralTools.ToolSpecification fuelPriceToolSpecification = MistralTools.ToolSpecification.builder()
                .functionName("current-fuel-price")
                .functionDesc("Get current fuel price")
                .props(
                        new MistralTools.PropsBuilder()
                                .withProperty("location", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
                                .withProperty("fuelType", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
                                .build()
                )
                .toolDefinition(SampleTools::getCurrentFuelPrice)
                .build();

        MistralTools.ToolSpecification weatherToolSpecification = MistralTools.ToolSpecification.builder()
                .functionName("current-weather")
                .functionDesc("Get current weather")
                .props(
                        new MistralTools.PropsBuilder()
                                .withProperty("city", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
                                .build()
                )
                .toolDefinition(SampleTools::getCurrentWeather)
                .build();

        ollamaAPI.registerTool(fuelPriceToolSpecification);
        ollamaAPI.registerTool(weatherToolSpecification);

        String prompt1 = new MistralTools.PromptBuilder()
                .withToolSpecification(fuelPriceToolSpecification)
                .withToolSpecification(weatherToolSpecification)
                .withPrompt("What is the petrol price in Bengaluru?")
                .build();
        String prompt2 = new MistralTools.PromptBuilder()
                .withToolSpecification(fuelPriceToolSpecification)
                .withToolSpecification(weatherToolSpecification)
                .withPrompt("What is the current weather in Bengaluru?")
                .build();

        ask(ollamaAPI, model, prompt1);
        ask(ollamaAPI, model, prompt2);
    }

    public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException {
        OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, false, new OptionsBuilder().build());
        for (Map.Entry<ToolDef, Object> r : toolsResult.getToolResults().entrySet()) {
            System.out.printf("[Response from tool '%s']: %s%n", r.getKey().getName(), r.getValue().toString());
        }
    }
}

class SampleTools {
    public static String getCurrentFuelPrice(Map<String, Object> arguments) {
        String location = arguments.get("location").toString();
        String fuelType = arguments.get("fuelType").toString();
        return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
    }

    public static String getCurrentWeather(Map<String, Object> arguments) {
        String location = arguments.get("city").toString();
        return "Currently " + location + "'s weather is nice.";
    }
}
```

Run this full example and you will get a response similar to:

::::tip[LLM Response]

[Response from tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L

[Response from tool 'current-weather']: Currently Bengaluru's weather is nice
::::

### Room for improvement

Instead of explicitly registering `ollamaAPI.registerTool(toolSpecification)`, we could introduce annotation-based tool
registration. For example:

```java
@ToolSpec(name = "current-fuel-price", desc = "Get current fuel price")
public String getCurrentFuelPrice(Map<String, Object> arguments) {
    String location = arguments.get("location").toString();
    String fuelType = arguments.get("fuelType").toString();
    return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
}
```

Instead of passing a map of args `Map<String, Object> arguments` to the tool functions, we could support passing
specific args separately with their data types. For example:

```java
public String getCurrentFuelPrice(String location, String fuelType) {
    return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
}
```

Updating async/chat APIs with support for tool-based generation.
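The "Room for improvement" section above proposes annotation-based tool registration. As a rough, hypothetical illustration of that idea (not part of this change set), registration could be driven by reflection over methods carrying the proposed `@ToolSpec` annotation and wired to the `DynamicFunction` interface that this change set introduces; the annotation and the `registerAnnotatedTools` helper below are assumptions, not library API.

```java
import io.github.amithkoujalgi.ollama4j.core.tools.DynamicFunction;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Method;
import java.util.Map;

// Hypothetical annotation mirroring the @ToolSpec example in the doc above.
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@interface ToolSpec {
    String name();

    String desc();
}

class AnnotatedToolScanner {
    // Sketch: find @ToolSpec methods on a tools object and hand them to a registry map.
    // In ollama4j terms, each entry would ultimately be registered the way
    // ollamaAPI.registerTool(...) registers a ToolSpecification today.
    static void registerAnnotatedTools(Object tools, Map<String, DynamicFunction> registry) {
        for (Method method : tools.getClass().getDeclaredMethods()) {
            ToolSpec spec = method.getAnnotation(ToolSpec.class);
            if (spec == null) {
                continue;
            }
            // Wrap the annotated method as a DynamicFunction taking the usual argument map.
            registry.put(spec.name(), arguments -> {
                try {
                    return method.invoke(tools, arguments);
                } catch (Exception e) {
                    return "Error calling tool: " + e.getMessage();
                }
            });
        }
    }
}
```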
@@ -11,7 +11,7 @@ the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#gener
 Use the `OptionBuilder` to build the `Options` object
 with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
 Refer
-to [this](/docs/apis-extras/options-builder).
+to [this](/apis-extras/options-builder).
 
 ## Try asking a question about the model.
 
@@ -53,18 +53,19 @@ public class Main {
         OllamaAPI ollamaAPI = new OllamaAPI(host);
         // define a stream handler (Consumer<String>)
         OllamaStreamHandler streamHandler = (s) -> {
             System.out.println(s);
         };
 
         // Should be called using seperate thread to gain non blocking streaming effect.
         OllamaResult result = ollamaAPI.generate(config.getModel(),
                 "What is the capital of France? And what's France's connection with Mona Lisa?",
                 new OptionsBuilder().build(), streamHandler);
 
-        System.out.println("Full response: " +result.getResponse());
+        System.out.println("Full response: " + result.getResponse());
     }
 }
 ```
 
 You will get a response similar to:
 
 > The
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 5
+sidebar_position: 6
 ---
 
 # Prompt Builder
@@ -40,6 +40,8 @@ const config = {
         /** @type {import('@docusaurus/preset-classic').Options} */
         ({
           docs: {
+            path: 'docs',
+            routeBasePath: '', // change this to any URL route you'd want. For example: `home` - if you want /home/intro.
             sidebarPath: './sidebars.js',
             // Please change this to your repo.
             // Remove this to remove the "edit this page" links.
@@ -96,7 +98,7 @@ const config = {
           items: [
             {
               label: 'Tutorial',
-              to: '/docs/intro',
+              to: '/intro',
             },
           ],
         },
docs/package-lock.json (generated): 1947 changes

File diff suppressed because it is too large.
@@ -14,9 +14,9 @@
     "write-heading-ids": "docusaurus write-heading-ids"
   },
   "dependencies": {
-    "@docusaurus/core": "3.0.1",
-    "@docusaurus/preset-classic": "3.0.1",
-    "@docusaurus/theme-mermaid": "^3.0.1",
+    "@docusaurus/core": "^3.4.0",
+    "@docusaurus/preset-classic": "^3.4.0",
+    "@docusaurus/theme-mermaid": "^3.4.0",
     "@mdx-js/react": "^3.0.0",
     "clsx": "^2.0.0",
     "prism-react-renderer": "^2.3.0",
@@ -24,8 +24,8 @@
     "react-dom": "^18.0.0"
   },
   "devDependencies": {
-    "@docusaurus/module-type-aliases": "3.0.1",
-    "@docusaurus/types": "3.0.1"
+    "@docusaurus/module-type-aliases": "^3.4.0",
+    "@docusaurus/types": "^3.4.0"
   },
   "browserslist": {
     "production": [
@@ -19,7 +19,7 @@ function HomepageHeader() {
       <div className={styles.buttons}>
         <Link
           className="button button--secondary button--lg"
-          to="/docs/intro">
+          to="/intro">
           Getting Started
         </Link>
       </div>
maven-publish.yml (new file): 68 lines

@@ -0,0 +1,68 @@
## This workflow will build a package using Maven and then publish it to GitHub packages when a release is created
## For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path
#
#name: Test and Publish Package
#
##on:
##  release:
##    types: [ "created" ]
#
#on:
#  push:
#    branches: [ "main" ]
#  workflow_dispatch:
#
#jobs:
#  build:
#    runs-on: ubuntu-latest
#    permissions:
#      contents: write
#      packages: write
#    steps:
#      - uses: actions/checkout@v3
#      - name: Set up JDK 11
#        uses: actions/setup-java@v3
#        with:
#          java-version: '11'
#          distribution: 'adopt-hotspot'
#          server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
#          settings-path: ${{ github.workspace }} # location for the settings.xml file
#      - name: Build with Maven
#        run: mvn --file pom.xml -U clean package -Punit-tests
#      - name: Set up Apache Maven Central (Overwrite settings.xml)
#        uses: actions/setup-java@v3
#        with: # running setup-java again overwrites the settings.xml
#          java-version: '11'
#          distribution: 'adopt-hotspot'
#          cache: 'maven'
#          server-id: ossrh
#          server-username: MAVEN_USERNAME
#          server-password: MAVEN_PASSWORD
#          gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
#          gpg-passphrase: MAVEN_GPG_PASSPHRASE
#      - name: Set up Maven cache
#        uses: actions/cache@v3
#        with:
#          path: ~/.m2/repository
#          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
#          restore-keys: |
#            ${{ runner.os }}-maven-
#      - name: Build
#        run: mvn -B -ntp clean install
#      - name: Upload coverage reports to Codecov
#        uses: codecov/codecov-action@v3
#        env:
#          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
#      - name: Publish to GitHub Packages Apache Maven
#        # if: >
#        #   github.event_name != 'pull_request' &&
#        #   github.ref_name == 'main' &&
#        #   contains(github.event.head_commit.message, 'release')
#        run: |
#          git config --global user.email "koujalgi.amith@gmail.com"
#          git config --global user.name "amithkoujalgi"
#          mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform
#        env:
#          MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
#          MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
#          MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
pom.xml: 115 changes

@@ -1,14 +1,16 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <groupId>io.github.amithkoujalgi</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.70</version>
+    <version>ollama4j-revision</version>
 
     <name>Ollama4j</name>
     <description>Java library for interacting with Ollama API.</description>
     <url>https://github.com/amithkoujalgi/ollama4j</url>
+    <packaging>jar</packaging>
 
     <properties>
         <maven.compiler.source>11</maven.compiler.source>
@@ -39,7 +41,7 @@
         <connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
         <developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
         <url>https://github.com/amithkoujalgi/ollama4j</url>
-        <tag>v1.0.70</tag>
+        <tag>v1.0.16</tag>
     </scm>
 
     <build>
@@ -127,15 +129,15 @@
                 </execution>
             </executions>
         </plugin>
-        <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-release-plugin</artifactId>
-            <version>3.0.1</version>
-            <configuration>
-                <!-- <goals>install</goals>-->
-                <tagNameFormat>v@{project.version}</tagNameFormat>
-            </configuration>
-        </plugin>
+        <!-- <plugin>-->
+        <!-- <groupId>org.apache.maven.plugins</groupId>-->
+        <!-- <artifactId>maven-release-plugin</artifactId>-->
+        <!-- <version>3.0.1</version>-->
+        <!-- <configuration>-->
+        <!-- <!– <goals>install</goals>–>-->
+        <!-- <tagNameFormat>v@{project.version}</tagNameFormat>-->
+        <!-- </configuration>-->
+        <!-- </plugin>-->
         </plugins>
     </build>
@@ -159,7 +161,7 @@
         <dependency>
             <groupId>ch.qos.logback</groupId>
             <artifactId>logback-classic</artifactId>
-            <version>1.4.12</version>
+            <version>1.5.6</version>
             <scope>test</scope>
         </dependency>
         <dependency>
@@ -187,14 +189,23 @@
         </dependency>
     </dependencies>
 
+    <!-- <distributionManagement>-->
+    <!-- <snapshotRepository>-->
+    <!-- <id>ossrh</id>-->
+    <!-- <url>https://s01.oss.sonatype.org/content/repositories/snapshots</url>-->
+    <!-- </snapshotRepository>-->
+    <!-- <repository>-->
+    <!-- <id>ossrh</id>-->
+    <!-- <url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2</url>-->
+    <!-- </repository>-->
+    <!-- </distributionManagement>-->
+
+    <!-- Replaced publishing packages to GitHub Packages instead of Maven central -->
     <distributionManagement>
-        <snapshotRepository>
-            <id>ossrh</id>
-            <url>https://s01.oss.sonatype.org/content/repositories/snapshots</url>
-        </snapshotRepository>
         <repository>
-            <id>ossrh</id>
-            <url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2</url>
+            <id>github</id>
+            <name>GitHub Packages</name>
+            <url>https://maven.pkg.github.com/amithkoujalgi/ollama4j</url>
         </repository>
     </distributionManagement>
 
@@ -250,39 +261,39 @@
             </properties>
             <build>
                 <plugins>
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-gpg-plugin</artifactId>
-                        <version>3.1.0</version>
-                        <executions>
-                            <execution>
-                                <id>sign-artifacts</id>
-                                <phase>verify</phase>
-                                <goals>
-                                    <goal>sign</goal>
-                                </goals>
-                                <configuration>
-                                    <!-- Prevent gpg from using pinentry programs. Fixes:
-                                        gpg: signing failed: Inappropriate ioctl for device -->
-                                    <gpgArguments>
-                                        <arg>--pinentry-mode</arg>
-                                        <arg>loopback</arg>
-                                    </gpgArguments>
-                                </configuration>
-                            </execution>
-                        </executions>
-                    </plugin>
-                    <plugin>
-                        <groupId>org.sonatype.plugins</groupId>
-                        <artifactId>nexus-staging-maven-plugin</artifactId>
-                        <version>1.6.13</version>
-                        <extensions>true</extensions>
-                        <configuration>
-                            <serverId>ossrh</serverId>
-                            <nexusUrl>https://s01.oss.sonatype.org/</nexusUrl>
-                            <autoReleaseAfterClose>true</autoReleaseAfterClose>
-                        </configuration>
-                    </plugin>
+                    <!-- <plugin>-->
+                    <!-- <groupId>org.apache.maven.plugins</groupId>-->
+                    <!-- <artifactId>maven-gpg-plugin</artifactId>-->
+                    <!-- <version>3.1.0</version>-->
+                    <!-- <executions>-->
+                    <!-- <execution>-->
+                    <!-- <id>sign-artifacts</id>-->
+                    <!-- <phase>verify</phase>-->
+                    <!-- <goals>-->
+                    <!-- <goal>sign</goal>-->
+                    <!-- </goals>-->
+                    <!-- <configuration>-->
+                    <!-- <!– Prevent gpg from using pinentry programs. Fixes:-->
+                    <!-- gpg: signing failed: Inappropriate ioctl for device –>-->
+                    <!-- <gpgArguments>-->
+                    <!-- <arg>--pinentry-mode</arg>-->
+                    <!-- <arg>loopback</arg>-->
+                    <!-- </gpgArguments>-->
+                    <!-- </configuration>-->
+                    <!-- </execution>-->
+                    <!-- </executions>-->
+                    <!-- </plugin>-->
+                    <!-- <plugin>-->
+                    <!-- <groupId>org.sonatype.plugins</groupId>-->
+                    <!-- <artifactId>nexus-staging-maven-plugin</artifactId>-->
+                    <!-- <version>1.6.13</version>-->
+                    <!-- <extensions>true</extensions>-->
+                    <!-- <configuration>-->
+                    <!-- <serverId>ossrh</serverId>-->
+                    <!-- <nexusUrl>https://s01.oss.sonatype.org/</nexusUrl>-->
+                    <!-- <autoReleaseAfterClose>true</autoReleaseAfterClose>-->
+                    <!-- </configuration>-->
+                    <!-- </plugin>-->
 
                     <plugin>
                         <groupId>org.jacoco</groupId>
@@ -10,6 +10,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingRe
 import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
 import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
 import io.github.amithkoujalgi.ollama4j.core.models.request.*;
+import io.github.amithkoujalgi.ollama4j.core.tools.*;
 import io.github.amithkoujalgi.ollama4j.core.utils.Options;
 import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 import org.slf4j.Logger;
@@ -25,9 +26,7 @@ import java.net.http.HttpResponse;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.time.Duration;
-import java.util.ArrayList;
-import java.util.Base64;
-import java.util.List;
+import java.util.*;
 
 /**
  * The base Ollama API class.
@@ -339,6 +338,7 @@ public class OllamaAPI {
     }
   }
 
+
   /**
    * Generate response for a question to a model running on Ollama server. This is a sync/blocking
    * call.
@@ -351,9 +351,10 @@ public class OllamaAPI {
   * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
   * @return OllamaResult that includes response text and time taken for response
   */
-  public OllamaResult generate(String model, String prompt, Options options, OllamaStreamHandler streamHandler)
+  public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler)
       throws OllamaBaseException, IOException, InterruptedException {
     OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
+    ollamaRequestModel.setRaw(raw);
     ollamaRequestModel.setOptions(options.getOptionsMap());
     return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
   }
@@ -361,13 +362,37 @@ public class OllamaAPI {
   /**
    * Convenience method to call Ollama API without streaming responses.
    * <p>
-   * Uses {@link #generate(String, String, Options, OllamaStreamHandler)}
+   * Uses {@link #generate(String, String, boolean, Options, OllamaStreamHandler)}
+   *
+   * @param model   Model to use
+   * @param prompt  Prompt text
+   * @param raw     In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the raw parameter to disable templating. Also note that raw mode will not return a context.
+   * @param options Additional Options
+   * @return OllamaResult
    */
-  public OllamaResult generate(String model, String prompt, Options options)
+  public OllamaResult generate(String model, String prompt, boolean raw, Options options)
       throws OllamaBaseException, IOException, InterruptedException {
-    return generate(model, prompt, options, null);
+    return generate(model, prompt, raw, options, null);
   }
 
+
+  public OllamaToolsResult generateWithTools(String model, String prompt, boolean raw, Options options)
+      throws OllamaBaseException, IOException, InterruptedException {
+    OllamaToolsResult toolResult = new OllamaToolsResult();
+    Map<ToolDef, Object> toolResults = new HashMap<>();
+
+    OllamaResult result = generate(model, prompt, raw, options, null);
+    toolResult.setModelResult(result);
+
+    List<ToolDef> toolDefs = Utils.getObjectMapper().readValue(result.getResponse(), Utils.getObjectMapper().getTypeFactory().constructCollectionType(List.class, ToolDef.class));
+    for (ToolDef toolDef : toolDefs) {
+      toolResults.put(toolDef, invokeTool(toolDef));
+    }
+    toolResult.setToolResults(toolResults);
+    return toolResult;
+  }
+
+
   /**
    * Generate response for a question to a model running on Ollama server and get a callback handle
    * that can be used to check for status and get the response from the model later. This would be
@@ -377,9 +402,9 @@ public class OllamaAPI {
   * @param prompt the prompt/question text
   * @return the ollama async result callback handle
   */
-  public OllamaAsyncResultCallback generateAsync(String model, String prompt) {
+  public OllamaAsyncResultCallback generateAsync(String model, String prompt, boolean raw) {
     OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
+    ollamaRequestModel.setRaw(raw);
     URI uri = URI.create(this.host + "/api/generate");
     OllamaAsyncResultCallback ollamaAsyncResultCallback =
         new OllamaAsyncResultCallback(
@@ -576,4 +601,24 @@ public class OllamaAPI {
   private boolean isBasicAuthCredentialsSet() {
     return basicAuth != null;
   }
+
+
+  public void registerTool(MistralTools.ToolSpecification toolSpecification) {
+    ToolRegistry.addFunction(toolSpecification.getFunctionName(), toolSpecification.getToolDefinition());
+  }
+
+  private Object invokeTool(ToolDef toolDef) {
+    try {
+      String methodName = toolDef.getName();
+      Map<String, Object> arguments = toolDef.getArguments();
+      DynamicFunction function = ToolRegistry.getFunction(methodName);
+      if (function == null) {
+        throw new IllegalArgumentException("No such tool: " + methodName);
+      }
+      return function.apply(arguments);
+    } catch (Exception e) {
+      e.printStackTrace();
+      return "Error calling tool: " + e.getMessage();
+    }
+  }
 }
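The new `registerTool` and `invokeTool` methods above delegate to a `ToolRegistry` class that is not included in this compare view. A minimal registry consistent with the two calls used here (`addFunction` and `getFunction`) might look roughly like the sketch below; the actual class in the repository may differ.

```java
package io.github.amithkoujalgi.ollama4j.core.tools;

import java.util.HashMap;
import java.util.Map;

// Sketch of the ToolRegistry referenced by OllamaAPI.registerTool/invokeTool in this diff.
// Only the two methods the diff actually calls are assumed; everything else is guesswork.
public class ToolRegistry {
    private static final Map<String, DynamicFunction> functionMap = new HashMap<>();

    public static void addFunction(String name, DynamicFunction function) {
        functionMap.put(name, function);
    }

    public static DynamicFunction getFunction(String name) {
        return functionMap.get(name);
    }
}
```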
@@ -1,6 +1,7 @@
 package io.github.amithkoujalgi.ollama4j.core.models;
 
 import java.time.LocalDateTime;
+import java.time.OffsetDateTime;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
@@ -13,9 +14,9 @@ public class Model {
     private String name;
     private String model;
     @JsonProperty("modified_at")
-    private LocalDateTime modifiedAt;
+    private OffsetDateTime modifiedAt;
     @JsonProperty("expires_at")
-    private LocalDateTime expiresAt;
+    private OffsetDateTime expiresAt;
     private String digest;
     private long size;
     @JsonProperty("details")
@@ -1,9 +1,5 @@
 package io.github.amithkoujalgi.ollama4j.core.models.request;
 
-import java.io.IOException;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import com.fasterxml.jackson.core.JsonProcessingException;
 import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
 import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
@@ -13,8 +9,12 @@ import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRespo
 import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver;
 import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
 import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{
+import java.io.IOException;
+
+public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {
 
     private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class);
 
@@ -31,24 +31,22 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{
 
     @Override
     protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
         try {
             OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
             responseBuffer.append(ollamaResponseModel.getResponse());
-            if(streamObserver != null) {
+            if (streamObserver != null) {
                 streamObserver.notify(ollamaResponseModel);
             }
             return ollamaResponseModel.isDone();
         } catch (JsonProcessingException e) {
-            LOG.error("Error parsing the Ollama chat response!",e);
+            LOG.error("Error parsing the Ollama chat response!", e);
             return true;
         }
     }
 
     public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
             throws OllamaBaseException, IOException, InterruptedException {
         streamObserver = new OllamaGenerateStreamObserver(streamHandler);
         return super.callSync(body);
     }
 
 
 }
@@ -0,0 +1,8 @@
package io.github.amithkoujalgi.ollama4j.core.tools;

import java.util.Map;

@FunctionalInterface
public interface DynamicFunction {
    Object apply(Map<String, Object> arguments);
}
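For illustration only (not part of this changeset), a DynamicFunction can be supplied as a lambda; the "city" argument name below is made up for the sketch.

    // Hypothetical tool implementation: answers from a "city" argument.
    DynamicFunction getWeather = arguments -> "It is sunny in " + arguments.get("city");
    Object answer = getWeather.apply(Map.of("city", "Paris"));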
@@ -0,0 +1,139 @@
package io.github.amithkoujalgi.ollama4j.core.tools;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Builder;
import lombok.Data;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MistralTools {
    @Data
    @Builder
    public static class ToolSpecification {
        private String functionName;
        private String functionDesc;
        private Map<String, PromptFuncDefinition.Property> props;
        private DynamicFunction toolDefinition;
    }

    @Data
    @JsonIgnoreProperties(ignoreUnknown = true)
    public static class PromptFuncDefinition {
        private String type;
        private PromptFuncSpec function;

        @Data
        public static class PromptFuncSpec {
            private String name;
            private String description;
            private Parameters parameters;
        }

        @Data
        public static class Parameters {
            private String type;
            private Map<String, Property> properties;
            private List<String> required;
        }

        @Data
        @Builder
        public static class Property {
            private String type;
            private String description;
            @JsonProperty("enum")
            @JsonInclude(JsonInclude.Include.NON_NULL)
            private List<String> enumValues;
            @JsonIgnore
            private boolean required;
        }
    }

    public static class PropsBuilder {
        private final Map<String, PromptFuncDefinition.Property> props = new HashMap<>();

        public PropsBuilder withProperty(String key, PromptFuncDefinition.Property property) {
            props.put(key, property);
            return this;
        }

        public Map<String, PromptFuncDefinition.Property> build() {
            return props;
        }
    }

    public static class PromptBuilder {
        private final List<PromptFuncDefinition> tools = new ArrayList<>();

        private String promptText;

        public String build() throws JsonProcessingException {
            return "[AVAILABLE_TOOLS] " + Utils.getObjectMapper().writeValueAsString(tools) + "[/AVAILABLE_TOOLS][INST] " + promptText + " [/INST]";
        }

        public PromptBuilder withPrompt(String prompt) throws JsonProcessingException {
            promptText = prompt;
            return this;
        }

        public PromptBuilder withToolSpecification(ToolSpecification spec) {
            PromptFuncDefinition def = new PromptFuncDefinition();
            def.setType("function");

            PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec();
            functionDetail.setName(spec.getFunctionName());
            functionDetail.setDescription(spec.getFunctionDesc());

            PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters();
            parameters.setType("object");
            parameters.setProperties(spec.getProps());

            List<String> requiredValues = new ArrayList<>();
            for (Map.Entry<String, PromptFuncDefinition.Property> p : spec.getProps().entrySet()) {
                if (p.getValue().isRequired()) {
                    requiredValues.add(p.getKey());
                }
            }
            parameters.setRequired(requiredValues);
            functionDetail.setParameters(parameters);
            def.setFunction(functionDetail);

            tools.add(def);
            return this;
        }
//
//        public PromptBuilder withToolSpecification(String functionName, String functionDesc, Map<String, PromptFuncDefinition.Property> props) {
//            PromptFuncDefinition def = new PromptFuncDefinition();
//            def.setType("function");
//
//            PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec();
//            functionDetail.setName(functionName);
//            functionDetail.setDescription(functionDesc);
//
//            PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters();
//            parameters.setType("object");
//            parameters.setProperties(props);
//
//            List<String> requiredValues = new ArrayList<>();
//            for (Map.Entry<String, PromptFuncDefinition.Property> p : props.entrySet()) {
//                if (p.getValue().isRequired()) {
//                    requiredValues.add(p.getKey());
//                }
//            }
//            parameters.setRequired(requiredValues);
//            functionDetail.setParameters(parameters);
//            def.setFunction(functionDetail);
//
//            tools.add(def);
//            return this;
//        }
    }
}
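A sketch of how the builders above could be combined into a tool-aware prompt; the function name, property names and prompt text are made up for this illustration and are not part of the diff.

    // Illustrative only: compose a [AVAILABLE_TOOLS]...[INST]... prompt string.
    static String buildWeatherPrompt() throws JsonProcessingException {
        MistralTools.ToolSpecification spec = MistralTools.ToolSpecification.builder()
                .functionName("get-current-weather")
                .functionDesc("Returns the current weather for a city")
                .props(new MistralTools.PropsBuilder()
                        .withProperty("city", MistralTools.PromptFuncDefinition.Property.builder()
                                .type("string")
                                .description("The city to look up")
                                .required(true)
                                .build())
                        .build())
                .build();
        return new MistralTools.PromptBuilder()
                .withToolSpecification(spec)
                .withPrompt("What is the weather like in Paris today?")
                .build();
    }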
@@ -0,0 +1,16 @@
package io.github.amithkoujalgi.ollama4j.core.tools;

import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.Map;

@Data
@NoArgsConstructor
@AllArgsConstructor
public class OllamaToolsResult {
    private OllamaResult modelResult;
    private Map<ToolDef, Object> toolResults;
}
@@ -0,0 +1,18 @@
package io.github.amithkoujalgi.ollama4j.core.tools;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.Map;

@Data
@AllArgsConstructor
@NoArgsConstructor
public class ToolDef {

    private String name;
    private Map<String, Object> arguments;

}
@@ -0,0 +1,17 @@
package io.github.amithkoujalgi.ollama4j.core.tools;

import java.util.HashMap;
import java.util.Map;

public class ToolRegistry {
    private static final Map<String, DynamicFunction> functionMap = new HashMap<>();

    public static DynamicFunction getFunction(String name) {
        return functionMap.get(name);
    }

    public static void addFunction(String name, DynamicFunction function) {
        functionMap.put(name, function);
    }
}
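A sketch (not part of the diff) of how the new tool classes could be wired together; how the library itself dispatches registered functions is not shown in these hunks, and the function name and arguments below are made up.

    // Register a function under a name, then resolve and invoke it from a ToolDef.
    ToolRegistry.addFunction("get-current-weather",
            arguments -> "It is sunny in " + arguments.get("city"));

    ToolDef call = new ToolDef("get-current-weather", Map.of("city", "Paris"));
    Object output = ToolRegistry.getFunction(call.getName()).apply(call.getArguments());

    // OllamaToolsResult pairs the model's answer with the tool outputs; the null
    // model result here is only a placeholder for the sketch.
    OllamaToolsResult toolsResult = new OllamaToolsResult(null, Map.of(call, output));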
@@ -9,6 +9,9 @@ package io.github.amithkoujalgi.ollama4j.core.types;
@SuppressWarnings("ALL")
public class OllamaModelType {
    public static final String GEMMA = "gemma";
    public static final String GEMMA2 = "gemma2";

    public static final String LLAMA2 = "llama2";
    public static final String LLAMA3 = "llama3";
    public static final String MISTRAL = "mistral";
@@ -30,6 +33,8 @@ public class OllamaModelType {
    public static final String ZEPHYR = "zephyr";
    public static final String OPENHERMES = "openhermes";
    public static final String QWEN = "qwen";
    public static final String QWEN2 = "qwen2";
    public static final String WIZARDCODER = "wizardcoder";
    public static final String LLAMA2_CHINESE = "llama2-chinese";
    public static final String TINYLLAMA = "tinyllama";
@@ -79,4 +84,5 @@ public class OllamaModelType {
    public static final String NOTUS = "notus";
    public static final String DUCKDB_NSQL = "duckdb-nsql";
    public static final String ALL_MINILM = "all-minilm";
    public static final String CODESTRAL = "codestral";
}
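The added constants plug in wherever a model name string is expected, for example (illustrative only, checked exceptions omitted):

    ollamaAPI.pullModel(OllamaModelType.GEMMA2);
    ollamaAPI.pullModel(OllamaModelType.CODESTRAL);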
@@ -1,7 +1,5 @@
package io.github.amithkoujalgi.ollama4j.integrationtests;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail;
@@ -10,9 +8,16 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
import lombok.Data;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
@@ -22,372 +27,369 @@ import java.net.http.HttpConnectTimeoutException;
import java.util.List;
import java.util.Objects;
import java.util.Properties;

import static org.junit.jupiter.api.Assertions.*;

class TestRealAPIs {

    private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class);

    OllamaAPI ollamaAPI;
    Config config;

    private File getImageFileFromClasspath(String fileName) {
        ClassLoader classLoader = getClass().getClassLoader();
        return new File(Objects.requireNonNull(classLoader.getResource(fileName)).getFile());
    }

    @BeforeEach
    void setUp() {
        config = new Config();
        ollamaAPI = new OllamaAPI(config.getOllamaURL());
        ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds());
    }

    @Test
    @Order(1)
    void testWrongEndpoint() {
        OllamaAPI ollamaAPI = new OllamaAPI("http://wrong-host:11434");
        assertThrows(ConnectException.class, ollamaAPI::listModels);
    }

    @Test
    @Order(1)
    void testEndpointReachability() {
        try {
            assertNotNull(ollamaAPI.listModels());
        } catch (HttpConnectTimeoutException e) {
            fail(e.getMessage());
        } catch (Exception e) {
            fail(e);
        }
    }

    @Test
    @Order(2)
    void testListModels() {
        testEndpointReachability();
        try {
            assertNotNull(ollamaAPI.listModels());
            ollamaAPI.listModels().forEach(System.out::println);
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            fail(e);
        }
    }

    @Test
    @Order(2)
    void testPullModel() {
        testEndpointReachability();
        try {
            ollamaAPI.pullModel(config.getModel());
            boolean found =
                    ollamaAPI.listModels().stream()
                            .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel()));
            assertTrue(found);
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            fail(e);
        }
    }

    @Test
    @Order(3)
    void testListDtails() {
        testEndpointReachability();
        try {
            ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel());
            assertNotNull(modelDetails);
            System.out.println(modelDetails);
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            fail(e);
        }
    }

    @Test
    @Order(3)
    void testAskModelWithDefaultOptions() {
        testEndpointReachability();
        try {
            OllamaResult result =
                    ollamaAPI.generate(
                            config.getModel(),
                            "What is the capital of France? And what's France's connection with Mona Lisa?",
                            false,
                            new OptionsBuilder().build());
            assertNotNull(result);
            assertNotNull(result.getResponse());
            assertFalse(result.getResponse().isEmpty());
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            fail(e);
        }
    }

    @Test
    @Order(3)
    void testAskModelWithDefaultOptionsStreamed() {
        testEndpointReachability();
        try {
            StringBuffer sb = new StringBuffer("");
            OllamaResult result = ollamaAPI.generate(config.getModel(),
                    "What is the capital of France? And what's France's connection with Mona Lisa?",
                    false,
                    new OptionsBuilder().build(), (s) -> {
                        LOG.info(s);
                        String substring = s.substring(sb.toString().length(), s.length());
                        LOG.info(substring);
                        sb.append(substring);
                    });

            assertNotNull(result);
            assertNotNull(result.getResponse());
            assertFalse(result.getResponse().isEmpty());
            assertEquals(sb.toString().trim(), result.getResponse().trim());
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            fail(e);
        }
    }

    @Test
    @Order(3)
    void testAskModelWithOptions() {
        testEndpointReachability();
        try {
            OllamaResult result =
                    ollamaAPI.generate(
                            config.getModel(),
                            "What is the capital of France? And what's France's connection with Mona Lisa?",
                            true,
                            new OptionsBuilder().setTemperature(0.9f).build());
            assertNotNull(result);
            assertNotNull(result.getResponse());
            assertFalse(result.getResponse().isEmpty());
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            fail(e);
        }
    }

    @Test
    @Order(3)
    void testChat() {
        testEndpointReachability();
        try {
            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
            OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
                    .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!")
                    .withMessage(OllamaChatMessageRole.USER, "And what is the second larges city?")
                    .build();

            OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
            assertNotNull(chatResult);
            assertFalse(chatResult.getResponse().isBlank());
            assertEquals(4, chatResult.getChatHistory().size());
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            fail(e);
        }
    }

    @Test
    @Order(3)
    void testChatWithSystemPrompt() {
        testEndpointReachability();
        try {
            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
            OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM,
                            "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
                    .withMessage(OllamaChatMessageRole.USER,
                            "What is the capital of France? And what's France's connection with Mona Lisa?")
                    .build();

            OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
            assertNotNull(chatResult);
            assertFalse(chatResult.getResponse().isBlank());
            assertTrue(chatResult.getResponse().startsWith("NI"));
            assertEquals(3, chatResult.getChatHistory().size());
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            fail(e);
        }
    }

    @Test
    @Order(3)
    void testChatWithStream() {
        testEndpointReachability();
        try {
            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
            OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
                            "What is the capital of France? And what's France's connection with Mona Lisa?")
                    .build();

            StringBuffer sb = new StringBuffer("");

            OllamaChatResult chatResult = ollamaAPI.chat(requestModel, (s) -> {
                LOG.info(s);
                String substring = s.substring(sb.toString().length(), s.length());
                LOG.info(substring);
                sb.append(substring);
            });
            assertNotNull(chatResult);
            assertEquals(sb.toString().trim(), chatResult.getResponse().trim());
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            fail(e);
        }
    }

    @Test
    @Order(3)
    void testChatWithImageFromFileWithHistoryRecognition() {
        testEndpointReachability();
        try {
            OllamaChatRequestBuilder builder =
                    OllamaChatRequestBuilder.getInstance(config.getImageModel());
            OllamaChatRequestModel requestModel =
                    builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
                            List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();

            OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
            assertNotNull(chatResult);
            assertNotNull(chatResult.getResponse());

            builder.reset();

            requestModel =
                    builder.withMessages(chatResult.getChatHistory())
                            .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();

            chatResult = ollamaAPI.chat(requestModel);
            assertNotNull(chatResult);
            assertNotNull(chatResult.getResponse());

        } catch (IOException | OllamaBaseException | InterruptedException e) {
            fail(e);
        }
    }

    @Test
    @Order(3)
    void testChatWithImageFromURL() {
        testEndpointReachability();
        try {
            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel());
            OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
                            "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")
                    .build();

            OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
            assertNotNull(chatResult);
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            fail(e);
        }
    }

    @Test
    @Order(3)
    void testAskModelWithOptionsAndImageFiles() {
        testEndpointReachability();
        File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
        try {
            OllamaResult result =
                    ollamaAPI.generateWithImageFiles(
                            config.getImageModel(),
                            "What is in this image?",
                            List.of(imageFile),
                            new OptionsBuilder().build());
            assertNotNull(result);
            assertNotNull(result.getResponse());
            assertFalse(result.getResponse().isEmpty());
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            fail(e);
        }
    }

    @Test
    @Order(3)
    void testAskModelWithOptionsAndImageFilesStreamed() {
        testEndpointReachability();
        File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
        try {
            StringBuffer sb = new StringBuffer("");

            OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(),
                    "What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> {
                        LOG.info(s);
                        String substring = s.substring(sb.toString().length(), s.length());
                        LOG.info(substring);
                        sb.append(substring);
                    });
            assertNotNull(result);
            assertNotNull(result.getResponse());
            assertFalse(result.getResponse().isEmpty());
            assertEquals(sb.toString().trim(), result.getResponse().trim());
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            fail(e);
        }
    }

    @Test
    @Order(3)
    void testAskModelWithOptionsAndImageURLs() {
        testEndpointReachability();
        try {
            OllamaResult result =
                    ollamaAPI.generateWithImageURLs(
                            config.getImageModel(),
                            "What is in this image?",
                            List.of(
                                    "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
                            new OptionsBuilder().build());
            assertNotNull(result);
            assertNotNull(result.getResponse());
            assertFalse(result.getResponse().isEmpty());
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            fail(e);
        }
    }

    @Test
    @Order(3)
    public void testEmbedding() {
        testEndpointReachability();
        try {
            OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder
                    .getInstance(config.getModel(), "What is the capital of France?").build();

            List<Double> embeddings = ollamaAPI.generateEmbeddings(request);

            assertNotNull(embeddings);
            assertFalse(embeddings.isEmpty());
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            fail(e);
        }
    }
}

@Data
class Config {
    private String ollamaURL;
    private String model;
    private String imageModel;
    private int requestTimeoutSeconds;

    public Config() {
        Properties properties = new Properties();
        try (InputStream input =
                getClass().getClassLoader().getResourceAsStream("test-config.properties")) {
            if (input == null) {
                throw new RuntimeException("Sorry, unable to find test-config.properties");
            }
            properties.load(input);
            this.ollamaURL = properties.getProperty("ollama.url");
            this.model = properties.getProperty("ollama.model");
            this.imageModel = properties.getProperty("ollama.model.image");
            this.requestTimeoutSeconds =
                    Integer.parseInt(properties.getProperty("ollama.request-timeout-seconds"));
        } catch (IOException e) {
            throw new RuntimeException("Error loading properties", e);
        }
    }
}
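The Config class above loads its values from a test-config.properties file on the test classpath; the property keys come from the code, while the values below are only examples:

    ollama.url=http://localhost:11434
    ollama.model=mistral
    ollama.model.image=llava
    ollama.request-timeout-seconds=120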
@@ -1,7 +1,5 @@
package io.github.amithkoujalgi.ollama4j.unittests;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail;
@@ -9,155 +7,158 @@ import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultCallback;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;

import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;

import static org.mockito.Mockito.*;

class TestMockedAPIs {
    @Test
    void testPullModel() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        try {
            doNothing().when(ollamaAPI).pullModel(model);
            ollamaAPI.pullModel(model);
            verify(ollamaAPI, times(1)).pullModel(model);
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testListModels() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        try {
            when(ollamaAPI.listModels()).thenReturn(new ArrayList<>());
            ollamaAPI.listModels();
            verify(ollamaAPI, times(1)).listModels();
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testCreateModel() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        String modelFilePath = "FROM llama2\nSYSTEM You are mario from Super Mario Bros.";
        try {
            doNothing().when(ollamaAPI).createModelWithModelFileContents(model, modelFilePath);
            ollamaAPI.createModelWithModelFileContents(model, modelFilePath);
            verify(ollamaAPI, times(1)).createModelWithModelFileContents(model, modelFilePath);
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testDeleteModel() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        try {
            doNothing().when(ollamaAPI).deleteModel(model, true);
            ollamaAPI.deleteModel(model, true);
            verify(ollamaAPI, times(1)).deleteModel(model, true);
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testGetModelDetails() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        try {
            when(ollamaAPI.getModelDetails(model)).thenReturn(new ModelDetail());
            ollamaAPI.getModelDetails(model);
            verify(ollamaAPI, times(1)).getModelDetails(model);
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testGenerateEmbeddings() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        String prompt = "some prompt text";
        try {
            when(ollamaAPI.generateEmbeddings(model, prompt)).thenReturn(new ArrayList<>());
            ollamaAPI.generateEmbeddings(model, prompt);
            verify(ollamaAPI, times(1)).generateEmbeddings(model, prompt);
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testAsk() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        String prompt = "some prompt text";
        OptionsBuilder optionsBuilder = new OptionsBuilder();
        try {
            when(ollamaAPI.generate(model, prompt, false, optionsBuilder.build()))
                    .thenReturn(new OllamaResult("", 0, 200));
            ollamaAPI.generate(model, prompt, false, optionsBuilder.build());
            verify(ollamaAPI, times(1)).generate(model, prompt, false, optionsBuilder.build());
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testAskWithImageFiles() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        String prompt = "some prompt text";
        try {
            when(ollamaAPI.generateWithImageFiles(
                    model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
                    .thenReturn(new OllamaResult("", 0, 200));
            ollamaAPI.generateWithImageFiles(
                    model, prompt, Collections.emptyList(), new OptionsBuilder().build());
            verify(ollamaAPI, times(1))
                    .generateWithImageFiles(
                            model, prompt, Collections.emptyList(), new OptionsBuilder().build());
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testAskWithImageURLs() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        String prompt = "some prompt text";
        try {
            when(ollamaAPI.generateWithImageURLs(
                    model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
                    .thenReturn(new OllamaResult("", 0, 200));
            ollamaAPI.generateWithImageURLs(
                    model, prompt, Collections.emptyList(), new OptionsBuilder().build());
            verify(ollamaAPI, times(1))
                    .generateWithImageURLs(
                            model, prompt, Collections.emptyList(), new OptionsBuilder().build());
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testAskAsync() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        String prompt = "some prompt text";
        when(ollamaAPI.generateAsync(model, prompt, false))
                .thenReturn(new OllamaAsyncResultCallback(null, null, 3));
        ollamaAPI.generateAsync(model, prompt, false);
        verify(ollamaAPI, times(1)).generateAsync(model, prompt, false);
    }
}
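The mocked calls above follow the new generate signatures, which take an extra boolean ahead of the options (false in most tests, true in testAskModelWithOptions); this diff does not name the flag, so reading it as a raw-mode switch is an assumption. Against a real client the same calls would look roughly like this (checked exceptions omitted):

    // Sketch only: the same signatures the mocks exercise, on a real OllamaAPI instance.
    OllamaAPI api = new OllamaAPI("http://localhost:11434");
    OllamaResult result = api.generate(OllamaModelType.MISTRAL, "Why is the sky blue?",
            false, new OptionsBuilder().build());
    OllamaAsyncResultCallback callback = api.generateAsync(OllamaModelType.MISTRAL,
            "Why is the sky blue?", false);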
@@ -6,30 +6,30 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

public abstract class AbstractSerializationTest<T> {

    protected ObjectMapper mapper = Utils.getObjectMapper();

    protected String serialize(T obj) {
        try {
            return mapper.writeValueAsString(obj);
        } catch (JsonProcessingException e) {
            fail("Could not serialize request!", e);
            return null;
        }
    }

    protected T deserialize(String jsonObject, Class<T> deserializationClass) {
        try {
            return mapper.readValue(jsonObject, deserializationClass);
        } catch (JsonProcessingException e) {
            fail("Could not deserialize jsonObject!", e);
            return null;
        }
    }

    protected void assertEqualsAfterUnmarshalling(T unmarshalledObject,
            T req) {
        assertEquals(req, unmarshalledObject);
    }
}
@@ -14,7 +14,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilde
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class TestChatRequestSerialization extends AbstractSerializationTest<OllamaChatRequestModel> {

    private OllamaChatRequestBuilder builder;

@@ -26,8 +26,8 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
    @Test
    public void testRequestOnlyMandatoryFields() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build();
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req);
    }

    @Test
@@ -35,16 +35,16 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt")
                .withMessage(OllamaChatMessageRole.USER, "Some prompt")
                .build();
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req);
    }

    @Test
    public void testRequestWithMessageAndImage() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
                List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req);
    }

    @Test
@@ -61,8 +61,8 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
                .withOptions(b.setTopP(1).build())
                .build();

        String jsonRequest = serialize(req);
        OllamaChatRequestModel deserializeRequest = deserialize(jsonRequest, OllamaChatRequestModel.class);
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
        assertEquals(1.0, deserializeRequest.getOptions().get("temperature"));
@@ -79,7 +79,7 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
                .withGetJsonResponse().build();

        String jsonRequest = serialize(req);
        // no jackson deserialization as format property is not boolean ==> omit as deserialization
        // of request is never used in real code anyways
        JSONObject jsonObject = new JSONObject(jsonRequest);
@@ -91,15 +91,15 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
    public void testWithTemplate() {
        OllamaChatRequestModel req = builder.withTemplate("System Template")
                .build();
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req);
    }

    @Test
    public void testWithStreaming() {
        OllamaChatRequestModel req = builder.withStreaming().build();
        String jsonRequest = serialize(req);
        assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).isStream(), true);
    }

    @Test
@@ -107,7 +107,7 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
        String expectedKeepAlive = "5m";
        OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive)
                .build();
        String jsonRequest = serialize(req);
        assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive);
    }
}
@@ -7,7 +7,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsR
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

-public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializationTest<OllamaEmbeddingsRequestModel>{
+public class TestEmbeddingsRequestSerialization extends AbstractSerializationTest<OllamaEmbeddingsRequestModel> {

    private OllamaEmbeddingsRequestBuilder builder;

@@ -19,8 +19,8 @@ public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializa
    @Test
    public void testRequestOnlyMandatoryFields() {
        OllamaEmbeddingsRequestModel req = builder.build();
-        String jsonRequest = serializeRequest(req);
-        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class), req);
+        String jsonRequest = serialize(req);
+        assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class), req);
    }

    @Test
@@ -29,8 +29,8 @@ public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializa
        OllamaEmbeddingsRequestModel req = builder
            .withOptions(b.setMirostat(1).build()).build();

-        String jsonRequest = serializeRequest(req);
-        OllamaEmbeddingsRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class);
+        String jsonRequest = serialize(req);
+        OllamaEmbeddingsRequestModel deserializeRequest = deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class);
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
    }
@@ -11,7 +11,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateReque
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

-public class TestGenerateRequestSerialization extends AbstractRequestSerializationTest<OllamaGenerateRequestModel>{
+public class TestGenerateRequestSerialization extends AbstractSerializationTest<OllamaGenerateRequestModel> {

    private OllamaGenerateRequestBuilder builder;

@@ -24,8 +24,8 @@ public class TestGenerateRequestSerialization extends AbstractRequestSerializati
    public void testRequestOnlyMandatoryFields() {
        OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build();

-        String jsonRequest = serializeRequest(req);
-        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class), req);
+        String jsonRequest = serialize(req);
+        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequestModel.class), req);
    }

    @Test
@@ -34,8 +34,8 @@ public class TestGenerateRequestSerialization extends AbstractRequestSerializati
        OllamaGenerateRequestModel req =
            builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build();

-        String jsonRequest = serializeRequest(req);
-        OllamaGenerateRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class);
+        String jsonRequest = serialize(req);
+        OllamaGenerateRequestModel deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequestModel.class);
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
    }
@@ -45,7 +45,7 @@ public class TestGenerateRequestSerialization extends AbstractRequestSerializati
        OllamaGenerateRequestModel req =
            builder.withPrompt("Some prompt").withGetJsonResponse().build();

-        String jsonRequest = serializeRequest(req);
+        String jsonRequest = serialize(req);
        // no jackson deserialization as format property is not boolean ==> omit as deserialization
        // of request is never used in real code anyways
        JSONObject jsonObject = new JSONObject(jsonRequest);
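Note on the refactoring above: the three serialization test classes consistently rename the helpers serializeRequest/deserializeRequest to serialize/deserialize and extend a base class renamed from AbstractRequestSerializationTest to AbstractSerializationTest. The base class itself is not part of this diff, so the following is only a minimal sketch inferred from the calls the tests make; the mapper setup, exception handling, and method bodies are assumptions, not the repository's actual code.

package io.github.amithkoujalgi.ollama4j.unittests.jackson;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;

public abstract class AbstractSerializationTest<T> {

    // Assumed: a Jackson mapper with java.time support, since the Model tests
    // below deserialize ISO-8601 timestamps. The real class may instead reuse
    // a shared mapper provided by the library.
    protected final ObjectMapper mapper = new ObjectMapper().registerModule(new JavaTimeModule());

    protected String serialize(T obj) {
        try {
            return mapper.writeValueAsString(obj);
        } catch (JsonProcessingException e) {
            fail("Could not serialize request!", e);
            return null;
        }
    }

    protected T deserialize(String json, Class<T> clazz) {
        try {
            return mapper.readValue(json, clazz);
        } catch (JsonProcessingException e) {
            fail("Could not deserialize: " + json, e);
            return null;
        }
    }

    protected void assertEqualsAfterUnmarshalling(T unmarshalledObject, T original) {
        assertEquals(original, unmarshalledObject);
    }
}

Tests like testRequestOnlyMandatoryFields then round-trip a request through serialize and deserialize and assert the result equals the original, which is exactly the pattern visible in the diffs above.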
@@ -0,0 +1,42 @@
+package io.github.amithkoujalgi.ollama4j.unittests.jackson;
+
+import io.github.amithkoujalgi.ollama4j.core.models.Model;
+import org.junit.jupiter.api.Test;
+
+public class TestModelRequestSerialization extends AbstractSerializationTest<Model> {
+
+    @Test
+    public void testDeserializationOfModelResponseWithOffsetTime(){
+        String serializedTestStringWithOffsetTime = "{\n"
+            + "\"name\": \"codellama:13b\",\n"
+            + "\"modified_at\": \"2023-11-04T14:56:49.277302595-07:00\",\n"
+            + "\"size\": 7365960935,\n"
+            + "\"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n"
+            + "\"details\": {\n"
+            + "\"format\": \"gguf\",\n"
+            + "\"family\": \"llama\",\n"
+            + "\"families\": null,\n"
+            + "\"parameter_size\": \"13B\",\n"
+            + "\"quantization_level\": \"Q4_0\"\n"
+            + "}}";
+        deserialize(serializedTestStringWithOffsetTime,Model.class);
+    }
+
+    @Test
+    public void testDeserializationOfModelResponseWithZuluTime(){
+        String serializedTestStringWithZuluTimezone = "{\n"
+            + "\"name\": \"codellama:13b\",\n"
+            + "\"modified_at\": \"2023-11-04T14:56:49.277302595Z\",\n"
+            + "\"size\": 7365960935,\n"
+            + "\"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n"
+            + "\"details\": {\n"
+            + "\"format\": \"gguf\",\n"
+            + "\"family\": \"llama\",\n"
+            + "\"families\": null,\n"
+            + "\"parameter_size\": \"13B\",\n"
+            + "\"quantization_level\": \"Q4_0\"\n"
+            + "}}";
+        deserialize(serializedTestStringWithZuluTimezone,Model.class);
+    }
+
+}
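The two new tests only verify that Model deserializes a modified_at timestamp written either with a numeric UTC offset or with a Zulu ("Z") suffix. Assuming the field maps to java.time.OffsetDateTime (not confirmed by this excerpt), Jackson accepts both forms once the jackson-datatype-jsr310 module is registered; a standalone sketch under that assumption:

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import java.time.OffsetDateTime;

public class OffsetTimeParsingSketch {
    public static void main(String[] args) throws Exception {
        // JavaTimeModule adds (de)serializers for java.time types such as OffsetDateTime.
        ObjectMapper mapper = new ObjectMapper()
                .registerModule(new JavaTimeModule())
                // keep the original offset instead of normalizing it to the mapper's time zone
                .disable(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE);

        // Both timestamp variants used in the tests parse without extra configuration.
        OffsetDateTime withOffset =
                mapper.readValue("\"2023-11-04T14:56:49.277302595-07:00\"", OffsetDateTime.class);
        OffsetDateTime withZulu =
                mapper.readValue("\"2023-11-04T14:56:49.277302595Z\"", OffsetDateTime.class);

        System.out.println(withOffset); // 2023-11-04T14:56:49.277302595-07:00
        System.out.println(withZulu);   // 2023-11-04T14:56:49.277302595Z
    }
}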