Compare commits


34 Commits

| Author | SHA1 | Message | Date |
|--------|------|---------|------|
| Amith Koujalgi | 3ab9e4c283 | Update README.md | 2024-07-27 00:45:20 +05:30 |
| Amith Koujalgi | 2db6a22cc7 | updated README.md (Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>) | 2024-07-27 00:39:42 +05:30 |
| Amith Koujalgi | cc69341620 | updated links (Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>) | 2024-07-27 00:22:16 +05:30 |
| Amith Koujalgi | 4589a9032c | updated links | 2024-07-27 00:21:34 +05:30 |
| Amith Koujalgi | da273402b5 | updated links | 2024-07-27 00:16:44 +05:30 |
| Amith Koujalgi | cfa8aa14d7 | updated publishing config | 2024-07-26 23:57:19 +05:30 |
| Amith Koujalgi | bc4e8303aa | Update README.md | 2024-07-16 23:43:55 +05:30 |
| Amith Koujalgi | f2f740a2a0 | Update README.md | 2024-07-16 23:42:47 +05:30 |
| Amith Koujalgi | 4cbb783a61 | Update README.md | 2024-07-16 23:41:29 +05:30 |
| Amith Koujalgi | 5c9e0b7d8a | Update README.md | 2024-07-16 23:40:12 +05:30 |
| koujalgi.amith@gmail.com | 2f8577a24d | updated README.md (Signed-off-by: koujalgi.amith@gmail.com) | 2024-07-16 09:42:47 +05:30 |
| Amith Koujalgi | 02116b7025 | Create CODE_OF_CONDUCT.md | 2024-07-15 22:57:01 +05:30 |
| Amith Koujalgi | f3778f8786 | Update README.md | 2024-07-15 22:52:11 +05:30 |
| Amith Koujalgi | c6141634db | Update README.md | 2024-07-15 22:29:36 +05:30 |
| Amith Koujalgi | d9f98ad901 | Update README.md | 2024-07-15 22:23:46 +05:30 |
| Amith Koujalgi | 79d97445b8 | Update README.md | 2024-07-14 23:13:21 +05:30 |
| Amith Koujalgi | 1c40697c96 | Update README.md | 2024-07-14 23:11:48 +05:30 |
| koujalgi.amith@gmail.com | f03026abb3 | updated README.md (Signed-off-by: koujalgi.amith@gmail.com) | 2024-07-14 23:04:30 +05:30 |
| koujalgi.amith@gmail.com | 63a6e81ac2 | updated README.md (Signed-off-by: koujalgi.amith@gmail.com) | 2024-07-14 22:54:51 +05:30 |
| koujalgi.amith@gmail.com | 76cad0f584 | updated README.md (Signed-off-by: koujalgi.amith@gmail.com) | 2024-07-14 22:50:54 +05:30 |
| Amith Koujalgi | bee2908d1e | Updated README.md (Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>) | 2024-07-14 13:34:12 +05:30 |
| Amith Koujalgi | 8a4c9fd969 | Updated README.md (Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>) | 2024-07-14 13:27:23 +05:30 |
| koujalgi.amith@gmail.com | d470f940b0 | updated README.md (Signed-off-by: koujalgi.amith@gmail.com) | 2024-07-14 11:39:36 +05:30 |
| koujalgi.amith@gmail.com | df402efaba | updated README.md (Signed-off-by: koujalgi.amith@gmail.com) | 2024-07-14 11:37:47 +05:30 |
| koujalgi.amith@gmail.com | 677362abbf | updated README.md (Signed-off-by: koujalgi.amith@gmail.com) | 2024-07-14 11:35:45 +05:30 |
| koujalgi.amith@gmail.com | 81689be194 | Refactored tools API (Signed-off-by: koujalgi.amith@gmail.com) | 2024-07-14 11:23:36 +05:30 |
| koujalgi.amith@gmail.com | fd93036d08 | Refactor (Signed-off-by: koujalgi.amith@gmail.com) | 2024-07-14 00:07:23 +05:30 |
| koujalgi.amith@gmail.com | c9b05a725b | Refactor (Signed-off-by: koujalgi.amith@gmail.com) | 2024-07-14 00:05:43 +05:30 |
| koujalgi.amith@gmail.com | a4e1b4afe9 | Removed old maven-publish.yml (Signed-off-by: koujalgi.amith@gmail.com) | 2024-07-14 00:02:20 +05:30 |
| koujalgi.amith@gmail.com | 3d21813abb | updated README.md (Signed-off-by: koujalgi.amith@gmail.com) | 2024-07-14 00:00:52 +05:30 |
| koujalgi.amith@gmail.com | 383d0f56ca | Updated generateAsync() API (Signed-off-by: koujalgi.amith@gmail.com) | 2024-07-13 23:54:49 +05:30 |
| koujalgi.amith@gmail.com | af1b213a76 | updated README.md (Signed-off-by: koujalgi.amith@gmail.com) | 2024-07-13 21:50:45 +05:30 |
| koujalgi.amith@gmail.com | fed89a9643 | updated README.md (Signed-off-by: koujalgi.amith@gmail.com) | 2024-07-13 21:49:26 +05:30 |
| Amith Koujalgi | fd32aa33ff | Updated README.md (Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>) | 2024-07-13 14:30:13 +05:30 |
35 changed files with 918 additions and 588 deletions

New file: .github/workflows/gh-mvn-publish.yml (58 lines)

@@ -0,0 +1,58 @@
```yaml
name: Release Artifacts to GitHub Maven Packages

on:
  release:
    types: [ created ]

jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          java-version: '17'
          distribution: 'temurin'
          server-id: github
          settings-path: ${{ github.workspace }}
      - name: maven-settings-xml-action
        uses: whelk-io/maven-settings-xml-action@v22
        with:
          servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]'
      - name: Find and Replace
        uses: jacobtomlinson/gha-find-replace@v3
        with:
          find: "ollama4j-revision"
          replace: ${{ github.ref_name }}
          regex: false
      - name: Find and Replace
        uses: jacobtomlinson/gha-find-replace@v3
        with:
          find: "mvn-repo-id"
          replace: github
          regex: false
      - name: Import GPG key
        uses: crazy-max/ghaction-import-gpg@v6
        with:
          gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
          passphrase: ${{ secrets.GPG_PASSPHRASE }}
      - name: List keys
        run: gpg -K
      - name: Build with Maven
        run: mvn --file pom.xml -U clean package -Punit-tests
      - name: Publish to GitHub Packages Apache Maven
        run: mvn deploy -Punit-tests -s $GITHUB_WORKSPACE/settings.xml -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=github -Drepo.user=${{ secrets.GH_MVN_USER }} -Drepo.pass=${{ secrets.GH_MVN_PASS }} -DaltDeploymentRepository=github::default::https://maven.pkg.github.com/ollama4j/ollama4j
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```

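For readers following the workflow above: the `whelk-io/maven-settings-xml-action` step writes a `settings.xml` whose server entry contains literal `${repo.id}`, `${repo.user}` and `${repo.pass}` placeholders, which Maven resolves from the `-Drepo.id`, `-Drepo.user` and `-Drepo.pass` properties passed to `mvn deploy`; the "mvn-repo-id" find-and-replace step suggests the project's `pom.xml` carries a matching placeholder in its `distributionManagement` section. The sketch below is a hedged illustration of that wiring — the actual `pom.xml` is not part of this diff, so the `distributionManagement` element shown here is an assumption, and the URL is only illustrative.

```xml
<!-- Hedged sketch, not part of this diff. -->

<!-- settings.xml as generated by whelk-io/maven-settings-xml-action; Maven resolves the
     ${repo.*} placeholders from the -Drepo.id/-Drepo.user/-Drepo.pass flags on `mvn deploy`. -->
<settings>
  <servers>
    <server>
      <id>${repo.id}</id>
      <username>${repo.user}</username>
      <password>${repo.pass}</password>
    </server>
  </servers>
</settings>

<!-- Assumed pom.xml counterpart: a repository whose <id> is the literal "mvn-repo-id" token
     that the Find and Replace step rewrites to "github" (this workflow) or "central"
     (the Maven Central workflow) before deploying. -->
<distributionManagement>
  <repository>
    <id>mvn-repo-id</id>
    <url>https://maven.pkg.github.com/ollama4j/ollama4j</url>
  </repository>
</distributionManagement>
```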
Changed file: release workflow ("Release Artifacts", renamed to "Release Artifacts to Maven Central")

@@ -1,22 +1,31 @@
 # This workflow will build a package using Maven and then publish it to GitHub packages when a release is created
 # For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path
-name: Release Artifacts
+name: Release Artifacts to Maven Central
 on:
   release:
     types: [ created ]
+#on:
+#  pull_request:
+#    types: [ opened, reopened ]
+#    branches: [ "main" ]
 jobs:
   build:
     runs-on: ubuntu-latest
     permissions:
-      contents: read
+      contents: write
       packages: write
     steps:
       - uses: actions/checkout@v3
       - name: Set up JDK 17
         uses: actions/setup-java@v3
         with:
@@ -25,6 +34,19 @@ jobs:
           server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
           settings-path: ${{ github.workspace }} # location for the settings.xml file
+      - name: maven-settings-xml-action
+        uses: whelk-io/maven-settings-xml-action@v22
+        with:
+          servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]'
+      - name: Import GPG key
+        uses: crazy-max/ghaction-import-gpg@v6
+        with:
+          gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
+          passphrase: ${{ secrets.GPG_PASSPHRASE }}
+      - name: List keys
+        run: gpg -K
       - name: Find and Replace
         uses: jacobtomlinson/gha-find-replace@v3
         with:
@@ -32,10 +54,42 @@ jobs:
           replace: ${{ github.ref_name }}
           regex: false
-      - name: Build with Maven
-        run: mvn --file pom.xml -U clean package -Punit-tests
+      - name: Find and Replace
+        uses: jacobtomlinson/gha-find-replace@v3
+        with:
+          find: "mvn-repo-id"
+          replace: central
+          regex: false
-      - name: Publish to GitHub Packages Apache Maven
-        run: mvn deploy -s $GITHUB_WORKSPACE/settings.xml --file pom.xml
+      - name: Publish to Maven Central
+        run: mvn deploy -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=central -Drepo.user=${{ secrets.MVN_USER }} -Drepo.pass=${{ secrets.MVN_PASS }}
+      - name: Upload Release Asset - JAR
+        uses: actions/upload-release-asset@v1
         env:
-          GITHUB_TOKEN: ${{ github.token }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ github.event.release.upload_url }}
+          asset_path: target/ollama4j-${{ github.ref_name }}.jar
+          asset_name: ollama4j-${{ github.ref_name }}.jar
+          asset_content_type: application/x-jar
+      - name: Upload Release Asset - Javadoc JAR
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ github.event.release.upload_url }}
+          asset_path: target/ollama4j-${{ github.ref_name }}-javadoc.jar
+          asset_name: ollama4j-${{ github.ref_name }}-javadoc.jar
+          asset_content_type: application/x-jar
+      - name: Upload Release Asset - Sources JAR
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ github.event.release.upload_url }}
+          asset_path: target/ollama4j-${{ github.ref_name }}-sources.jar
+          asset_name: ollama4j-${{ github.ref_name }}-sources.jar
+          asset_content_type: application/x-jar

New file: CODE_OF_CONDUCT.md (128 lines)

@@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
koujalgi.amith@gmail.com.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

Changed file: Makefile

@@ -1,10 +1,10 @@
 build:
 	mvn -B clean install
-ut:
+unit-tests:
 	mvn clean test -Punit-tests
-it:
+integration-tests:
 	mvn clean verify -Pintegration-tests
 doxygen:

Changed file: README.md (173 changed lines)

@@ -1,46 +1,46 @@
-<div style="text-align: center">
 ### Ollama4j
-<img src='https://raw.githubusercontent.com/amithkoujalgi/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon">
+<p align="center">
+  <img src='https://raw.githubusercontent.com/ollama4j/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon">
+</p>
 A Java library (wrapper/binding) for [Ollama](https://ollama.ai/) server.
-Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/).
+Find more details on the [website](https://ollama4j.github.io/ollama4j/).
-![GitHub stars](https://img.shields.io/github/stars/amithkoujalgi/ollama4j)
+![GitHub stars](https://img.shields.io/github/stars/ollama4j/ollama4j)
-![GitHub forks](https://img.shields.io/github/forks/amithkoujalgi/ollama4j)
+![GitHub forks](https://img.shields.io/github/forks/ollama4j/ollama4j)
-![GitHub watchers](https://img.shields.io/github/watchers/amithkoujalgi/ollama4j)
+![GitHub watchers](https://img.shields.io/github/watchers/ollama4j/ollama4j)
-![Contributors](https://img.shields.io/github/contributors/amithkoujalgi/ollama4j)
+![Contributors](https://img.shields.io/github/contributors/ollama4j/ollama4j?style=social)
-![GitHub License](https://img.shields.io/github/license/amithkoujalgi/ollama4j)
+![GitHub License](https://img.shields.io/github/license/ollama4j/ollama4j)
-![GitHub repo size](https://img.shields.io/github/repo-size/amithkoujalgi/ollama4j)
+[//]: # (![GitHub repo size]&#40;https://img.shields.io/github/repo-size/ollama4j/ollama4j&#41;)
-![GitHub top language](https://img.shields.io/github/languages/top/amithkoujalgi/ollama4j)
-![GitHub last commit](https://img.shields.io/github/last-commit/amithkoujalgi/ollama4j?color=green)
-[![codecov](https://codecov.io/gh/amithkoujalgi/ollama4j/graph/badge.svg?token=U0TE7BGP8L)](https://codecov.io/gh/amithkoujalgi/ollama4j)
-![GitHub Issues or Pull Requests](https://img.shields.io/github/issues-raw/amithkoujalgi/ollama4j)
+[//]: # (![GitHub top language]&#40;https://img.shields.io/github/languages/top/ollama4j/ollama4j&#41;)
-![GitHub Issues or Pull Requests](https://img.shields.io/github/issues-closed-raw/amithkoujalgi/ollama4j)
-![GitHub Issues or Pull Requests](https://img.shields.io/github/issues-pr-raw/amithkoujalgi/ollama4j)
-![GitHub Issues or Pull Requests](https://img.shields.io/github/issues-pr-closed-raw/amithkoujalgi/ollama4j)
-![GitHub Discussions](https://img.shields.io/github/discussions/amithkoujalgi/ollama4j)
-![Build Status](https://github.com/amithkoujalgi/ollama4j/actions/workflows/maven-publish.yml/badge.svg)
-</div>
+[//]: # (![JitPack Downloads This Month Badge]&#40;https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fjitpack.io%2Fapi%2Fdownloads%2Fio.github.ollama4j%2Follama4j&query=%24.month&label=JitPack%20Downloads%20-%20This%20Month&#41;)
+[//]: # (![JitPack Downloads This Week Badge]&#40;https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fjitpack.io%2Fapi%2Fdownloads%2Fio.github.ollama4j%2Follama4j&query=%24.week&label=JitPack%20Downloads%20-%20This%20Week&#41;)
+[//]: # (![JitPack Downloads Per Month Badge]&#40;https://jitpack.io/v/ollama4j/ollama4j/month.svg&#41;)
+![GitHub Downloads (all assets, all releases)](https://img.shields.io/github/downloads/ollama4j/ollama4j/total?label=GitHub%20Package%20Downloads)
-[//]: # (![Hits]&#40;https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Famithkoujalgi%2Follama4j&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false&#41;)
+![GitHub last commit](https://img.shields.io/github/last-commit/ollama4j/ollama4j?color=green)
+[![codecov](https://codecov.io/gh/ollama4j/ollama4j/graph/badge.svg?token=U0TE7BGP8L)](https://codecov.io/gh/ollama4j/ollama4j)
+![Build Status](https://github.com/ollama4j/ollama4j/actions/workflows/maven-publish.yml/badge.svg)
-[//]: # (![GitHub language count]&#40;https://img.shields.io/github/languages/count/amithkoujalgi/ollama4j&#41;)
+[//]: # (![Hits]&#40;https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Follama4j%2Follama4j&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false&#41;)
+[//]: # (![GitHub language count]&#40;https://img.shields.io/github/languages/count/ollama4j/ollama4j&#41;)
 ## Table of Contents
 - [How does it work?](#how-does-it-work)
 - [Requirements](#requirements)
 - [Installation](#installation)
-- [API Spec](https://amithkoujalgi.github.io/ollama4j/docs/category/apis---model-management)
+- [API Spec](https://ollama4j.github.io/ollama4j/docs/category/apis---model-management)
-- [Javadoc](https://amithkoujalgi.github.io/ollama4j/apidocs/)
+- [Javadoc](https://ollama4j.github.io/ollama4j/apidocs/)
 - [Development](#development)
 - [Contributions](#get-involved)
 - [References](#references)
@@ -61,43 +61,57 @@ Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/).
 #### Requirements
-![Java](https://img.shields.io/badge/Java-11_+-green.svg?style=just-the-message&labelColor=gray)
+![Java](https://img.shields.io/badge/Java-11_+-green.svg?style=for-the-badge&labelColor=gray&label=Java&color=orange)
 [![][ollama-shield]][ollama-link] **Or** [![][ollama-docker-shield]][ollama-docker]
 [ollama-link]: https://ollama.ai/
-[ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=just-the-message&labelColor=gray
+[ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=for-the-badge&labelColor=gray
 [ollama-docker]: https://hub.docker.com/r/ollama/ollama
-[ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=just-the-message&labelColor=gray
+[ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=for-the-badge&labelColor=gray
-#### Installation
+## Installation
-Check the releases [here](https://github.com/amithkoujalgi/ollama4j/releases) and update the dependency version
-according to your requirements.
+> [!NOTE]
+> We have migrated the package repository from Maven Central to GitHub package repository due to technical issues with
+> publishing. Please update your repository settings to get latest version of Ollama4j.
+>
+> Track the releases [here](https://github.com/ollama4j/ollama4j/releases) and update the dependency version
+> according to your requirements.
-[![][ollama4j-releases-shield]][ollama4j-releases-link]
+### For Maven
-[ollama4j-releases-link]: https://github.com/amithkoujalgi/ollama4j/releases
+#### Using [Maven Central](https://central.sonatype.com/)
-[ollama4j-releases-shield]: https://img.shields.io/github/v/release/amithkoujalgi/ollama4j?include_prereleases&display_name=release&style=for-the-badge&label=Latest%20Release
+[![][ollama4j-mvn-releases-shield]][ollama4j-mvn-releases-link]
-##### For Maven
+[ollama4j-mvn-releases-link]: https://github.com/ollama4j/ollama4j/releases
-1. In your Maven project, add this dependency:
+[ollama4j-mvn-releases-shield]: https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20Maven%20Central%20
+1. In your Maven project, add this dependency:
 ```xml
 <dependency>
-    <groupId>io.github.amithkoujalgi</groupId>
+    <groupId>io.github.ollama4j</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.74</version>
+    <version>1.0.78</version>
 </dependency>
 ```
-2. Add repository to your project's pom.xml:
+#### Using GitHub's Maven Package Repository
+[![][ollama4j-releases-shield]][ollama4j-releases-link]
+[ollama4j-releases-link]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j/overview
+[ollama4j-releases-shield]: https://img.shields.io/github/v/release/ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20GitHub%20Packages%20
+1. Add `GitHub Maven Packages` repository to your project's `pom.xml` or your `settings.xml`:
 ```xml
@@ -105,7 +119,7 @@ according to your requirements.
 <repository>
     <id>github</id>
     <name>GitHub Apache Maven Packages</name>
-    <url>https://maven.pkg.github.com/amithkoujalgi/ollama4j</url>
+    <url>https://maven.pkg.github.com/ollama4j/ollama4j</url>
     <releases>
         <enabled>true</enabled>
     </releases>
@@ -116,7 +130,7 @@ according to your requirements.
 </repositories>
 ```
-3. Add GitHub server to settings.xml. (Usually available at ~/.m2/settings.xml)
+2. Add `GitHub` server to settings.xml. (Usually available at ~/.m2/settings.xml)
 ```xml
@@ -134,22 +148,24 @@ according to your requirements.
 </settings>
 ```
+3. In your Maven project, add this dependency:
+```xml
+<dependency>
+    <groupId>io.github.ollama4j</groupId>
+    <artifactId>ollama4j</artifactId>
+    <version>1.0.78</version>
+</dependency>
+```
 ##### For Gradle
-In your Gradle project, add the dependency using the Kotlin DSL or the Groovy DSL:
+1. Add the dependency
-```kotlin
-dependencies {
-    val ollama4jVersion = "1.0.74"
-    implementation("io.github.amithkoujalgi:ollama4j:$ollama4jVersion")
-}
-```
 ```groovy
 dependencies {
-    implementation("io.github.amithkoujalgi:ollama4j:1.0.74")
+    implementation 'com.github.ollama4j:ollama4j:1.0.78'
 }
 ```
@@ -157,19 +173,20 @@ dependencies {
 [//]: # ()
-[//]: # (![Maven Central]&#40;https://img.shields.io/maven-central/v/io.github.amithkoujalgi/ollama4j&#41;)
+[//]: # (![Maven Central]&#40;https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j&#41;)
 [//]: # ()
 [//]: # ([![][lib-shield]][lib])
-[lib]: https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j
+[lib]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j
 [lib-shield]: https://img.shields.io/badge/ollama4j-get_latest_version-blue.svg?style=just-the-message&labelColor=gray
 #### API Spec
-Find the full API specifications on the [website](https://amithkoujalgi.github.io/ollama4j/).
+> [!TIP]
+> Find the full API specifications on the [website](https://ollama4j.github.io/ollama4j/).
 #### Development
@@ -182,19 +199,18 @@ make build
 Run unit tests:
 ```shell
-make ut
+make unit-tests
 ```
 Run integration tests:
 ```shell
-make it
+make integration-tests
 ```
 #### Releases
-Releases (newer artifact versions) are done automatically on pushing the code to the `main` branch through GitHub
-Actions CI workflow.
+Newer artifacts are published via GitHub Actions CI workflow when a new release is created from `main` branch.
 #### Who's using Ollama4j?
@@ -208,7 +224,7 @@ Actions CI workflow.
 #### Traction
-[![Star History Chart](https://api.star-history.com/svg?repos=amithkoujalgi/ollama4j&type=Date)](https://star-history.com/#amithkoujalgi/ollama4j&Date)
+[![Star History Chart](https://api.star-history.com/svg?repos=ollama4j/ollama4j&type=Date)](https://star-history.com/#ollama4j/ollama4j&Date)
 ### Areas of improvement
@@ -238,6 +254,28 @@ Actions CI workflow.
 ### Get Involved
+<div align="center">
+<a href="">![Open Issues](https://img.shields.io/github/issues-raw/ollama4j/ollama4j)</a>
+<a href="">![Closed Issues](https://img.shields.io/github/issues-closed-raw/ollama4j/ollama4j)</a>
+<a href="">![Open PRs](https://img.shields.io/github/issues-pr-raw/ollama4j/ollama4j)</a>
+<a href="">![Closed PRs](https://img.shields.io/github/issues-pr-closed-raw/ollama4j/ollama4j)</a>
+<a href="">![Discussions](https://img.shields.io/github/discussions/ollama4j/ollama4j)</a>
+</div>
+[//]: # (![GitHub Issues or Pull Requests]&#40;https://img.shields.io/github/issues-raw/ollama4j/ollama4j&#41;)
+[//]: # (![GitHub Issues or Pull Requests]&#40;https://img.shields.io/github/issues-closed-raw/ollama4j/ollama4j&#41;)
+[//]: # (![GitHub Issues or Pull Requests]&#40;https://img.shields.io/github/issues-pr-raw/ollama4j/ollama4j&#41;)
+[//]: # (![GitHub Issues or Pull Requests]&#40;https://img.shields.io/github/issues-pr-closed-raw/ollama4j/ollama4j&#41;)
+[//]: # (![GitHub Discussions]&#40;https://img.shields.io/github/discussions/ollama4j/ollama4j&#41;)
 Contributions are most welcome! Whether it's reporting a bug, proposing an enhancement, or helping
 with code - any sort
 of contribution is much appreciated.
@@ -251,19 +289,16 @@ of contribution is much appreciated.
 The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/)
 project.
-<div style="text-align: center">
 **Thanks to the amazing contributors**
-<a href="https://github.com/amithkoujalgi/ollama4j/graphs/contributors">
+<p align="center">
-    <img src="https://contrib.rocks/image?repo=amithkoujalgi/ollama4j" />
+  <a href="https://github.com/ollama4j/ollama4j/graphs/contributors">
+    <img src="https://contrib.rocks/image?repo=ollama4j/ollama4j" />
 </a>
+</p>
 ### Appreciate my work?
+<p align="center">
 <a href="https://www.buymeacoffee.com/amithkoujalgi" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a>
+</p>
-</div>

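A note for readers following the README's installation steps: the hunk at `@@ -134,22 +148,24 @@` only shows the closing `</settings>` of the "Add GitHub server to settings.xml" snippet. A typical entry looks roughly like the sketch below — a hedged illustration that is not taken from this diff; the username and token are placeholders, and GitHub Packages generally requires a personal access token with the `read:packages` scope to resolve dependencies.

```xml
<!-- ~/.m2/settings.xml — illustrative sketch, not part of this diff -->
<settings>
  <servers>
    <server>
      <!-- Must match the <id> of the GitHub repository entry in pom.xml ("github") -->
      <id>github</id>
      <username>YOUR_GITHUB_USERNAME</username>
      <!-- A personal access token with at least the read:packages scope -->
      <password>YOUR_GITHUB_TOKEN</password>
    </server>
  </servers>
</settings>
```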
Changed file: docs blog post announcing the Ollama4j release

@@ -11,7 +11,7 @@ Hey there, my fellow Java Developers! 🚀
 I am glad to announce the release of Ollama4j, a library that unites Ollama (an LLM manager and runner) and your Java
 applications! 🌐🚀
-👉 GitHub Repository: Ollama4j on GitHub (https://github.com/amithkoujalgi/ollama4j)
+👉 GitHub Repository: Ollama4j on GitHub (https://github.com/ollama4j/ollama4j)
 🌟 Key Features:

Changed file: docs page on setting Ollama model parameters (OptionsBuilder)

@@ -31,7 +31,7 @@ Link to [source](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md
 Also, see how to set those Ollama parameters using
 the `OptionsBuilder`
-from [javadoc](https://amithkoujalgi.github.io/ollama4j/apidocs/io/github/amithkoujalgi/ollama4j/core/utils/OptionsBuilder.html).
+from [javadoc](https://ollama4j.github.io/ollama4j/apidocs/io/github/ollama4j/ollama4j/core/utils/OptionsBuilder.html).
 ## Build an empty `Options` object

Changed file: docs page "Generate - Async"

@@ -1,42 +1,46 @@
 ---
-sidebar_position: 3
+sidebar_position: 2
 ---
 # Generate - Async
 This API lets you ask questions to the LLMs in a asynchronous way.
-These APIs correlate to
-the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
+This is particularly helpful when you want to issue a generate request to the LLM and collect the response in the
+background (such as threads) without blocking your code until the response arrives from the model.
+This API corresponds to
+the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
 ```java
 public class Main {
-    public static void main(String[] args) {
+    public static void main(String[] args) throws Exception {
         String host = "http://localhost:11434/";
         OllamaAPI ollamaAPI = new OllamaAPI(host);
+        ollamaAPI.setRequestTimeoutSeconds(60);
+        String prompt = "List all cricket world cup teams of 2019.";
+        OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync(OllamaModelType.LLAMA3, prompt, false);
-        String prompt = "Who are you?";
+        // Set the poll interval according to your needs.
+        // Smaller the poll interval, more frequently you receive the tokens.
+        int pollIntervalMilliseconds = 1000;
-        OllamaAsyncResultCallback callback = ollamaAPI.generateAsync(OllamaModelType.LLAMA2, prompt);
+        while (true) {
+            String tokens = streamer.getStream().poll();
-        while (!callback.isComplete() || !callback.getStream().isEmpty()) {
+            System.out.print(tokens);
-            // poll for data from the response stream
-            String result = callback.getStream().poll();
-            if (result != null) {
-                System.out.print(result);
+            if (!streamer.isAlive()) {
+                break;
             }
-            Thread.sleep(100);
+            Thread.sleep(pollIntervalMilliseconds);
         }
+        System.out.println("\n------------------------");
+        System.out.println("Complete Response:");
+        System.out.println("------------------------");
+        System.out.println(streamer.getResult());
     }
 }
 ```
-You will get a response similar to:
+You will get a steaming response.
-> I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational
-> manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide
-> range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that
-> require
-> natural language understanding and generation capabilities.

Changed file: docs page "Generate - With Image Files"

@@ -5,8 +5,8 @@ sidebar_position: 4
 # Generate - With Image Files
 This API lets you ask questions along with the image files to the LLMs.
-These APIs correlate to
-the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
+This API corresponds to
+the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
 :::note

Changed file: docs page "Generate - With Image URLs"

@@ -5,8 +5,8 @@ sidebar_position: 5
 # Generate - With Image URLs
 This API lets you ask questions along with the image files to the LLMs.
-These APIs correlate to
-the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
+This API corresponds to
+the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
 :::note

Changed file: docs page "Generate - With Tools"

@@ -1,12 +1,12 @@
 ---
-sidebar_position: 2
+sidebar_position: 3
 ---
 # Generate - With Tools
 This API lets you perform [function calling](https://docs.mistral.ai/capabilities/function_calling/) using LLMs in a
 synchronous way.
-This API correlates to
+This API corresponds to
 the [generate](https://github.com/ollama/ollama/blob/main/docs/api.md#request-raw-mode) API with `raw` mode.
 :::note
@@ -29,8 +29,8 @@ You could do that with ease with the `function calling` capabilities of the mode
 ### Create Functions
-This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns a
-value.
+This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns
+fuel price value.
 ```java
 public static String getCurrentFuelPrice(Map<String, Object> arguments) {
@@ -40,8 +40,8 @@ public static String getCurrentFuelPrice(Map<String, Object> arguments) {
 }
 ```
-This function takes the argument `city` and performs an operation with the argument and returns a
-value.
+This function takes the argument `city` and performs an operation with the argument and returns the weather for a
+location.
 ```java
 public static String getCurrentWeather(Map<String, Object> arguments) {
@@ -50,6 +50,19 @@ public static String getCurrentWeather(Map<String, Object> arguments) {
 }
 ```
+This function takes the argument `employee-name` and performs an operation with the argument and returns employee
+details.
+```java
+class DBQueryFunction implements ToolFunction {
+    @Override
+    public Object apply(Map<String, Object> arguments) {
+        // perform DB operations here
+        return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
+    }
+}
+```
 ### Define Tool Specifications
 Lets define a sample tool specification called **Fuel Price Tool** for getting the current fuel price.
@@ -58,13 +71,13 @@ Lets define a sample tool specification called **Fuel Price Tool** for getting t
 - Associate the `getCurrentFuelPrice` function you defined earlier with `SampleTools::getCurrentFuelPrice`.
 ```java
-MistralTools.ToolSpecification fuelPriceToolSpecification = MistralTools.ToolSpecification.builder()
+Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
         .functionName("current-fuel-price")
-        .functionDesc("Get current fuel price")
+        .functionDescription("Get current fuel price")
-        .props(
+        .properties(
-                new MistralTools.PropsBuilder()
+                new Tools.PropsBuilder()
-                        .withProperty("location", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
+                        .withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
-                        .withProperty("fuelType", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
+                        .withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
                         .build()
         )
         .toolDefinition(SampleTools::getCurrentFuelPrice)
@@ -77,18 +90,38 @@ Lets also define a sample tool specification called **Weather Tool** for getting
 - Associate the `getCurrentWeather` function you defined earlier with `SampleTools::getCurrentWeather`.
 ```java
-MistralTools.ToolSpecification weatherToolSpecification = MistralTools.ToolSpecification.builder()
+Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
         .functionName("current-weather")
-        .functionDesc("Get current weather")
+        .functionDescription("Get current weather")
-        .props(
+        .properties(
-                new MistralTools.PropsBuilder()
+                new Tools.PropsBuilder()
-                        .withProperty("city", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
+                        .withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
                         .build()
         )
         .toolDefinition(SampleTools::getCurrentWeather)
         .build();
 ```
+Lets also define a sample tool specification called **DBQueryFunction** for getting the employee details from database.
+- Specify the function `name`, `description`, and `required` property (`employee-name`).
+- Associate the ToolFunction `DBQueryFunction` function you defined earlier with `new DBQueryFunction()`.
+```java
+Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
+        .functionName("get-employee-details")
+        .functionDescription("Get employee details from the database")
+        .properties(
+                new Tools.PropsBuilder()
+                        .withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
+                        .withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
+                        .withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
+                        .build()
+        )
+        .toolDefinition(new DBQueryFunction())
+        .build();
+```
 ### Register the Tools
 Register the defined tools (`fuel price` and `weather`) with the OllamaAPI.
@@ -96,6 +129,7 @@ Register the defined tools (`fuel price` and `weather`) with the OllamaAPI.
 ```shell
 ollamaAPI.registerTool(fuelPriceToolSpecification);
 ollamaAPI.registerTool(weatherToolSpecification);
+ollamaAPI.registerTool(databaseQueryToolSpecification);
 ```
 ### Create prompt with Tools
@@ -103,14 +137,14 @@ ollamaAPI.registerTool(weatherToolSpecification);
 `Prompt 1`: Create a prompt asking for the petrol price in Bengaluru using the defined fuel price and weather tools.
 ```shell
-String prompt1 = new MistralTools.PromptBuilder()
+String prompt1 = new Tools.PromptBuilder()
         .withToolSpecification(fuelPriceToolSpecification)
         .withToolSpecification(weatherToolSpecification)
         .withPrompt("What is the petrol price in Bengaluru?")
        .build();
-OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, false, new OptionsBuilder().build());
+OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, new OptionsBuilder().build());
-for (Map.Entry<ToolDef, Object> r : toolsResult.getToolResults().entrySet()) {
+for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
-    System.out.printf("[Response from tool '%s']: %s%n", r.getKey().getName(), r.getValue().toString());
+    System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
 }
 ```
@@ -120,21 +154,21 @@ You will get a response similar to:
 ::::tip[LLM Response]
-[Response from tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L
+[Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L
 ::::
 `Prompt 2`: Create a prompt asking for the current weather in Bengaluru using the same tools.
 ```shell
-String prompt2 = new MistralTools.PromptBuilder()
+String prompt2 = new Tools.PromptBuilder()
         .withToolSpecification(fuelPriceToolSpecification)
         .withToolSpecification(weatherToolSpecification)
         .withPrompt("What is the current weather in Bengaluru?")
        .build();
-OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, false, new OptionsBuilder().build());
+OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, new OptionsBuilder().build());
-for (Map.Entry<ToolDef, Object> r : toolsResult.getToolResults().entrySet()) {
+for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
-    System.out.printf("[Response from tool '%s']: %s%n", r.getKey().getName(), r.getValue().toString());
+    System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
 }
 ```
@@ -144,25 +178,53 @@ You will get a response similar to:
 ::::tip[LLM Response]
-[Response from tool 'current-weather']: Currently Bengaluru's weather is nice
+[Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice.
+::::
+`Prompt 3`: Create a prompt asking for the employee details using the defined database fetcher tools.
+```shell
+String prompt3 = new Tools.PromptBuilder()
+        .withToolSpecification(fuelPriceToolSpecification)
+        .withToolSpecification(weatherToolSpecification)
+        .withToolSpecification(databaseQueryToolSpecification)
+        .withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
+        .build();
+OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt3, new OptionsBuilder().build());
+for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
+    System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
+}
+```
+Again, fire away your question to the model.
+You will get a response similar to:
+::::tip[LLM Response]
+[Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name:
+Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
 ::::
 ### Full Example
 ```java
 import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
 import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
-import io.github.amithkoujalgi.ollama4j.core.tools.ToolDef;
+import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolInvocationException;
-import io.github.amithkoujalgi.ollama4j.core.tools.MistralTools;
 import io.github.amithkoujalgi.ollama4j.core.tools.OllamaToolsResult;
+import io.github.amithkoujalgi.ollama4j.core.tools.ToolFunction;
+import io.github.amithkoujalgi.ollama4j.core.tools.Tools;
 import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Map;
+import java.util.UUID;
-public class FunctionCallingWithMistral {
+public class FunctionCallingWithMistralExample {
     public static void main(String[] args) throws Exception {
         String host = "http://localhost:11434/";
         OllamaAPI ollamaAPI = new OllamaAPI(host);
@@ -170,78 +232,113 @@ public class FunctionCallingWithMistral {
         String model = "mistral";
-        MistralTools.ToolSpecification fuelPriceToolSpecification = MistralTools.ToolSpecification.builder()
+        Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
                 .functionName("current-fuel-price")
-                .functionDesc("Get current fuel price")
+                .functionDescription("Get current fuel price")
-                .props(
+                .properties(
-                        new MistralTools.PropsBuilder()
+                        new Tools.PropsBuilder()
-                                .withProperty("location", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
+                                .withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
-                                .withProperty("fuelType", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
+                                .withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
                                 .build()
                 )
                 .toolDefinition(SampleTools::getCurrentFuelPrice)
                 .build();
-        MistralTools.ToolSpecification weatherToolSpecification = MistralTools.ToolSpecification.builder()
+        Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
                 .functionName("current-weather")
-                .functionDesc("Get current weather")
+                .functionDescription("Get current weather")
-                .props(
+                .properties(
-                        new MistralTools.PropsBuilder()
+                        new Tools.PropsBuilder()
-                                .withProperty("city", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
+                                .withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
                                 .build()
                 )
                 .toolDefinition(SampleTools::getCurrentWeather)
                 .build();
+        Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
+                .functionName("get-employee-details")
+                .functionDescription("Get employee details from the database")
+                .properties(
+                        new Tools.PropsBuilder()
+                                .withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
+                                .withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
+                                .withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
+                                .build()
+                )
+                .toolDefinition(new DBQueryFunction())
+                .build();
         ollamaAPI.registerTool(fuelPriceToolSpecification);
         ollamaAPI.registerTool(weatherToolSpecification);
+        ollamaAPI.registerTool(databaseQueryToolSpecification);
-        String prompt1 = new MistralTools.PromptBuilder()
+        String prompt1 = new Tools.PromptBuilder()
                 .withToolSpecification(fuelPriceToolSpecification)
                 .withToolSpecification(weatherToolSpecification)
                 .withPrompt("What is the petrol price in Bengaluru?")
                 .build();
+        ask(ollamaAPI, model, prompt1);
-        String prompt2 = new MistralTools.PromptBuilder()
+        String prompt2 = new Tools.PromptBuilder()
                 .withToolSpecification(fuelPriceToolSpecification)
                 .withToolSpecification(weatherToolSpecification)
                 .withPrompt("What is the current weather in Bengaluru?")
                 .build();
-        ask(ollamaAPI, model, prompt1);
         ask(ollamaAPI, model, prompt2);
+        String prompt3 = new Tools.PromptBuilder()
+                .withToolSpecification(fuelPriceToolSpecification)
+                .withToolSpecification(weatherToolSpecification)
+                .withToolSpecification(databaseQueryToolSpecification)
+                .withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
+                .build();
+        ask(ollamaAPI, model, prompt3);
     }
-    public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException {
+    public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException {
-        OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, false, new OptionsBuilder().build());
+        OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, new OptionsBuilder().build());
-        for (Map.Entry<ToolDef, Object> r : toolsResult.getToolResults().entrySet()) {
+        for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
-            System.out.printf("[Response from tool '%s']: %s%n", r.getKey().getName(), r.getValue().toString());
+            System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
         }
     }
 }
 class SampleTools {
     public static String getCurrentFuelPrice(Map<String, Object> arguments) {
+        // Get details from fuel price API
         String location = arguments.get("location").toString();
         String fuelType = arguments.get("fuelType").toString();
         return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
     }
     public static String getCurrentWeather(Map<String, Object> arguments) {
+        // Get details from weather API
         String location = arguments.get("city").toString();
         return "Currently " + location + "'s weather is nice.";
     }
 }
+class DBQueryFunction implements ToolFunction {
+    @Override
+    public Object apply(Map<String, Object> arguments) {
+        // perform DB operations here
+        return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
+    }
+}
 ```
 Run this full example and you will get a response similar to:
 ::::tip[LLM Response]
-[Response from tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L
+[Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L
+[Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice.
+[Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name:
+Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
-[Response from tool 'current-weather']: Currently Bengaluru's weather is nice
 ::::
 ### Room for improvement

Changed file: docs page "Generate - Sync"

@@ -5,8 +5,8 @@ sidebar_position: 1
 # Generate - Sync
 This API lets you ask questions to the LLMs in a synchronous way.
-These APIs correlate to
-the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
+This API corresponds to
+the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
 Use the `OptionBuilder` to build the `Options` object
 with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
@@ -141,7 +141,7 @@ public class Main {
 ```
 _Note: Here I've used
-a [sample prompt](https://github.com/amithkoujalgi/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
+a [sample prompt](https://github.com/ollama4j/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
 containing a database schema from within this library for demonstration purposes._
 You'd then get a response from the model:

Changed file: docs installation page (dependency setup)

@@ -84,7 +84,7 @@ Add the dependency to your project's `pom.xml`.
 </dependency>
 ```
-Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j).
+Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j).
 You might want to include an implementation of [SL4J](https://www.slf4j.org/) logger in your `pom.xml` file. For
 example,

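The hunk above cuts off mid-sentence ("For example,"); the docs presumably continue with a logger dependency snippet. For reference, a common SLF4J binding is shown below — a hedged illustration that is not part of this diff, and the version number is an assumption.

```xml
<!-- Illustrative only; not part of this diff. slf4j-simple is one common SLF4J binding. -->
<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>slf4j-simple</artifactId>
    <!-- Assumed version; use whatever release is current. -->
    <version>2.0.13</version>
</dependency>
```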
Changed file: Docusaurus site configuration

@@ -20,7 +20,7 @@ const config = {
 // GitHub pages deployment config.
 // If you aren't using GitHub pages, you don't need these.
-organizationName: 'amithkoujalgi', // Usually your GitHub org/user name.
+organizationName: 'ollama4j', // Usually your GitHub org/user name.
 projectName: 'ollama4j', // Usually your repo name.
 onBrokenLinks: 'throw',
@@ -46,14 +46,14 @@ const config = {
 // Please change this to your repo.
 // Remove this to remove the "edit this page" links.
 editUrl:
-    'https://github.com/amithkoujalgi/ollama4j/blob/main/docs',
+    'https://github.com/ollama4j/ollama4j/blob/main/docs',
 },
 blog: {
   showReadingTime: true,
   // Please change this to your repo.
   // Remove this to remove the "edit this page" links.
   editUrl:
-      'https://github.com/amithkoujalgi/ollama4j/blob/main/docs',
+      'https://github.com/ollama4j/ollama4j/blob/main/docs',
 },
 theme: {
   customCss: './src/css/custom.css',
@@ -80,11 +80,11 @@ const config = {
   position: 'left',
   label: 'Docs',
 },
-{to: 'https://amithkoujalgi.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
+{to: 'https://ollama4j.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
-{to: 'https://amithkoujalgi.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'},
+{to: 'https://ollama4j.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'},
 {to: '/blog', label: 'Blog', position: 'left'},
 {
-  href: 'https://github.com/amithkoujalgi/ollama4j',
+  href: 'https://github.com/ollama4j/ollama4j',
   label: 'GitHub',
   position: 'right',
 },
@@ -124,7 +124,7 @@ const config = {
 },
 {
   label: 'GitHub',
-  href: 'https://github.com/amithkoujalgi/ollama4j',
+  href: 'https://github.com/ollama4j/ollama4j',
 },
 ],
 },

View File

@@ -1,68 +0,0 @@
## This workflow will build a package using Maven and then publish it to GitHub packages when a release is created
## For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path
#
#name: Test and Publish Package
#
##on:
## release:
## types: [ "created" ]
#
#on:
# push:
# branches: [ "main" ]
# workflow_dispatch:
#
#jobs:
# build:
# runs-on: ubuntu-latest
# permissions:
# contents: write
# packages: write
# steps:
# - uses: actions/checkout@v3
# - name: Set up JDK 11
# uses: actions/setup-java@v3
# with:
# java-version: '11'
# distribution: 'adopt-hotspot'
# server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
# settings-path: ${{ github.workspace }} # location for the settings.xml file
# - name: Build with Maven
# run: mvn --file pom.xml -U clean package -Punit-tests
# - name: Set up Apache Maven Central (Overwrite settings.xml)
# uses: actions/setup-java@v3
# with: # running setup-java again overwrites the settings.xml
# java-version: '11'
# distribution: 'adopt-hotspot'
# cache: 'maven'
# server-id: ossrh
# server-username: MAVEN_USERNAME
# server-password: MAVEN_PASSWORD
# gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
# gpg-passphrase: MAVEN_GPG_PASSPHRASE
# - name: Set up Maven cache
# uses: actions/cache@v3
# with:
# path: ~/.m2/repository
# key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
# restore-keys: |
# ${{ runner.os }}-maven-
# - name: Build
# run: mvn -B -ntp clean install
# - name: Upload coverage reports to Codecov
# uses: codecov/codecov-action@v3
# env:
# CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
# - name: Publish to GitHub Packages Apache Maven
# # if: >
# # github.event_name != 'pull_request' &&
# # github.ref_name == 'main' &&
# # contains(github.event.head_commit.message, 'release')
# run: |
# git config --global user.email "koujalgi.amith@gmail.com"
# git config --global user.name "amithkoujalgi"
# mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform
# env:
# MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
# MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
# MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}

pom.xml
View File

@@ -3,13 +3,13 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
-<groupId>io.github.amithkoujalgi</groupId>
+<groupId>io.github.ollama4j</groupId>
<artifactId>ollama4j</artifactId>
<version>ollama4j-revision</version>
<name>Ollama4j</name>
<description>Java library for interacting with Ollama API.</description>
-<url>https://github.com/amithkoujalgi/ollama4j</url>
+<url>https://github.com/ollama4j/ollama4j</url>
<packaging>jar</packaging>
<properties>
@@ -33,15 +33,15 @@
<licenses>
<license>
<name>MIT License</name>
-<url>https://raw.githubusercontent.com/amithkoujalgi/ollama4j/main/LICENSE</url>
+<url>https://raw.githubusercontent.com/ollama4j/ollama4j/main/LICENSE</url>
</license>
</licenses>
<scm>
-<connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
-<developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
-<url>https://github.com/amithkoujalgi/ollama4j</url>
-<tag>v1.0.16</tag>
+<connection>scm:git:git@github.com:ollama4j/ollama4j.git</connection>
+<developerConnection>scm:git:https://github.com/ollama4j/ollama4j.git</developerConnection>
+<url>https://github.com/ollama4j/ollama4j</url>
+<tag>ollama4j-revision</tag>
</scm>
<build>
@@ -72,27 +72,7 @@
</execution>
</executions>
</plugin>
<!-- <plugin>-->
<!-- <groupId>org.apache.maven.plugins</groupId>-->
<!-- <artifactId>maven-gpg-plugin</artifactId>-->
<!-- <version>1.5</version>-->
<!-- <executions>-->
<!-- <execution>-->
<!-- <id>sign-artifacts</id>-->
<!-- <phase>verify</phase>-->
<!-- <goals>-->
<!-- <goal>sign</goal>-->
<!-- </goals>-->
<!-- <configuration>-->
<!-- &lt;!&ndash; This is necessary for gpg to not try to use the pinentry programs &ndash;&gt;-->
<!-- <gpgArguments>-->
<!-- <arg>&#45;&#45;pinentry-mode</arg>-->
<!-- <arg>loopback</arg>-->
<!-- </gpgArguments>-->
<!-- </configuration>-->
<!-- </execution>-->
<!-- </executions>-->
<!-- </plugin>-->
<!-- Surefire Plugin for Unit Tests -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
@@ -129,15 +109,23 @@
</execution>
</executions>
</plugin>
-<!-- <plugin>-->
-<!-- <groupId>org.apache.maven.plugins</groupId>-->
-<!-- <artifactId>maven-release-plugin</artifactId>-->
-<!-- <version>3.0.1</version>-->
-<!-- <configuration>-->
-<!-- &lt;!&ndash; <goals>install</goals>&ndash;&gt;-->
-<!-- <tagNameFormat>v@{project.version}</tagNameFormat>-->
-<!-- </configuration>-->
-<!-- </plugin>-->
+<plugin>
+<groupId>org.apache.maven.plugins</groupId>
+<artifactId>maven-gpg-plugin</artifactId>
+<version>1.5</version>
+<executions>
+<execution>
+<id>sign-artifacts</id>
+<phase>verify</phase>
+<goals>
+<goal>sign</goal>
+</goals>
+</execution>
+</executions>
+</plugin>
</plugins>
</build>
@@ -189,27 +177,39 @@
</dependency>
</dependencies>
<!-- <distributionManagement>-->
<!-- <snapshotRepository>-->
<!-- <id>ossrh</id>-->
<!-- <url>https://s01.oss.sonatype.org/content/repositories/snapshots</url>-->
<!-- </snapshotRepository>-->
<!-- <repository>-->
<!-- <id>ossrh</id>-->
<!-- <url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2</url>-->
<!-- </repository>-->
<!-- </distributionManagement>-->
<!-- Replaced publishing packages to GitHub Packages instead of Maven central -->
<distributionManagement>
<repository>
-<id>github</id>
-<name>GitHub Packages</name>
-<url>https://maven.pkg.github.com/amithkoujalgi/ollama4j</url>
+<id>mvn-repo-id</id>
</repository>
</distributionManagement>
<profiles>
<profile>
<id>ossrh</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<properties>
<gpg.executable>gpg2</gpg.executable>
<test.env>unit</test.env>
<skipUnitTests>false</skipUnitTests>
<skipIntegrationTests>true</skipIntegrationTests>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.sonatype.central</groupId>
<artifactId>central-publishing-maven-plugin</artifactId>
<version>0.5.0</version>
<extensions>true</extensions>
<configuration>
<publishingServerId>mvn-repo-id</publishingServerId>
<autoPublish>true</autoPublish>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>unit-tests</id>
<properties>
@@ -218,7 +218,7 @@
<skipIntegrationTests>true</skipIntegrationTests>
</properties>
<activation>
-<activeByDefault>true</activeByDefault>
+<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
@@ -261,39 +261,39 @@
</properties>
<build>
<plugins>
-<!-- <plugin>-->
-<!-- <groupId>org.apache.maven.plugins</groupId>-->
-<!-- <artifactId>maven-gpg-plugin</artifactId>-->
-<!-- <version>3.1.0</version>-->
-<!-- <executions>-->
-<!-- <execution>-->
-<!-- <id>sign-artifacts</id>-->
-<!-- <phase>verify</phase>-->
-<!-- <goals>-->
-<!-- <goal>sign</goal>-->
-<!-- </goals>-->
-<!-- <configuration>-->
-<!-- &lt;!&ndash; Prevent gpg from using pinentry programs. Fixes:-->
-<!-- gpg: signing failed: Inappropriate ioctl for device &ndash;&gt;-->
-<!-- <gpgArguments>-->
-<!-- <arg>&#45;&#45;pinentry-mode</arg>-->
-<!-- <arg>loopback</arg>-->
-<!-- </gpgArguments>-->
-<!-- </configuration>-->
-<!-- </execution>-->
-<!-- </executions>-->
-<!-- </plugin>-->
-<!-- <plugin>-->
-<!-- <groupId>org.sonatype.plugins</groupId>-->
-<!-- <artifactId>nexus-staging-maven-plugin</artifactId>-->
-<!-- <version>1.6.13</version>-->
-<!-- <extensions>true</extensions>-->
-<!-- <configuration>-->
-<!-- <serverId>ossrh</serverId>-->
-<!-- <nexusUrl>https://s01.oss.sonatype.org/</nexusUrl>-->
-<!-- <autoReleaseAfterClose>true</autoReleaseAfterClose>-->
-<!-- </configuration>-->
-<!-- </plugin>-->
+<plugin>
+<groupId>org.apache.maven.plugins</groupId>
+<artifactId>maven-gpg-plugin</artifactId>
+<version>3.1.0</version>
+<executions>
+<execution>
+<id>sign-artifacts</id>
+<phase>verify</phase>
+<goals>
+<goal>sign</goal>
+</goals>
+<configuration>
+<!-- Prevent gpg from using pinentry programs. Fixes:
+gpg: signing failed: Inappropriate ioctl for device -->
+<gpgArguments>
+<arg>--pinentry-mode</arg>
+<arg>loopback</arg>
+</gpgArguments>
+</configuration>
+</execution>
+</executions>
+</plugin>
+<plugin>
+<groupId>org.sonatype.plugins</groupId>
+<artifactId>nexus-staging-maven-plugin</artifactId>
+<version>1.6.13</version>
+<extensions>true</extensions>
+<configuration>
+<serverId>ossrh</serverId>
+<nexusUrl>https://s01.oss.sonatype.org/</nexusUrl>
+<autoReleaseAfterClose>true</autoReleaseAfterClose>
+</configuration>
+</plugin>
<plugin>
<groupId>org.jacoco</groupId>

View File

@@ -1,6 +1,8 @@
package io.github.amithkoujalgi.ollama4j.core;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
+import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolInvocationException;
+import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolNotFoundException;
import io.github.amithkoujalgi.ollama4j.core.models.*;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
@@ -9,10 +11,12 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.request.*;
import io.github.amithkoujalgi.ollama4j.core.tools.*;
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+import lombok.Setter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -36,10 +40,22 @@ public class OllamaAPI {
private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class);
private final String host;
+/**
+* -- SETTER --
+* Set request timeout in seconds. Default is 3 seconds.
+*/
+@Setter
private long requestTimeoutSeconds = 10;
+/**
+* -- SETTER --
+* Set/unset logging of responses
+*/
+@Setter
private boolean verbose = true;
private BasicAuth basicAuth;
+private final ToolRegistry toolRegistry = new ToolRegistry();
/**
* Instantiates the Ollama API.
*
@@ -53,24 +69,6 @@ public class OllamaAPI {
}
}
/**
* Set request timeout in seconds. Default is 3 seconds.
*
* @param requestTimeoutSeconds the request timeout in seconds
*/
public void setRequestTimeoutSeconds(long requestTimeoutSeconds) {
this.requestTimeoutSeconds = requestTimeoutSeconds;
}
/**
* Set/unset logging of responses
*
* @param verbose true/false
*/
public void setVerbose(boolean verbose) {
this.verbose = verbose;
}
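The hand-written setters removed above are replaced by Lombok `@Setter` annotations on the corresponding fields, so the caller-facing API is unchanged; a minimal sketch (host URL is a placeholder):

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;

public class ConfigExample {
    public static void main(String[] args) {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
        // Lombok generates these setters from the @Setter-annotated fields above,
        // so existing callers are unaffected by removing the explicit methods.
        ollamaAPI.setRequestTimeoutSeconds(60); // default in this diff is 10
        ollamaAPI.setVerbose(false);            // default in this diff is true
    }
}
```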
/**
* Set basic authentication for accessing Ollama server that's behind a reverse-proxy/gateway.
*
@@ -360,15 +358,15 @@ public class OllamaAPI {
}
/**
-* Convenience method to call Ollama API without streaming responses.
+* Generates response using the specified AI model and prompt (in blocking mode).
* <p>
* Uses {@link #generate(String, String, boolean, Options, OllamaStreamHandler)}
*
-* @param model Model to use
-* @param prompt Prompt text
+* @param model The name or identifier of the AI model to use for generating the response.
+* @param prompt The input text or prompt to provide to the AI model.
* @param raw In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the raw parameter to disable templating. Also note that raw mode will not return a context.
-* @param options Additional Options
-* @return OllamaResult
+* @param options Additional options or configurations to use when generating the response.
+* @return {@link OllamaResult}
*/
public OllamaResult generate(String model, String prompt, boolean raw, Options options)
throws OllamaBaseException, IOException, InterruptedException {
@@ -376,17 +374,36 @@ public class OllamaAPI {
}
-public OllamaToolsResult generateWithTools(String model, String prompt, boolean raw, Options options)
-throws OllamaBaseException, IOException, InterruptedException {
+/**
+* Generates response using the specified AI model and prompt (in blocking mode), and then invokes a set of tools
+* on the generated response.
+*
+* @param model The name or identifier of the AI model to use for generating the response.
+* @param prompt The input text or prompt to provide to the AI model.
+* @param options Additional options or configurations to use when generating the response.
+* @return {@link OllamaToolsResult} An OllamaToolsResult object containing the response from the AI model and the results of invoking the tools on that output.
+* @throws OllamaBaseException If there is an error related to the Ollama API or service.
+* @throws IOException If there is an error related to input/output operations.
+* @throws InterruptedException If the method is interrupted while waiting for the AI model
+* to generate the response or for the tools to be invoked.
+*/
+public OllamaToolsResult generateWithTools(String model, String prompt, Options options)
+throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException {
+boolean raw = true;
OllamaToolsResult toolResult = new OllamaToolsResult();
-Map<ToolDef, Object> toolResults = new HashMap<>();
+Map<ToolFunctionCallSpec, Object> toolResults = new HashMap<>();
OllamaResult result = generate(model, prompt, raw, options, null);
toolResult.setModelResult(result);
-List<ToolDef> toolDefs = Utils.getObjectMapper().readValue(result.getResponse(), Utils.getObjectMapper().getTypeFactory().constructCollectionType(List.class, ToolDef.class));
-for (ToolDef toolDef : toolDefs) {
-toolResults.put(toolDef, invokeTool(toolDef));
+String toolsResponse = result.getResponse();
+if (toolsResponse.contains("[TOOL_CALLS]")) {
+toolsResponse = toolsResponse.replace("[TOOL_CALLS]", "");
+}
+List<ToolFunctionCallSpec> toolFunctionCallSpecs = Utils.getObjectMapper().readValue(toolsResponse, Utils.getObjectMapper().getTypeFactory().constructCollectionType(List.class, ToolFunctionCallSpec.class));
+for (ToolFunctionCallSpec toolFunctionCallSpec : toolFunctionCallSpecs) {
+toolResults.put(toolFunctionCallSpec, invokeTool(toolFunctionCallSpec));
}
toolResult.setToolResults(toolResults);
return toolResult;
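For orientation, a hedged sketch of calling the reworked `generateWithTools` (raw mode is now forced internally, so it is no longer a parameter); the package paths, model name, and prompt are assumptions, while the result types are the ones added in this diff:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.tools.OllamaToolsResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class ToolsExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");

        OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(
                "mistral",
                "What is the petrol price in Bengaluru?",
                new OptionsBuilder().build());

        // getToolResults() flattens the internal map into (functionName, arguments, result) triples.
        for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
            System.out.printf("[Result of executing tool '%s']: %s%n",
                    r.getFunctionName(), r.getResult());
        }
    }
}
```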
@@ -402,15 +419,15 @@ public class OllamaAPI {
* @param prompt the prompt/question text
* @return the ollama async result callback handle
*/
-public OllamaAsyncResultCallback generateAsync(String model, String prompt, boolean raw) {
+public OllamaAsyncResultStreamer generateAsync(String model, String prompt, boolean raw) {
OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
ollamaRequestModel.setRaw(raw);
URI uri = URI.create(this.host + "/api/generate");
-OllamaAsyncResultCallback ollamaAsyncResultCallback =
-new OllamaAsyncResultCallback(
+OllamaAsyncResultStreamer ollamaAsyncResultStreamer =
+new OllamaAsyncResultStreamer(
getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds);
-ollamaAsyncResultCallback.start();
-return ollamaAsyncResultCallback;
+ollamaAsyncResultStreamer.start();
+return ollamaAsyncResultStreamer;
}
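A hedged sketch of how the renamed streamer might be consumed; `getStream()` and the draining `poll()` come from the `OllamaAsyncResultStreamer` and `OllamaResultStream` classes added later in this diff, and the model, prompt, and polling interval are arbitrary:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultStreamer;

public class GenerateAsyncExample {
    public static void main(String[] args) throws InterruptedException {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");

        OllamaAsyncResultStreamer streamer =
                ollamaAPI.generateAsync("llama2", "List the planets of the solar system.", false);

        // The streamer is a Thread; poll its stream until the request finishes.
        while (streamer.isAlive()) {
            System.out.print(streamer.getStream().poll());
            Thread.sleep(250); // arbitrary polling interval
        }
        System.out.print(streamer.getStream().poll()); // drain any remaining tokens
        System.out.println();
        System.out.println("HTTP status: " + streamer.getHttpStatusCode());
    }
}
```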
/**
@@ -508,7 +525,7 @@
* Hint: the OllamaChatRequestModel#getStream() property is not implemented.
*
* @param request request object to be sent to the server
-* @return
+* @return {@link OllamaChatResult}
* @throws OllamaBaseException any response code than 200 has been returned
* @throws IOException in case the responseStream can not be read
* @throws InterruptedException in case the server is not reachable or network issues happen
@@ -524,7 +541,7 @@
*
* @param request request object to be sent to the server
* @param streamHandler callback handler to handle the last message from stream (caution: all previous messages from stream will be concatenated)
-* @return
+* @return {@link OllamaChatResult}
* @throws OllamaBaseException any response code than 200 has been returned
* @throws IOException in case the responseStream can not be read
* @throws InterruptedException in case the server is not reachable or network issues happen
@@ -541,6 +558,10 @@
return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages());
}
public void registerTool(Tools.ToolSpecification toolSpecification) {
toolRegistry.addFunction(toolSpecification.getFunctionName(), toolSpecification.getToolDefinition());
}
// technical private methods //
private static String encodeFileToBase64(File file) throws IOException {
@@ -603,22 +624,20 @@ public class OllamaAPI {
}
-public void registerTool(MistralTools.ToolSpecification toolSpecification) {
-ToolRegistry.addFunction(toolSpecification.getFunctionName(), toolSpecification.getToolDefinition());
-}
-private Object invokeTool(ToolDef toolDef) {
+private Object invokeTool(ToolFunctionCallSpec toolFunctionCallSpec) throws ToolInvocationException {
try {
-String methodName = toolDef.getName();
-Map<String, Object> arguments = toolDef.getArguments();
-DynamicFunction function = ToolRegistry.getFunction(methodName);
+String methodName = toolFunctionCallSpec.getName();
+Map<String, Object> arguments = toolFunctionCallSpec.getArguments();
+ToolFunction function = toolRegistry.getFunction(methodName);
+if (verbose) {
+logger.debug("Invoking function {} with arguments {}", methodName, arguments);
+}
if (function == null) {
-throw new IllegalArgumentException("No such tool: " + methodName);
+throw new ToolNotFoundException("No such tool: " + methodName);
}
return function.apply(arguments);
} catch (Exception e) {
-e.printStackTrace();
-return "Error calling tool: " + e.getMessage();
+throw new ToolInvocationException("Failed to invoke tool: " + toolFunctionCallSpec.getName(), e);
}
}
}

View File

@@ -0,0 +1,18 @@
package io.github.amithkoujalgi.ollama4j.core;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Queue;
public class OllamaResultStream extends LinkedList<String> implements Queue<String> {
@Override
public String poll() {
StringBuilder tokens = new StringBuilder();
Iterator<String> iterator = this.listIterator();
while (iterator.hasNext()) {
tokens.append(iterator.next());
iterator.remove();
}
return tokens.toString();
}
}
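The overridden `poll()` above drains and concatenates every token queued so far instead of returning a single element; a small usage sketch (not part of the diff itself):

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaResultStream;

public class ResultStreamSketch {
    public static void main(String[] args) {
        OllamaResultStream stream = new OllamaResultStream();
        stream.add("Hello");
        stream.add(", ");
        stream.add("world");

        // poll() returns everything added so far as one string and empties the backing list.
        System.out.println(stream.poll()); // "Hello, world"
        System.out.println(stream.size()); // 0
    }
}
```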

View File

@@ -1,7 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core;
import java.util.function.Consumer;
public interface OllamaStreamHandler extends Consumer<String>{
void accept(String message);
}

View File

@@ -0,0 +1,8 @@
package io.github.amithkoujalgi.ollama4j.core.exceptions;
public class ToolInvocationException extends Exception {
public ToolInvocationException(String s, Exception e) {
super(s, e);
}
}

View File

@@ -0,0 +1,8 @@
package io.github.amithkoujalgi.ollama4j.core.exceptions;
public class ToolNotFoundException extends Exception {
public ToolNotFoundException(String s) {
super(s);
}
}

View File

@@ -1,6 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.impl;
-import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler;
public class ConsoleOutputStreamHandler implements OllamaStreamHandler {
private final StringBuffer response = new StringBuffer();

View File

@@ -1,143 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core.models;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.LinkedList;
import java.util.Queue;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.Getter;
@Data
@EqualsAndHashCode(callSuper = true)
@SuppressWarnings("unused")
public class OllamaAsyncResultCallback extends Thread {
private final HttpRequest.Builder requestBuilder;
private final OllamaGenerateRequestModel ollamaRequestModel;
private final Queue<String> queue = new LinkedList<>();
private String result;
private boolean isDone;
/**
* -- GETTER -- Returns the status of the request. Indicates if the request was successful or a
* failure. If the request was a failure, the `getResponse()` method will return the error
* message.
*/
@Getter private boolean succeeded;
private long requestTimeoutSeconds;
/**
* -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama
* server.
*/
@Getter private int httpStatusCode;
/** -- GETTER -- Returns the response time in milliseconds. */
@Getter private long responseTime = 0;
public OllamaAsyncResultCallback(
HttpRequest.Builder requestBuilder,
OllamaGenerateRequestModel ollamaRequestModel,
long requestTimeoutSeconds) {
this.requestBuilder = requestBuilder;
this.ollamaRequestModel = ollamaRequestModel;
this.isDone = false;
this.result = "";
this.queue.add("");
this.requestTimeoutSeconds = requestTimeoutSeconds;
}
@Override
public void run() {
HttpClient httpClient = HttpClient.newHttpClient();
try {
long startTime = System.currentTimeMillis();
HttpRequest request =
requestBuilder
.POST(
HttpRequest.BodyPublishers.ofString(
Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)))
.header("Content-Type", "application/json")
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
.build();
HttpResponse<InputStream> response =
httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
int statusCode = response.statusCode();
this.httpStatusCode = statusCode;
InputStream responseBodyStream = response.body();
try (BufferedReader reader =
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
String line;
StringBuilder responseBuffer = new StringBuilder();
while ((line = reader.readLine()) != null) {
if (statusCode == 404) {
OllamaErrorResponseModel ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
queue.add(ollamaResponseModel.getError());
responseBuffer.append(ollamaResponseModel.getError());
} else {
OllamaGenerateResponseModel ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
queue.add(ollamaResponseModel.getResponse());
if (!ollamaResponseModel.isDone()) {
responseBuffer.append(ollamaResponseModel.getResponse());
}
}
}
this.isDone = true;
this.succeeded = true;
this.result = responseBuffer.toString();
long endTime = System.currentTimeMillis();
responseTime = endTime - startTime;
}
if (statusCode != 200) {
throw new OllamaBaseException(this.result);
}
} catch (IOException | InterruptedException | OllamaBaseException e) {
this.isDone = true;
this.succeeded = false;
this.result = "[FAILED] " + e.getMessage();
}
}
/**
* Returns the status of the thread. This does not indicate that the request was successful or a
* failure, rather it is just a status flag to indicate if the thread is active or ended.
*
* @return boolean - status
*/
public boolean isComplete() {
return isDone;
}
/**
* Returns the final completion/response when the execution completes. Does not return intermediate results.
*
* @return String completion/response text
*/
public String getResponse() {
return result;
}
public Queue<String> getStream() {
return queue;
}
public void setRequestTimeoutSeconds(long requestTimeoutSeconds) {
this.requestTimeoutSeconds = requestTimeoutSeconds;
}
}

View File

@@ -0,0 +1,124 @@
package io.github.amithkoujalgi.ollama4j.core.models;
import io.github.amithkoujalgi.ollama4j.core.OllamaResultStream;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
@Data
@EqualsAndHashCode(callSuper = true)
@SuppressWarnings("unused")
public class OllamaAsyncResultStreamer extends Thread {
private final HttpRequest.Builder requestBuilder;
private final OllamaGenerateRequestModel ollamaRequestModel;
private final OllamaResultStream stream = new OllamaResultStream();
private String completeResponse;
/**
* -- GETTER -- Returns the status of the request. Indicates if the request was successful or a
* failure. If the request was a failure, the `getResponse()` method will return the error
* message.
*/
@Getter
private boolean succeeded;
@Setter
private long requestTimeoutSeconds;
/**
* -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama
* server.
*/
@Getter
private int httpStatusCode;
/**
* -- GETTER -- Returns the response time in milliseconds.
*/
@Getter
private long responseTime = 0;
public OllamaAsyncResultStreamer(
HttpRequest.Builder requestBuilder,
OllamaGenerateRequestModel ollamaRequestModel,
long requestTimeoutSeconds) {
this.requestBuilder = requestBuilder;
this.ollamaRequestModel = ollamaRequestModel;
this.completeResponse = "";
this.stream.add("");
this.requestTimeoutSeconds = requestTimeoutSeconds;
}
@Override
public void run() {
ollamaRequestModel.setStream(true);
HttpClient httpClient = HttpClient.newHttpClient();
try {
long startTime = System.currentTimeMillis();
HttpRequest request =
requestBuilder
.POST(
HttpRequest.BodyPublishers.ofString(
Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)))
.header("Content-Type", "application/json")
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
.build();
HttpResponse<InputStream> response =
httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
int statusCode = response.statusCode();
this.httpStatusCode = statusCode;
InputStream responseBodyStream = response.body();
try (BufferedReader reader =
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
String line;
StringBuilder responseBuffer = new StringBuilder();
while ((line = reader.readLine()) != null) {
if (statusCode == 404) {
OllamaErrorResponseModel ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
stream.add(ollamaResponseModel.getError());
responseBuffer.append(ollamaResponseModel.getError());
} else {
OllamaGenerateResponseModel ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
String res = ollamaResponseModel.getResponse();
stream.add(res);
if (!ollamaResponseModel.isDone()) {
responseBuffer.append(res);
}
}
}
this.succeeded = true;
this.completeResponse = responseBuffer.toString();
long endTime = System.currentTimeMillis();
responseTime = endTime - startTime;
}
if (statusCode != 200) {
throw new OllamaBaseException(this.completeResponse);
}
} catch (IOException | InterruptedException | OllamaBaseException e) {
this.succeeded = false;
this.completeResponse = "[FAILED] " + e.getMessage();
}
}
}

View File

@@ -1,10 +1,10 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler;
import java.util.ArrayList;
import java.util.List;
-import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
public class OllamaChatStreamObserver {
private OllamaStreamHandler streamHandler;

View File

@@ -3,8 +3,6 @@ package io.github.amithkoujalgi.ollama4j.core.models.generate;
import java.util.ArrayList;
import java.util.List;
-import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
public class OllamaGenerateStreamObserver {
private OllamaStreamHandler streamHandler;

View File

@@ -0,0 +1,7 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate;
import java.util.function.Consumer;
public interface OllamaStreamHandler extends Consumer<String> {
void accept(String message);
}
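Because `OllamaStreamHandler` is just a `Consumer<String>`, a lambda can be handed to the streaming overloads referenced in the Javadoc elsewhere in this diff, e.g. `generate(String, String, boolean, Options, OllamaStreamHandler)`; a hedged sketch with placeholder model and prompt:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class StreamingExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");

        // Each invocation receives the response text streamed so far.
        OllamaStreamHandler handler = partialResponse -> System.out.println(partialResponse);

        ollamaAPI.generate("llama2", "Why is the sky blue?", false,
                new OptionsBuilder().build(), handler);
    }
}
```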

View File

@@ -1,12 +1,12 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;
import com.fasterxml.jackson.core.JsonProcessingException;
-import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger;

View File

@@ -1,12 +1,12 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;
import com.fasterxml.jackson.core.JsonProcessingException;
-import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger;

View File

@@ -5,6 +5,8 @@ import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
+import java.util.ArrayList;
+import java.util.List;
import java.util.Map;
@Data
@@ -12,5 +14,22 @@ import java.util.Map;
@AllArgsConstructor
public class OllamaToolsResult {
private OllamaResult modelResult;
-private Map<ToolDef, Object> toolResults;
+private Map<ToolFunctionCallSpec, Object> toolResults;
+public List<ToolResult> getToolResults() {
+List<ToolResult> results = new ArrayList<>();
+for (Map.Entry<ToolFunctionCallSpec, Object> r : this.toolResults.entrySet()) {
+results.add(new ToolResult(r.getKey().getName(), r.getKey().getArguments(), r.getValue()));
+}
+return results;
+}
+@Data
+@NoArgsConstructor
+@AllArgsConstructor
+public static class ToolResult {
+private String functionName;
+private Map<String, Object> functionArguments;
+private Object result;
+}
}
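A small sketch of what the new `getToolResults()` flattening produces, built directly against the Lombok-generated constructors shown above; the package paths are assumptions based on the diff:

```java
import io.github.amithkoujalgi.ollama4j.core.tools.OllamaToolsResult;
import io.github.amithkoujalgi.ollama4j.core.tools.ToolFunctionCallSpec;

import java.util.HashMap;
import java.util.Map;

public class ToolsResultSketch {
    public static void main(String[] args) {
        // Build the raw map that generateWithTools(...) fills internally.
        Map<String, Object> argsMap = new HashMap<>();
        argsMap.put("city", "Bengaluru");

        Map<ToolFunctionCallSpec, Object> raw = new HashMap<>();
        raw.put(new ToolFunctionCallSpec("current-weather", argsMap),
                "Currently Bengaluru's weather is nice.");

        OllamaToolsResult result = new OllamaToolsResult(null, raw);

        // getToolResults() flattens the map into (functionName, arguments, result) triples.
        for (OllamaToolsResult.ToolResult r : result.getToolResults()) {
            System.out.println(r.getFunctionName() + " -> " + r.getResult());
        }
    }
}
```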

View File

@@ -3,6 +3,6 @@ package io.github.amithkoujalgi.ollama4j.core.tools;
import java.util.Map;
@FunctionalInterface
-public interface DynamicFunction {
+public interface ToolFunction {
Object apply(Map<String, Object> arguments);
}

View File

@@ -9,10 +9,8 @@ import java.util.Map;
@Data
@AllArgsConstructor
@NoArgsConstructor
-public class ToolDef {
+public class ToolFunctionCallSpec {
private String name;
private Map<String, Object> arguments;
}

View File

@@ -4,14 +4,13 @@ import java.util.HashMap;
import java.util.Map;
public class ToolRegistry {
-private static final Map<String, DynamicFunction> functionMap = new HashMap<>();
-public static DynamicFunction getFunction(String name) {
+private final Map<String, ToolFunction> functionMap = new HashMap<>();
+public ToolFunction getFunction(String name) {
return functionMap.get(name);
}
-public static void addFunction(String name, DynamicFunction function) {
+public void addFunction(String name, ToolFunction function) {
functionMap.put(name, function);
}
}
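The registry is now instance-scoped rather than static; a minimal sketch of the new usage, limited to the two methods shown above (package paths assumed from the diff):

```java
import io.github.amithkoujalgi.ollama4j.core.tools.ToolFunction;
import io.github.amithkoujalgi.ollama4j.core.tools.ToolRegistry;

import java.util.HashMap;
import java.util.Map;

public class RegistrySketch {
    public static void main(String[] args) {
        // Previously registration went through static ToolRegistry methods;
        // now each OllamaAPI instance owns its own registry.
        ToolRegistry registry = new ToolRegistry();

        ToolFunction echo = arguments -> "echo: " + arguments;
        registry.addFunction("echo", echo);

        Map<String, Object> arguments = new HashMap<>();
        arguments.put("msg", "hi");
        System.out.println(registry.getFunction("echo").apply(arguments));
    }
}
```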

View File

@@ -14,14 +14,14 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-public class MistralTools {
+public class Tools {
@Data
@Builder
public static class ToolSpecification {
private String functionName;
-private String functionDesc;
-private Map<String, PromptFuncDefinition.Property> props;
-private DynamicFunction toolDefinition;
+private String functionDescription;
+private Map<String, PromptFuncDefinition.Property> properties;
+private ToolFunction toolDefinition;
}
@Data
@@ -90,14 +90,14 @@
PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec();
functionDetail.setName(spec.getFunctionName());
-functionDetail.setDescription(spec.getFunctionDesc());
+functionDetail.setDescription(spec.getFunctionDescription());
PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters();
parameters.setType("object");
-parameters.setProperties(spec.getProps());
+parameters.setProperties(spec.getProperties());
List<String> requiredValues = new ArrayList<>();
-for (Map.Entry<String, PromptFuncDefinition.Property> p : spec.getProps().entrySet()) {
+for (Map.Entry<String, PromptFuncDefinition.Property> p : spec.getProperties().entrySet()) {
if (p.getValue().isRequired()) {
requiredValues.add(p.getKey());
}
@@ -109,31 +109,5 @@
tools.add(def);
return this;
}
//
// public PromptBuilder withToolSpecification(String functionName, String functionDesc, Map<String, PromptFuncDefinition.Property> props) {
// PromptFuncDefinition def = new PromptFuncDefinition();
// def.setType("function");
//
// PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec();
// functionDetail.setName(functionName);
// functionDetail.setDescription(functionDesc);
//
// PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters();
// parameters.setType("object");
// parameters.setProperties(props);
//
// List<String> requiredValues = new ArrayList<>();
// for (Map.Entry<String, PromptFuncDefinition.Property> p : props.entrySet()) {
// if (p.getValue().isRequired()) {
// requiredValues.add(p.getKey());
// }
// }
// parameters.setRequired(requiredValues);
// functionDetail.setParameters(parameters);
// def.setFunction(functionDetail);
//
// tools.add(def);
// return this;
// }
}
}

View File

@@ -3,7 +3,7 @@ package io.github.amithkoujalgi.ollama4j.unittests;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultCallback;
+import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultStreamer;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
@@ -157,7 +157,7 @@ class TestMockedAPIs {
String model = OllamaModelType.LLAMA2;
String prompt = "some prompt text";
when(ollamaAPI.generateAsync(model, prompt, false))
-.thenReturn(new OllamaAsyncResultCallback(null, null, 3));
+.thenReturn(new OllamaAsyncResultStreamer(null, null, 3));
ollamaAPI.generateAsync(model, prompt, false);
verify(ollamaAPI, times(1)).generateAsync(model, prompt, false);
}