Compare commits


No commits in common. "main" and "v1.0.68" have entirely different histories.

101 changed files with 2375 additions and 3706 deletions

View File

@ -1,32 +0,0 @@
name: Build and Publish
on: push
jobs:
  build:
    runs-on: standard-22.04
    steps:
      - name: Check out
        uses: actions/checkout@v4
      - name: Set Up Java
        uses: actions/setup-java@v4
        with:
          distribution: 'temurin'
          java-version: '21'
          #cache: 'maven'
          #server-id: 'gitea'
      - name: Set up Maven
        uses: stCarolas/setup-maven@v5
        with:
          maven-version: 3.8.2
      - run: cat /root/.m2/toolchains.xml
      - run: cat /root/.m2/settings.xml
      - name: Build
        run: mvn -B package --file pom.xml
      - name: Publish
        run: mvn deploy

View File

@ -1,58 +0,0 @@
name: Release Artifacts to GitHub Maven Packages
on:
  release:
    types: [ created ]
jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          java-version: '17'
          distribution: 'temurin'
          server-id: github
          settings-path: ${{ github.workspace }}
      - name: maven-settings-xml-action
        uses: whelk-io/maven-settings-xml-action@v22
        with:
          servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]'
      - name: Find and Replace
        uses: jacobtomlinson/gha-find-replace@v3
        with:
          find: "ollama4j-revision"
          replace: ${{ github.ref_name }}
          regex: false
      - name: Find and Replace
        uses: jacobtomlinson/gha-find-replace@v3
        with:
          find: "mvn-repo-id"
          replace: github
          regex: false
      - name: Import GPG key
        uses: crazy-max/ghaction-import-gpg@v6
        with:
          gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
          passphrase: ${{ secrets.GPG_PASSPHRASE }}
      - name: List keys
        run: gpg -K
      - name: Build with Maven
        run: mvn --file pom.xml -U clean package -Punit-tests
      - name: Publish to GitHub Packages Apache Maven
        run: mvn deploy -Punit-tests -s $GITHUB_WORKSPACE/settings.xml -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=github -Drepo.user=${{ secrets.GH_MVN_USER }} -Drepo.pass=${{ secrets.GH_MVN_PASS }} -DaltDeploymentRepository=github::default::https://maven.pkg.github.com/ollama4j/ollama4j
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@ -1,95 +1,68 @@
# This workflow will build a package using Maven and then publish it to GitHub packages when a release is created
# For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path
name: Release Artifacts to Maven Central
on:
release:
types: [ created ]
name: Test and Publish Package
#on:
# pull_request:
# types: [ opened, reopened ]
# branches: [ "main" ]
# release:
# types: [ "created" ]
on:
push:
branches: [ "main" ]
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: write
packages: write
steps:
- uses: actions/checkout@v3
- name: Set up JDK 17
- name: Set up JDK 11
uses: actions/setup-java@v3
with:
java-version: '17'
distribution: 'temurin'
java-version: '11'
distribution: 'adopt-hotspot'
server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
settings-path: ${{ github.workspace }} # location for the settings.xml file
- name: maven-settings-xml-action
uses: whelk-io/maven-settings-xml-action@v22
- name: Build with Maven
run: mvn --file pom.xml -U clean package -Punit-tests
- name: Set up Apache Maven Central (Overwrite settings.xml)
uses: actions/setup-java@v3
with: # running setup-java again overwrites the settings.xml
java-version: '11'
distribution: 'adopt-hotspot'
cache: 'maven'
server-id: ossrh
server-username: MAVEN_USERNAME
server-password: MAVEN_PASSWORD
gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
gpg-passphrase: MAVEN_GPG_PASSPHRASE
- name: Set up Maven cache
uses: actions/cache@v3
with:
servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]'
- name: Import GPG key
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: List keys
run: gpg -K
- name: Find and Replace
uses: jacobtomlinson/gha-find-replace@v3
with:
find: "ollama4j-revision"
replace: ${{ github.ref_name }}
regex: false
- name: Find and Replace
uses: jacobtomlinson/gha-find-replace@v3
with:
find: "mvn-repo-id"
replace: central
regex: false
- name: Publish to Maven Central
run: mvn deploy -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=central -Drepo.user=${{ secrets.MVN_USER }} -Drepo.pass=${{ secrets.MVN_PASS }}
- name: Upload Release Asset - JAR
uses: actions/upload-release-asset@v1
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-
- name: Build
run: mvn -B -ntp clean install
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ github.event.release.upload_url }}
asset_path: target/ollama4j-${{ github.ref_name }}.jar
asset_name: ollama4j-${{ github.ref_name }}.jar
asset_content_type: application/x-jar
- name: Upload Release Asset - Javadoc JAR
uses: actions/upload-release-asset@v1
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
- name: Publish to GitHub Packages Apache Maven
# if: >
# github.event_name != 'pull_request' &&
# github.ref_name == 'main' &&
# contains(github.event.head_commit.message, 'release')
run: |
git config --global user.email "koujalgi.amith@gmail.com"
git config --global user.name "amithkoujalgi"
mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ github.event.release.upload_url }}
asset_path: target/ollama4j-${{ github.ref_name }}-javadoc.jar
asset_name: ollama4j-${{ github.ref_name }}-javadoc.jar
asset_content_type: application/x-jar
- name: Upload Release Asset - Sources JAR
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ github.event.release.upload_url }}
asset_path: target/ollama4j-${{ github.ref_name }}-sources.jar
asset_name: ollama4j-${{ github.ref_name }}-sources.jar
asset_content_type: application/x-jar
MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}

View File

@ -2,8 +2,9 @@
name: Deploy Docs to GH Pages
on:
release:
types: [ created ]
# Runs on pushes targeting the default branch
push:
branches: [ "main" ]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
@ -46,13 +47,6 @@ jobs:
- run: cd docs && npm ci
- run: cd docs && npm run build
- name: Find and Replace
uses: jacobtomlinson/gha-find-replace@v3
with:
find: "ollama4j-revision"
replace: ${{ github.ref_name }}
regex: false
- name: Build with Maven
run: mvn --file pom.xml -U clean package && cp -r ./target/apidocs/. ./docs/build/apidocs

.github/workflows/publish-javadoc.yml
View File

@ -0,0 +1,52 @@
# Simple workflow for deploying static content to GitHub Pages
name: Deploy Javadoc content to Pages
on:
  # Runs on pushes targeting the default branch
  push:
    branches: [ "none" ]
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write
  packages: write
# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: "pages"
  cancel-in-progress: false
jobs:
  # Single deploy job since we're just deploying
  deploy:
    runs-on: ubuntu-latest
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 11
        uses: actions/setup-java@v3
        with:
          java-version: '11'
          distribution: 'adopt-hotspot'
          server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
          settings-path: ${{ github.workspace }} # location for the settings.xml file
      - name: Build with Maven
        run: mvn --file pom.xml -U clean package
      - name: Setup Pages
        uses: actions/configure-pages@v3
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v2
        with:
          # Upload entire repository
          path: './target/apidocs/.'
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v2

.gitignore
View File

@ -37,8 +37,6 @@ build/
### Mac OS ###
.DS_Store
/.idea/
/src/main/java/io/github/amithkoujalgi/ollama4j/core/localtests/
pom.xml.*
release.properties
!.idea/icon.svg
src/main/java/io/github/ollama4j/localtests

.idea/icon.svg
View File

@ -1,18 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg version="1.1" viewBox="0 0 1478 2048" width="1280" height="1280" xmlns="http://www.w3.org/2000/svg">
<path transform="translate(0)" d="m0 0h1478v2048h-1478z" fill="#FEFEFE"/>
<path transform="translate(411,47)" d="m0 0h24l21 5 17 8 14 10 12 11 10 10 12 16 14 24 11 24 9 24 8 27 6 25 4 21 3 19 3 25 6-2 16-9 29-13 28-10 30-8 26-4 27-2h16l30 2 32 5 19 5 30 10 26 11 20 10 13 8 2-15 6-39 8-36 6-20 9-27 11-24 10-19 12-18 9-11 9-10 12-11 17-11 15-7 19-4h24l18 4 16 7 12 8 10 8 17 17 13 18 12 22 9 20 7 19 9 30 7 33 5 33 3 29 1 15v79l-3 30-4 29-4 20 16 15 17 17 8 7 18 18 9 11 10 12 14 21 9 16 8 16 5 17 7 19 10 34 5 27 3 24 1 14v42l-4 35-6 29-8 27-9 22-12 25-13 22-5 7 2 6 14 29 12 31 8 26 7 29 6 36 2 21 1 19v37l-3 34-4 25-5 24-8 27-8 21-7 16-11 21-15 24 2 5 7 10 8 15 11 29 8 29 6 31 3 22 2 24v57l-4 33-6 27-3 9-3 1h-89l-2-1v-11l2-13 6-21 3-19 1-9v-48l-3-31-4-22-7-27-6-16-8-16-12-21-4-11-3-17v-31l4-13 6-10 11-16 9-15 11-23 10-31 6-26 3-22 1-16v-33l-2-27-4-27-10-39-9-25-8-18-13-25-12-19-4-10-1-5v-13l3-11 4-8 9-10 13-17 8-13 8-14 11-27 7-25 4-21 2-20v-27l-2-22-5-27-6-21-8-22-12-25-8-14-11-16-8-10-11-13-13-13-8-7-17-13-18-11-17-9-15-6-23-7-14-3-17-2h-28l-18 2h-18l-10-3-6-5-16-32-8-14-11-15-8-10-9-10-7-7-14-11-12-9-16-10-19-10-13-6-20-8-17-5-24-5-15-2h-33l-25 4-24 6-22 8-20 9-20 11-19 13-10 8-11 9-13 13-13 17-10 15-10 18-8 18-9 10-6 3h-21l-19-2h-29l-20 3-14 3-27 9-21 10-18 11-16 12-15 13-15 15-11 14-12 17-10 17-8 16-10 25-7 24-5 24-3 25v31l4 30 5 21 9 27 12 25 10 16 7 9 16 15 6 12 3 9v15l-6 16-13 21-14 27-8 20-8 25-7 27-4 23-3 31v35l3 32 5 26 9 30 6 15 10 21 11 17 12 16 8 13 4 13v19l-4 13-12 22-9 15-8 16-7 19-7 26-5 30-2 23v42l3 26 5 22 3 12 1 9v10l-3 1h-81l-11-1-5-21-5-30-2-22v-52l2-25 5-34 5-23 7-25 8-21 11-23 9-12-1-5-14-22-10-19-11-25-10-30-6-24-5-29-3-27-1-17v-35l2-30 4-29 5-26 10-36 9-25 10-23 10-21-1-7-10-14-14-26-7-15-8-20-8-26-6-29-3-25v-66l3-27 7-33 9-29 10-25 8-16 9-17 11-17 11-15 11-13 7-8 56-56-1-6-2-5-4-26-3-32-1-17v-69l3-39 5-35 6-29 8-30 8-23 12-27 12-21 12-16 11-12 7-7 13-10 16-9 11-4z" fill="#010000"/>
<path transform="translate(856,1181)" d="m0 0h13l10 4 6 7 4 9 6 29 5 22 8 16 4-13 7-23 5-12 6-9 9-8 7-3 5-1h10l8 4 5 8v11l-6 17-6 15-4 16v22l8 38 1 9v11l-3 16-8 16-9 9-10 8-6 7-4 8-2 7-1 12v51l-2 17-4 13-11 20-5 15-3 17v21l3 17 6 16 11 28 13 38 10 37 7 33 5 33 3 28 1 18v49l-2 24-4 22-6 18-6 10-7 8-10 6-13 4h-17l-7-4-10-9-11-15-11-16-12-17-9-11-9-10-10-9-13-8-14-5-5-1h-26l-16 4-18 8-18 11-16 12-16 13-17 14-20 15-16 9-13 4h-11l-10-3-7-6-4-8-2-9v-39l2-25-6 8-2 1h-8l-13-4-8-7-4-7v-9l6-12 8-10 9-11 9-14 5-12 2-11v-17l-4-20-6-21-2-13v-16l2-12 8-16 9-13 12-16 13-21 8-17 9-27 4-20 4-39 3-39 3-63v-98l-3-35-3-13 5 2 16 11 13 10 11 9 14 12 17 16 33 33 7 8 12 13 9 11 12 14 8 10 10 13 12 16 13 18 18 27 12 19 6 8 6 4 9 1 12-3 10-6 8-11 4-11v-33l-3-17-4-11-5-7-6-3-15-4-16-9-16-8-4-1h-12l-23 5-8-1-7-6-4-10v-10l4-8 9-8 13-6 13-4 10-1-9-11-8-10-10-15-8-16-7-15-9-27-1-5v-13l3-8 8-8 9-4 6-1 8 3 7 9 15 31 8 12 8 9 2 1-6-21-4-20-1-8v-33l3-10 4-5z" fill="#020101"/>
<path transform="translate(735,724)" d="m0 0h30l24 2 27 4 20 5 27 9 29 14 18 11 16 12 11 9 15 14 12 14 10 14 9 15 7 14 7 19 5 20 2 14v34l-3 20-6 19-6 15-11 19-9 12-11 13-15 15-11 9-16 11-22 12-26 10-13 4-21 5-19 2h-117l-24-3-27-6-28-10-16-8-14-8-14-10-10-8-10-9-10-10-11-14-10-15-10-21-6-18-4-19-1-9v-31l2-15 5-20 8-21 10-19 8-12 10-13 12-13 13-13 11-9 15-11 15-9 14-8 21-9 16-6 22-6 29-5z" fill="#FEFEFE"/>
<path transform="translate(816,1496)" d="m0 0 5 1 13 21 10 18 14 27 15 31 17 40 10 27 12 36 8 28 7 30 5 28 3 28v60l-2 31-3 23-5 17-4 6-5 4-4 1h-14l-6-4-11-14-10-15-12-17-9-11-12-14-8-7-14-10-16-8-12-4-12-2h-20l-16 3-15 5-16 8-18 12-14 11-15 13-14 13-22 18-14 7-4 1h-7l-5-6-3-13v-29l3-32 6-45 11-66 20-100 13-61 2-6 11-7 4-2 7 11 10 10 13 8 18 6 6 1h25l17-4 16-7 13-9 7-6 9-11 8-14 5-15 2-10v-20l-3-11z" fill="#FEFEFE"/>
<path transform="translate(735,724)" d="m0 0h30l24 2 27 4 20 5 27 9 29 14 18 11 16 12 11 9 15 14 12 14 10 14 9 15 7 14 7 19 5 20 2 14v34l-3 20-6 19-6 15-11 19-9 12-11 13-15 15-11 9-16 11-22 12-26 10-13 4-21 5-19 2h-117l-24-3-27-6-28-10-16-8-14-8-14-10-10-8-10-9-10-10-11-14-10-15-10-21-6-18-4-19-1-9v-31l2-15 5-20 8-21 10-19 8-12 10-13 12-13 13-13 11-9 15-11 15-9 14-8 21-9 16-6 22-6 29-5zm0 63-20 2-20 4-29 10-17 8-17 10-17 13-15 14-9 11-9 14-9 19-6 20-2 14v11l3 16 6 18 7 14 8 11 11 12 10 9 18 12 16 8 15 6 25 6 15 2 14 1h89l21-3 25-6 26-11 15-9 10-8 10-9 8-8 12-18 6-13 5-16 2-12v-15l-2-14-5-16-5-12-7-13-12-16-12-13-8-7-16-12-14-8-15-8-28-10-21-5-14-2-13-1z" fill="#010101"/>
<path transform="translate(1081,140)" d="m0 0h5l5 4 9 11 11 19 11 28 6 21 7 32 4 27 3 42v49l-3 47-1 4-6-1-10-4-22-4-44-6-27-2-9-15-2-5v-40l2-34 5-38 8-38 5-20 11-29 11-23 7-10 11-13z" fill="#FEFEFE"/>
<path transform="translate(423,139)" d="m0 0 4 2 10 10 10 14 11 22 9 24 7 25 6 29 5 30 3 31 1 16v45l-6 14-5 6-29 2-31 4-35 6-11 4h-3l-3-28-1-27v-41l2-36 5-35 8-37 6-19 8-21 8-16 8-12 8-9z" fill="#FEFEFE"/>
<path transform="translate(745,1472)" d="m0 0h9l16 3 14 7 10 9 6 10 3 9 1 6v15l-4 14-8 16-9 10-9 8-15 8-12 4-10 2h-15l-13-3-16-8-11-10-6-10-5-12-2-11v-8l2-10h2l1-5 4-8 8-10 11-9 17-9 12-5 8-2z" fill="red"/>
<path transform="translate(436,735)" d="m0 0h16l15 4 12 7 10 9 7 9 5 11 2 8v21l-4 14-6 12-7 9-14 14-11 7-12 4h-15l-14-3-11-4-11-7-9-10-8-14-2-9v-21l4-14 8-16 6-9 10-10 14-8 9-3z" fill="#010101"/>
<path transform="translate(1055,735)" d="m0 0h15l16 4 11 6 10 8 7 9 8 15 5 14 1 6v20l-4 13-7 11-7 8-14 9-16 5-5 1h-16l-13-4-11-7-17-17-8-14-5-14-1-5v-20l4-13 6-10 9-10 11-8 11-5z" fill="#010101"/>
<path transform="translate(717,869)" d="m0 0h9l12 4 13 8 5-1 8-6 9-4 12-1 10 3 6 4 6 9 1 2v15l-5 10-8 7-11 8-6 4-1 6 3 17v19l-5 8-9 6-8 2h-10l-11-2-8-6-4-6-1-3v-15l3-19v-7l-16-10-11-11-3-5-1-4v-13l5-10 6-5z" fill="#020101"/>
<path transform="translate(717,1479)" d="m0 0 2 1-2 3h2v4 2l6 1 2 1 3 13-1 10-5 10h-2v2h-2v2h-2v2l-5 2-3 2-9 2v-2l-5 1-9-5-5-4v-2h-2l-2-2-6 3 1-7 5-10 8-10 11-9 17-9z" fill="pink"/>
<path transform="translate(599,1667)" d="m0 0 4 1v14l-9 48-3 19-2 1-8-20-3-11v-15l5-15 8-14 6-7z" fill="white"/>
<path transform="translate(937,1063)" d="m0 0 2 1-11 9-15 10-19 10-26 10-13 4-21 5-19 2h-117l-9-1v-1h82l37-1 18-2 32-7 14-5 16-6 10-4 17-9 11-7z" fill="#553D3C"/>
</svg>


View File

@ -1,128 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
koujalgi.amith@gmail.com.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

View File

@ -1,10 +1,10 @@
build:
mvn -B clean install
unit-tests:
ut:
mvn clean test -Punit-tests
integration-tests:
it:
mvn clean verify -Pintegration-tests
doxygen:

README.md
View File

@ -1,53 +1,31 @@
### Ollama4j
<p align="center">
<img src='https://raw.githubusercontent.com/ollama4j/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon">
</p>
<img src='https://raw.githubusercontent.com/amithkoujalgi/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon">
A Java library (wrapper/binding) for [Ollama](https://ollama.ai/) server.
Find more details on the [website](https://ollama4j.github.io/ollama4j/).
Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/).
<div align="center">
![GitHub stars](https://img.shields.io/github/stars/amithkoujalgi/ollama4j)
![GitHub forks](https://img.shields.io/github/forks/amithkoujalgi/ollama4j)
![GitHub watchers](https://img.shields.io/github/watchers/amithkoujalgi/ollama4j)
![GitHub repo size](https://img.shields.io/github/repo-size/amithkoujalgi/ollama4j)
![GitHub language count](https://img.shields.io/github/languages/count/amithkoujalgi/ollama4j)
![GitHub top language](https://img.shields.io/github/languages/top/amithkoujalgi/ollama4j)
![GitHub last commit](https://img.shields.io/github/last-commit/amithkoujalgi/ollama4j?color=green)
![Hits](https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Famithkoujalgi%2Follama4j&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false)
![GitHub stars](https://img.shields.io/github/stars/ollama4j/ollama4j)
![GitHub forks](https://img.shields.io/github/forks/ollama4j/ollama4j)
![GitHub watchers](https://img.shields.io/github/watchers/ollama4j/ollama4j)
![Contributors](https://img.shields.io/github/contributors/ollama4j/ollama4j?style=social)
![GitHub License](https://img.shields.io/github/license/ollama4j/ollama4j)
[![codecov](https://codecov.io/gh/amithkoujalgi/ollama4j/graph/badge.svg?token=U0TE7BGP8L)](https://codecov.io/gh/amithkoujalgi/ollama4j)
[//]: # (![GitHub repo size]&#40;https://img.shields.io/github/repo-size/ollama4j/ollama4j&#41;)
[//]: # (![GitHub top language]&#40;https://img.shields.io/github/languages/top/ollama4j/ollama4j&#41;)
[//]: # (![JitPack Downloads This Month Badge]&#40;https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fjitpack.io%2Fapi%2Fdownloads%2Fio.github.ollama4j%2Follama4j&query=%24.month&label=JitPack%20Downloads%20-%20This%20Month&#41;)
[//]: # (![JitPack Downloads This Week Badge]&#40;https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fjitpack.io%2Fapi%2Fdownloads%2Fio.github.ollama4j%2Follama4j&query=%24.week&label=JitPack%20Downloads%20-%20This%20Week&#41;)
[//]: # (![JitPack Downloads Per Month Badge]&#40;https://jitpack.io/v/ollama4j/ollama4j/month.svg&#41;)
[//]: # (![GitHub Downloads &#40;all assets, all releases&#41;]&#40;https://img.shields.io/github/downloads/ollama4j/ollama4j/total?label=GitHub%20Package%20Downloads&#41;)
![GitHub last commit](https://img.shields.io/github/last-commit/ollama4j/ollama4j?color=green)
[![codecov](https://codecov.io/gh/ollama4j/ollama4j/graph/badge.svg?token=U0TE7BGP8L)](https://codecov.io/gh/ollama4j/ollama4j)
![Build Status](https://github.com/ollama4j/ollama4j/actions/workflows/maven-publish.yml/badge.svg)
</div>
[//]: # (![Hits]&#40;https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Follama4j%2Follama4j&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false&#41;)
[//]: # (![GitHub language count]&#40;https://img.shields.io/github/languages/count/ollama4j/ollama4j&#41;)
![Build Status](https://github.com/amithkoujalgi/ollama4j/actions/workflows/maven-publish.yml/badge.svg)
## Table of Contents
- [How does it work?](#how-does-it-work)
- [Requirements](#requirements)
- [Installation](#installation)
- [API Spec](https://ollama4j.github.io/ollama4j/category/apis---model-management)
- [Javadoc](https://ollama4j.github.io/ollama4j/apidocs/)
- [API Spec](#api-spec)
- [Demo APIs](#try-out-the-apis-with-ollama-server)
- [Development](#development)
- [Contributions](#get-involved)
- [References](#references)
@ -68,181 +46,44 @@ Find more details on the [website](https://ollama4j.github.io/ollama4j/).
#### Requirements
![Java](https://img.shields.io/badge/Java-11_+-green.svg?style=for-the-badge&labelColor=gray&label=Java&color=orange)
![Java](https://img.shields.io/badge/Java-11_+-green.svg?style=just-the-message&labelColor=gray)
[![][ollama-shield]][ollama] **Or** [![][ollama-docker-shield]][ollama-docker]
<a href="https://ollama.com/" target="_blank">
<img src="https://img.shields.io/badge/v0.3.0-green.svg?style=for-the-badge&labelColor=gray&label=Ollama&color=blue" alt=""/>
</a>
[ollama]: https://ollama.ai/
<table>
<tr>
<td>
[ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=just-the-message&labelColor=gray
<a href="https://ollama.ai/" target="_blank">Local Installation</a>
[ollama-docker]: https://hub.docker.com/r/ollama/ollama
</td>
[ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=just-the-message&labelColor=gray
<td>
<a href="https://hub.docker.com/r/ollama/ollama" target="_blank">Docker Installation</a>
</td>
</tr>
<tr>
<td>
<a href="https://ollama.com/download/Ollama-darwin.zip" target="_blank">Download for macOS</a>
<a href="https://ollama.com/download/OllamaSetup.exe" target="_blank">Download for Windows</a>
Install on Linux
```shell
curl -fsSL https://ollama.com/install.sh | sh
```
</td>
<td>
CPU only
```shell
docker run -d -p 11434:11434 \
-v ollama:/root/.ollama \
--name ollama \
ollama/ollama
```
NVIDIA GPU
```shell
docker run -d -p 11434:11434 \
--gpus=all \
-v ollama:/root/.ollama \
--name ollama \
ollama/ollama
```
</td>
</tr>
</table>
## Installation
> [!NOTE]
> We are now publishing the artifacts to both Maven Central and GitHub package repositories.
>
> Track the releases [here](https://github.com/ollama4j/ollama4j/releases) and update the dependency version
> according to your requirements.
### For Maven
#### Using [Maven Central](https://central.sonatype.com/)
[![][ollama4j-mvn-releases-shield]][ollama4j-mvn-releases-link]
[ollama4j-mvn-releases-link]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j/overview
[ollama4j-mvn-releases-shield]: https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20Maven%20Central
#### Installation
In your Maven project, add this dependency:
```xml
<dependency>
<groupId>io.github.ollama4j</groupId>
<groupId>io.github.amithkoujalgi</groupId>
<artifactId>ollama4j</artifactId>
<version>1.0.79</version>
<version>1.0.57</version>
</dependency>
```
#### Using GitHub's Maven Package Repository
Latest release:
[![][ollama4j-releases-shield]][ollama4j-releases-link]
![Maven Central](https://img.shields.io/maven-central/v/io.github.amithkoujalgi/ollama4j)
[ollama4j-releases-link]: https://github.com/ollama4j/ollama4j/releases
[![][lib-shield]][lib]
[ollama4j-releases-shield]: https://img.shields.io/github/v/release/ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20GitHub%20Packages
1. Add `GitHub Maven Packages` repository to your project's `pom.xml` or your `settings.xml`:
```xml
<repositories>
<repository>
<id>github</id>
<name>GitHub Apache Maven Packages</name>
<url>https://maven.pkg.github.com/ollama4j/ollama4j</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
```
2. Add `GitHub` server to settings.xml. (Usually available at ~/.m2/settings.xml)
```xml
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
http://maven.apache.org/xsd/settings-1.0.0.xsd">
<servers>
<server>
<id>github</id>
<username>YOUR-USERNAME</username>
<password>YOUR-TOKEN</password>
</server>
</servers>
</settings>
```
3. In your Maven project, add this dependency:
```xml
<dependency>
<groupId>io.github.ollama4j</groupId>
<artifactId>ollama4j</artifactId>
<version>1.0.79</version>
</dependency>
```
### For Gradle
1. Add the dependency
```groovy
dependencies {
implementation 'io.github.ollama4j:ollama4j:1.0.79'
}
```
[//]: # (Latest release:)
[//]: # ()
[//]: # (![Maven Central]&#40;https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j&#41;)
[//]: # ()
[//]: # ([![][lib-shield]][lib])
[lib]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j
[lib]: https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j
[lib-shield]: https://img.shields.io/badge/ollama4j-get_latest_version-blue.svg?style=just-the-message&labelColor=gray
#### API Spec
> [!TIP]
> Find the full API specifications on the [website](https://ollama4j.github.io/ollama4j/).
Find the full API specifications on the [website](https://amithkoujalgi.github.io/ollama4j/).
#### Development
@ -255,18 +96,19 @@ make build
Run unit tests:
```shell
make unit-tests
make ut
```
Run integration tests:
```shell
make integration-tests
make it
```
#### Releases
Newer artifacts are published via GitHub Actions CI workflow when a new release is created from `main` branch.
Releases (newer artifact versions) are done automatically on pushing the code to the `main` branch through GitHub
Actions CI workflow.
#### Who's using Ollama4j?
@ -274,63 +116,45 @@ Newer artifacts are published via GitHub Actions CI workflow when a new release
- https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api
- `Vaadin Web UI`: UI-Tester for Interactions with Ollama via ollama4j
- https://github.com/TEAMPB/ollama4j-vaadin-ui
- `ollama-translator`: A Minecraft 1.20.6 Spigot plugin that allows you to easily break language barriers by using Ollama on the server to translate all messages into a specific target language.
- https://github.com/liebki/ollama-translator
- `Ollama4j Web UI`: A web UI for Ollama written in Java using Spring Boot and Vaadin framework and
Ollama4j. https://github.com/ollama4j/ollama4j-web-ui
#### Traction
[![Star History Chart](https://api.star-history.com/svg?repos=ollama4j/ollama4j&type=Date)](https://star-history.com/#ollama4j/ollama4j&Date)
[![Star History Chart](https://api.star-history.com/svg?repos=amithkoujalgi/ollama4j&type=Date)](https://star-history.com/#amithkoujalgi/ollama4j&Date)
### Areas of improvement
- [x] Use Java naming conventions for attributes in the request/response models instead of snake-case conventions (possibly with Jackson-mapper's `@JsonProperty`)
- [x] Fix deprecated HTTP client code
- [x] Setup logging
- [x] Use lombok
- [x] Update request body creation with Java objects
- [ ] Async APIs for images
- [ ] Add custom headers to requests
- [x] Add additional params for `ask` APIs such as:
- [x] `options`: additional model parameters for the Modelfile such as `temperature` - Supported [params](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
- [x] `system`: system prompt (overrides what is defined in the Modelfile)
- [x] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile)
- [x] `context`: the context parameter returned from a previous request, which can be used to keep a short conversational memory
- [x] `stream`: Add support for streaming responses from the model
- [ ] Add test cases
- [ ] Handle exceptions better (maybe throw more appropriate exceptions)
### Get Involved
<div align="center">
<a href="">![Open Issues](https://img.shields.io/github/issues-raw/ollama4j/ollama4j)</a>
<a href="">![Closed Issues](https://img.shields.io/github/issues-closed-raw/ollama4j/ollama4j)</a>
<a href="">![Open PRs](https://img.shields.io/github/issues-pr-raw/ollama4j/ollama4j)</a>
<a href="">![Closed PRs](https://img.shields.io/github/issues-pr-closed-raw/ollama4j/ollama4j)</a>
<a href="">![Discussions](https://img.shields.io/github/discussions/ollama4j/ollama4j)</a>
</div>
[//]: # (![GitHub Issues or Pull Requests]&#40;https://img.shields.io/github/issues-raw/ollama4j/ollama4j&#41;)
[//]: # (![GitHub Issues or Pull Requests]&#40;https://img.shields.io/github/issues-closed-raw/ollama4j/ollama4j&#41;)
[//]: # (![GitHub Issues or Pull Requests]&#40;https://img.shields.io/github/issues-pr-raw/ollama4j/ollama4j&#41;)
[//]: # (![GitHub Issues or Pull Requests]&#40;https://img.shields.io/github/issues-pr-closed-raw/ollama4j/ollama4j&#41;)
[//]: # (![GitHub Discussions]&#40;https://img.shields.io/github/discussions/ollama4j/ollama4j&#41;)
Contributions are most welcome! Whether it's reporting a bug, proposing an enhancement, or helping with code - any sort of contribution is much appreciated.
### References
- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)
### Credits
The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/)
project.
**Thanks to the amazing contributors**
### References
<p align="center">
<a href="https://github.com/ollama4j/ollama4j/graphs/contributors">
<img src="https://contrib.rocks/image?repo=ollama4j/ollama4j" alt=""/>
</a>
</p>
### Appreciate my work?
<p align="center">
<a href="https://www.buymeacoffee.com/amithkoujalgi" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a>
</p>
- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)

View File

@ -11,7 +11,7 @@ Hey there, my fellow Java Developers! 🚀
I am glad to announce the release of Ollama4j, a library that unites Ollama (an LLM manager and runner) and your Java
applications! 🌐🚀
👉 GitHub Repository: Ollama4j on GitHub (https://github.com/ollama4j/ollama4j)
👉 GitHub Repository: Ollama4j on GitHub (https://github.com/amithkoujalgi/ollama4j)
🌟 Key Features:
@ -58,9 +58,9 @@ elevate your projects.
I look forward to seeing the incredible applications/projects you'll build with Ollama4j! 🌟
Find the full API spec here: https://ollama4j.github.io/ollama4j/
Find the full API spec here: https://amithkoujalgi.github.io/ollama4j/
Find the Javadoc here: https://ollama4j.github.io/ollama4j/apidocs/
Find the Javadoc here: https://amithkoujalgi.github.io/ollama4j/apidocs/
Ollama4j Docs is powered by [Docusaurus](https://docusaurus.io).

View File

@ -10,8 +10,6 @@ Ollama server would be setup behind a gateway/reverse proxy with basic auth.
After configuring basic authentication, all subsequent requests will include the Basic Auth header.
```java
import io.github.ollama4j.OllamaAPI;
public class Main {
public static void main(String[] args) {

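The fragment above keeps only the diff's context lines; a minimal sketch of configuring basic auth, assuming the client exposes a `setBasicAuth(username, password)` setter, might look like this:

```java
import io.github.ollama4j.OllamaAPI;

public class Main {

    public static void main(String[] args) {
        // Point the client at the gateway/reverse proxy fronting the Ollama server
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        // Assumed setter: adds the Basic Auth header to all subsequent requests
        ollamaAPI.setBasicAuth("username", "password");
    }
}
```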
View File

@ -31,14 +31,13 @@ Link to [source](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md
Also, see how to set those Ollama parameters using
the `OptionsBuilder`
from [javadoc](https://ollama4j.github.io/ollama4j/apidocs/io/github/ollama4j/ollama4j/core/utils/OptionsBuilder.html).
from [javadoc](https://amithkoujalgi.github.io/ollama4j/apidocs/io/github/amithkoujalgi/ollama4j/core/utils/OptionsBuilder.html).
## Build an empty `Options` object
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.OptionsBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
public class Main {
@ -56,8 +55,8 @@ public class Main {
## Build the `Options` object with values
```java
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.OptionsBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
public class Main {

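The hunks above retain only the changed import lines; a minimal sketch of building `Options` objects, assuming `OptionsBuilder` exposes setters such as `setTemperature` and `setNumCtx`, might look like this:

```java
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.OptionsBuilder;

public class Main {

    public static void main(String[] args) {
        // An empty Options object: the model's defaults apply
        Options defaults = new OptionsBuilder().build();

        // Options populated with values (setter names are assumptions)
        Options options = new OptionsBuilder()
                .setTemperature(0.8f)
                .setNumCtx(4096)
                .build();
    }
}
```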
View File

@ -7,8 +7,6 @@ sidebar_position: 3
This API lets you check the reachability of Ollama server.
```java
import io.github.ollama4j.OllamaAPI;
public class Main {
public static void main(String[] args) {

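The example above is truncated by the diff; a minimal sketch of the reachability check, assuming `ping()` returns a boolean, might be:

```java
import io.github.ollama4j.OllamaAPI;

public class Main {

    public static void main(String[] args) {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        // Assumed to return true when the Ollama server is reachable
        boolean reachable = ollamaAPI.ping();
        System.out.println("Is Ollama server reachable: " + reachable);
    }
}
```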
View File

@ -1,30 +0,0 @@
---
sidebar_position: 4
---
# PS
This API provides a list of running models and details about each model currently loaded into memory.
This API corresponds to the [PS](https://github.com/ollama/ollama/blob/main/docs/api.md#list-running-models) API.
```java
package io.github.ollama4j.localtests;
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.ps.ModelsProcessResponse;
import java.io.IOException;
public class Main {
public static void main(String[] args) {
OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
ModelsProcessResponse response = ollamaAPI.ps();
System.out.println(response);
}
}
```

View File

@ -7,8 +7,6 @@ sidebar_position: 2
This API lets you set the request timeout for the Ollama client.
```java
import io.github.ollama4j.OllamaAPI;
public class Main {
public static void main(String[] args) {

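Only the context lines survive the diff here; a minimal sketch, reusing the `setRequestTimeoutSeconds` call that appears elsewhere in this changeset, might be:

```java
import io.github.ollama4j.OllamaAPI;

public class Main {

    public static void main(String[] args) {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        // Requests taking longer than 10 seconds will fail with a timeout
        ollamaAPI.setRequestTimeoutSeconds(10);
    }
}
```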
View File

@ -9,8 +9,6 @@ This API lets you set the verbosity of the Ollama client.
## Try asking a question about the model.
```java
import io.github.ollama4j.OllamaAPI;
public class Main {
public static void main(String[] args) {

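A minimal sketch of toggling verbosity, assuming the client exposes a `setVerbose(boolean)` setter, might be:

```java
import io.github.ollama4j.OllamaAPI;

public class Main {

    public static void main(String[] args) {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        // Assumed setter: enables verbose request/response logging on the client
        ollamaAPI.setVerbose(true);
    }
}
```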
View File

@ -10,13 +10,6 @@ information using the history of already asked questions and the respective answ
## Create a new conversation and use chat history to augment follow up questions
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.types.OllamaModelType;
public class Main {
public static void main(String[] args) {
@ -27,7 +20,7 @@ public class Main {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
// create first user question
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
.build();
// start conversation with model
@ -82,44 +75,9 @@ You will get a response similar to:
]
```
## Conversational loop
```java
public class Main {
public static void main(String[] args) {
OllamaAPI ollamaAPI = new OllamaAPI();
ollamaAPI.setRequestTimeoutSeconds(60);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("<your-model>");
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "<your-first-message>").build();
OllamaChatResult initialChatResult = ollamaAPI.chat(requestModel);
System.out.println(initialChatResult.getResponse());
List<OllamaChatMessage> history = initialChatResult.getChatHistory();
while (true) {
OllamaChatResult chatResult = ollamaAPI.chat(builder.withMessages(history).withMessage(OllamaChatMessageRole.USER, "<your-new-message").build());
System.out.println(chatResult.getResponse());
history = chatResult.getChatHistory();
}
}
}
```
## Create a conversation where the answer is streamed
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
public class Main {
public static void main(String[] args) {
@ -128,7 +86,7 @@ public class Main {
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER,
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?")
.build();
@ -155,13 +113,7 @@ You will get a response similar to:
## Use a simple Console Output Stream Handler
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.impl.ConsoleOutputStreamHandler;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler;
public class Main {
public static void main(String[] args) throws Exception {
@ -169,7 +121,7 @@ public class Main {
OllamaAPI ollamaAPI = new OllamaAPI(host);
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
.build();
OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
ollamaAPI.chat(requestModel, streamHandler);
@ -180,14 +132,6 @@ public class Main {
## Create a new conversation with individual system prompt
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.types.OllamaModelType;
public class Main {
public static void main(String[] args) {
@ -198,7 +142,7 @@ public class Main {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
// create request with system-prompt (overriding the model defaults) and user question
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
.withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
.build();
@ -218,16 +162,6 @@ You will get a response similar to:
## Create a conversation about an image (requires model with image recognition skills)
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.types.OllamaModelType;
import java.io.File;
import java.util.List;
public class Main {
public static void main(String[] args) {
@ -238,10 +172,9 @@ public class Main {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA);
// Load Image from File and attach to user message (alternatively images could also be added via URL)
OllamaChatRequest requestModel =
OllamaChatRequestModel requestModel =
builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
List.of(
new File("/path/to/image"))).build();
List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
System.out.println("First answer: " + chatResult.getResponse());

View File

@ -5,44 +5,38 @@ sidebar_position: 2
# Generate - Async
This API lets you ask questions to the LLMs in an asynchronous way. This is particularly helpful when you want to issue a generate request to the LLM and collect the response in the background (such as in a separate thread) without blocking your code until the response arrives from the model.
This API corresponds to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
These APIs correlate to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
import io.github.ollama4j.types.OllamaModelType;
public class Main {
public static void main(String[] args) throws Exception {
public static void main(String[] args) {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
ollamaAPI.setRequestTimeoutSeconds(60);
String prompt = "List all cricket world cup teams of 2019.";
OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync(OllamaModelType.LLAMA3, prompt, false);
// Set the poll interval according to your needs.
// Smaller the poll interval, more frequently you receive the tokens.
int pollIntervalMilliseconds = 1000;
String prompt = "Who are you?";
while (true) {
String tokens = streamer.getStream().poll();
System.out.print(tokens);
if (!streamer.isAlive()) {
break;
OllamaAsyncResultCallback callback = ollamaAPI.generateAsync(OllamaModelType.LLAMA2, prompt);
while (!callback.isComplete() || !callback.getStream().isEmpty()) {
// poll for data from the response stream
String result = callback.getStream().poll();
if (result != null) {
System.out.print(result);
}
Thread.sleep(pollIntervalMilliseconds);
Thread.sleep(100);
}
System.out.println("\n------------------------");
System.out.println("Complete Response:");
System.out.println("------------------------");
System.out.println(streamer.getCompleteResponse());
}
}
```
You will get a response similar to:
> I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational
> manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide
> range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that
> require
> natural language understanding and generation capabilities.

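Because the hunk above interleaves the older callback-based example with the newer streamer-based one, here is a consolidated sketch of the streaming variant, built only from calls that appear in the removed lines (`generateAsync`, `getStream().poll()`, `isAlive()`, `getCompleteResponse()`):

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
import io.github.ollama4j.types.OllamaModelType;

public class Main {

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        ollamaAPI.setRequestTimeoutSeconds(60);

        String prompt = "List all cricket world cup teams of 2019.";
        OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync(OllamaModelType.LLAMA3, prompt, false);

        // Smaller poll intervals deliver tokens more frequently
        int pollIntervalMilliseconds = 1000;
        while (true) {
            String tokens = streamer.getStream().poll();
            if (tokens != null) {
                System.out.print(tokens);
            }
            if (!streamer.isAlive()) {
                break;
            }
            Thread.sleep(pollIntervalMilliseconds);
        }

        System.out.println("\nComplete response: " + streamer.getCompleteResponse());
    }
}
```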
View File

@ -12,10 +12,6 @@ Parameters:
- `prompt`: text to generate embeddings for
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;
import java.util.List;
public class Main {
public static void main(String[] args) {

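The hunk above drops everything after the imports; a minimal sketch, assuming a `generateEmbeddings(model, prompt)` method that returns the embedding as a list of doubles, might be:

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;

import java.util.List;

public class Main {

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        // Assumed API: returns the embedding vector for the given prompt
        List<Double> embeddings = ollamaAPI.generateEmbeddings(OllamaModelType.LLAMA2, "Here is an article about llamas...");
        System.out.println(embeddings);
    }
}
```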
View File

@ -1,12 +1,12 @@
---
sidebar_position: 4
sidebar_position: 3
---
# Generate - With Image Files
This API lets you ask questions to the LLMs along with image files.
This API corresponds to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
These APIs correlate to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
:::note
@ -22,14 +22,6 @@ If you have this image downloaded and you pass the path to the downloaded image
![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg)
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import java.io.File;
import java.util.List;
public class Main {
public static void main(String[] args) {
@ -40,9 +32,7 @@ public class Main {
OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA,
"What's in this image?",
List.of(
new File("/path/to/image")),
new OptionsBuilder().build()
);
new File("/path/to/image")));
System.out.println(result.getResponse());
}
}

View File

@ -1,12 +1,12 @@
---
sidebar_position: 5
sidebar_position: 4
---
# Generate - With Image URLs
This API lets you ask questions to the LLMs along with image URLs.
This API corresponds to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
These APIs correlate to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
:::note
@ -22,13 +22,6 @@ Passing the link of this image the following code:
![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg)
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import java.util.List;
public class Main {
public static void main(String[] args) {
@ -39,9 +32,7 @@ public class Main {
OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA,
"What's in this image?",
List.of(
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
new OptionsBuilder().build()
);
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"));
System.out.println(result.getResponse());
}
}

View File

@ -1,372 +0,0 @@
---
sidebar_position: 3
---
# Generate - With Tools
This API lets you perform [function calling](https://docs.mistral.ai/capabilities/function_calling/) using LLMs in a
synchronous way.
This API corresponds to
the [generate](https://github.com/ollama/ollama/blob/main/docs/api.md#request-raw-mode) API with `raw` mode.
:::note
This is only an experimental implementation and has a very basic design. Currently, it is built and tested for [Mistral's latest model](https://ollama.com/library/mistral) only. We could redesign this in the future if tooling is supported for more models with a generic interaction standard from Ollama.
:::
### Function Calling/Tools
Assume you want to call a method in your code based on the response generated from the model.
For instance, let's say that based on a user's question, you'd want to identify a transaction and get the details of the
transaction from your database and respond to the user with the transaction details.
You could do that with ease with the `function calling` capabilities of the models by registering your `tools`.
### Create Functions
We can create static functions as our tools.
This function takes the arguments `location` and `fuelType`, performs an operation with them, and returns a fuel price value.
```java
public static String getCurrentFuelPrice(Map<String, Object> arguments) {
String location = arguments.get("location").toString();
String fuelType = arguments.get("fuelType").toString();
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
}
```
This function takes the argument `city`, performs an operation with it, and returns the weather for that location.
```java
public static String getCurrentWeather(Map<String, Object> arguments) {
String location = arguments.get("city").toString();
return "Currently " + location + "'s weather is nice.";
}
```
Another way to create our tools is by creating classes by extending `ToolFunction`.
This function takes the argument `employee-name`, performs an operation with it, and returns the employee details.
```java
class DBQueryFunction implements ToolFunction {
@Override
public Object apply(Map<String, Object> arguments) {
// perform DB operations here
return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
}
}
```
### Define Tool Specifications
Let's define a sample tool specification called **Fuel Price Tool** for getting the current fuel price.
- Specify the function `name`, `description`, and `required` properties (`location` and `fuelType`).
- Associate the `getCurrentFuelPrice` function you defined earlier with `SampleTools::getCurrentFuelPrice`.
```java
Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
.functionName("current-fuel-price")
.functionDescription("Get current fuel price")
.properties(
new Tools.PropsBuilder()
.withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
.withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
.build()
)
.toolDefinition(SampleTools::getCurrentFuelPrice)
.build();
```
Let's also define a sample tool specification called **Weather Tool** for getting the current weather.
- Specify the function `name`, `description`, and `required` property (`city`).
- Associate the `getCurrentWeather` function you defined earlier with `SampleTools::getCurrentWeather`.
```java
Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
.functionName("current-weather")
.functionDescription("Get current weather")
.properties(
new Tools.PropsBuilder()
.withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
.build()
)
.toolDefinition(SampleTools::getCurrentWeather)
.build();
```
Let's also define a sample tool specification called **DBQueryFunction** for getting the employee details from the database.
- Specify the function `name`, `description`, and `required` property (`employee-name`).
- Associate the `DBQueryFunction` ToolFunction you defined earlier via `new DBQueryFunction()`.
```java
Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
.functionName("get-employee-details")
.functionDescription("Get employee details from the database")
.properties(
new Tools.PropsBuilder()
.withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
.withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
.withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
.build()
)
.toolDefinition(new DBQueryFunction())
.build();
```
### Register the Tools
Register the defined tools (`fuel price` and `weather`) with the OllamaAPI.
```shell
ollamaAPI.registerTool(fuelPriceToolSpecification);
ollamaAPI.registerTool(weatherToolSpecification);
ollamaAPI.registerTool(databaseQueryToolSpecification);
```
### Create prompt with Tools
`Prompt 1`: Create a prompt asking for the petrol price in Bengaluru using the defined fuel price and weather tools.
```shell
String prompt1 = new Tools.PromptBuilder()
.withToolSpecification(fuelPriceToolSpecification)
.withToolSpecification(weatherToolSpecification)
.withPrompt("What is the petrol price in Bengaluru?")
.build();
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, new OptionsBuilder().build());
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
}
```
Now, fire away your question to the model.
You will get a response similar to:
::::tip[LLM Response]
[Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L
::::
`Prompt 2`: Create a prompt asking for the current weather in Bengaluru using the same tools.
```shell
String prompt2 = new Tools.PromptBuilder()
.withToolSpecification(fuelPriceToolSpecification)
.withToolSpecification(weatherToolSpecification)
.withPrompt("What is the current weather in Bengaluru?")
.build();
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, new OptionsBuilder().build());
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
}
```
Again, fire away your question to the model.
You will get a response similar to:
::::tip[LLM Response]
[Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice.
::::
`Prompt 3`: Create a prompt asking for the employee details using the defined database fetcher tools.
```shell
String prompt3 = new Tools.PromptBuilder()
.withToolSpecification(fuelPriceToolSpecification)
.withToolSpecification(weatherToolSpecification)
.withToolSpecification(databaseQueryToolSpecification)
.withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
.build();
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt3, new OptionsBuilder().build());
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
}
```
Again, fire away your question to the model.
You will get a response similar to:
::::tip[LLM Response]
[Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name:
Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
::::
### Full Example
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.exceptions.ToolInvocationException;
import io.github.ollama4j.tools.OllamaToolsResult;
import io.github.ollama4j.tools.ToolFunction;
import io.github.ollama4j.tools.Tools;
import io.github.ollama4j.utils.OptionsBuilder;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import java.util.UUID;
public class FunctionCallingWithMistralExample {
public static void main(String[] args) throws Exception {
String host = "http://localhost:11434/";
OllamaAPI ollamaAPI = new OllamaAPI(host);
ollamaAPI.setRequestTimeoutSeconds(60);
String model = "mistral";
Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
.functionName("current-fuel-price")
.functionDescription("Get current fuel price")
.properties(
new Tools.PropsBuilder()
.withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
.withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
.build()
)
.toolDefinition(SampleTools::getCurrentFuelPrice)
.build();
Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
.functionName("current-weather")
.functionDescription("Get current weather")
.properties(
new Tools.PropsBuilder()
.withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
.build()
)
.toolDefinition(SampleTools::getCurrentWeather)
.build();
Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
.functionName("get-employee-details")
.functionDescription("Get employee details from the database")
.properties(
new Tools.PropsBuilder()
.withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
.withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
.withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
.build()
)
.toolDefinition(new DBQueryFunction())
.build();
ollamaAPI.registerTool(fuelPriceToolSpecification);
ollamaAPI.registerTool(weatherToolSpecification);
ollamaAPI.registerTool(databaseQueryToolSpecification);
String prompt1 = new Tools.PromptBuilder()
.withToolSpecification(fuelPriceToolSpecification)
.withToolSpecification(weatherToolSpecification)
.withPrompt("What is the petrol price in Bengaluru?")
.build();
ask(ollamaAPI, model, prompt1);
String prompt2 = new Tools.PromptBuilder()
.withToolSpecification(fuelPriceToolSpecification)
.withToolSpecification(weatherToolSpecification)
.withPrompt("What is the current weather in Bengaluru?")
.build();
ask(ollamaAPI, model, prompt2);
String prompt3 = new Tools.PromptBuilder()
.withToolSpecification(fuelPriceToolSpecification)
.withToolSpecification(weatherToolSpecification)
.withToolSpecification(databaseQueryToolSpecification)
.withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
.build();
ask(ollamaAPI, model, prompt3);
}
public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException {
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, new OptionsBuilder().build());
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
}
}
}
class SampleTools {
public static String getCurrentFuelPrice(Map<String, Object> arguments) {
// Get details from fuel price API
String location = arguments.get("location").toString();
String fuelType = arguments.get("fuelType").toString();
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
}
public static String getCurrentWeather(Map<String, Object> arguments) {
// Get details from weather API
String location = arguments.get("city").toString();
return "Currently " + location + "'s weather is nice.";
}
}
class DBQueryFunction implements ToolFunction {
@Override
public Object apply(Map<String, Object> arguments) {
// perform DB operations here
return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
}
}
```
Run this full example and you will get a response similar to:
::::tip[LLM Response]
[Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L
[Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice.
[Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name:
Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
::::
### Potential Improvements
Instead of explicitly registering each tool with `ollamaAPI.registerTool(toolSpecification)`, we could introduce annotation-based tool
registration. For example:
```java
@ToolSpec(name = "current-fuel-price", desc = "Get current fuel price")
public String getCurrentFuelPrice(Map<String, Object> arguments) {
String location = arguments.get("location").toString();
String fuelType = arguments.get("fuelType").toString();
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
}
```
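Such an annotation does not exist in the library yet. A minimal sketch of what it could look like is shown below (the `@ToolSpec` annotation is hypothetical); a registrar could then scan for annotated methods at startup and register each one as a tool automatically.
```java
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

// Hypothetical annotation, retained at runtime so a registrar can discover
// annotated methods via reflection and register them as tool functions.
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface ToolSpec {
    String name();

    String desc();
}
```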
Instead of passing a map of arguments (`Map<String, Object> arguments`) to the tool functions, we could support passing individual,
strongly typed arguments. For example:
```java
public String getCurrentFuelPrice(String location, String fuelType) {
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
}
```
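To support typed signatures like this, the library would need to bridge the existing argument map to the method's parameters at invocation time. Below is a rough sketch of one possible adapter, purely illustrative: it assumes the caller supplies the property order explicitly instead of relying on reflected parameter names.
```java
import java.lang.reflect.Method;
import java.util.List;
import java.util.Map;

// Illustrative sketch: bridges the Map-based tool arguments used today to a
// method that declares individual, typed parameters.
public class TypedToolAdapter {
    public static Object invoke(Object target, Method method, List<String> propertyOrder,
                                Map<String, Object> arguments) throws Exception {
        // Pull the arguments out of the map in the declared parameter order.
        Object[] args = propertyOrder.stream().map(arguments::get).toArray();
        return method.invoke(target, args);
    }
}
```
For example, invoking it with `List.of("location", "fuelType")` and the arguments map from above would end up calling `getCurrentFuelPrice("Bengaluru", "petrol")`.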
Update the async and chat APIs to support tool-based generation.
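A tool-aware chat entry point could, for instance, take a shape like the sketch below; the interface and method are hypothetical and only illustrate the direction, not an existing API.
```java
import io.github.ollama4j.tools.OllamaToolsResult;

// Hypothetical addition - not an existing ollama4j API.
public interface ToolAwareChat {
    /**
     * Sends a chat prompt, lets the model emit tool calls, invokes the
     * matching registered tools, and returns the reply plus tool results.
     */
    OllamaToolsResult chatWithTools(String model, String prompt) throws Exception;
}
```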

View File

@ -5,22 +5,17 @@ sidebar_position: 1
# Generate - Sync
This API lets you ask questions to the LLMs in a synchronous way.
This API corresponds to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
These APIs correlate to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
Use the `OptionBuilder` to build the `Options` object
with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
Refer
to [this](/apis-extras/options-builder).
to [this](/docs/apis-extras/options-builder).
## Try asking a question about the model.
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main {
public static void main(String[] args) {
@ -49,11 +44,6 @@ You will get a response similar to:
## Try asking a question, receiving the answer streamed
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main {
public static void main(String[] args) {
@ -75,7 +65,6 @@ public class Main {
}
}
```
You will get a response similar to:
> The
@ -90,11 +79,6 @@ You will get a response similar to:
## Try asking a question from general topics.
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
public class Main {
public static void main(String[] args) {
@ -138,12 +122,6 @@ You'd then get a response from the model:
## Try asking for a Database query for your data schema.
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import io.github.ollama4j.utils.SamplePrompts;
public class Main {
public static void main(String[] args) {
@ -162,7 +140,7 @@ public class Main {
```
_Note: Here I've used
a [sample prompt](https://github.com/ollama4j/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
a [sample prompt](https://github.com/amithkoujalgi/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
containing a database schema from within this library for demonstration purposes._
You'd then get a response from the model:

View File

@ -1,5 +1,5 @@
---
sidebar_position: 6
sidebar_position: 5
---
# Prompt Builder
@ -8,13 +8,13 @@ This is designed for prompt engineering. It allows you to easily build the promp
inferences.
```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import io.github.ollama4j.utils.PromptBuilder;
public class Main {
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.PromptBuilder;
public class AskPhi {
public static void main(String[] args) throws Exception {
String host = "http://localhost:11434/";
@ -42,8 +42,7 @@ public class Main {
.addSeparator()
.add("How do I read a file in Go and print its contents to stdout?");
boolean raw = false;
OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), raw, new OptionsBuilder().build());
OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build());
System.out.println(response.getResponse());
}
}

View File

@ -9,8 +9,6 @@ This API lets you create a custom model on the Ollama server.
### Create a model from an existing Modelfile in the Ollama server
```java title="CreateModel.java"
import io.github.ollama4j.OllamaAPI;
public class CreateModel {
public static void main(String[] args) {

View File

@ -7,8 +7,6 @@ sidebar_position: 5
This API lets you create a delete a model from the Ollama server.
```java title="DeleteModel.java"
import io.github.ollama4j.OllamaAPI;
public class Main {
public static void main(String[] args) {

View File

@ -7,10 +7,6 @@ sidebar_position: 3
This API lets you get the details of a model on the Ollama server.
```java title="GetModelDetails.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.ModelDetail;
import io.github.ollama4j.types.OllamaModelType;
public class Main {
public static void main(String[] args) {

View File

@ -7,11 +7,6 @@ sidebar_position: 1
This API lets you list available models on the Ollama server.
```java title="ListModels.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.Model;
import java.util.List;
public class ListModels {
public static void main(String[] args) {

View File

@ -7,9 +7,6 @@ sidebar_position: 2
This API lets you pull a model on the Ollama server.
```java title="PullModel.java"
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;
public class Main {
public static void main(String[] args) {

View File

@ -78,13 +78,13 @@ Add the dependency to your project's `pom.xml`.
```xml
<dependency>
<groupId>io.github.ollama4j</groupId>
<groupId>io.github.amithkoujalgi</groupId>
<artifactId>ollama4j</artifactId>
<version>1.0.78</version>
<version>1.0.27</version>
</dependency>
```
Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j).
Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j).
You might want to include an implementation of [SL4J](https://www.slf4j.org/) logger in your `pom.xml` file. For
example,
@ -116,26 +116,6 @@ or use other suitable implementations.
Create a new Java class in your project and add this code.
```java
import io.github.ollama4j.OllamaAPI;
public class OllamaAPITest {
public static void main(String[] args) {
OllamaAPI ollamaAPI = new OllamaAPI();
boolean isOllamaServerReachable = ollamaAPI.ping();
System.out.println("Is Ollama server running: " + isOllamaServerReachable);
}
}
```
This uses the default Ollama host as `http://localhost:11434`.
Specify a different Ollama host that you want to connect to.
```java
import io.github.ollama4j.OllamaAPI;
public class OllamaAPITest {
public static void main(String[] args) {
@ -147,7 +127,7 @@ public class OllamaAPITest {
boolean isOllamaServerReachable = ollamaAPI.ping();
System.out.println("Is Ollama server running: " + isOllamaServerReachable);
System.out.println("Is Ollama server alive: " + isOllamaServerReachable);
}
}
```

View File

@ -20,7 +20,7 @@ const config = {
// GitHub pages deployment config.
// If you aren't using GitHub pages, you don't need these.
organizationName: 'ollama4j', // Usually your GitHub org/user name.
organizationName: 'amithkoujalgi', // Usually your GitHub org/user name.
projectName: 'ollama4j', // Usually your repo name.
onBrokenLinks: 'throw',
@ -40,28 +40,22 @@ const config = {
/** @type {import('@docusaurus/preset-classic').Options} */
({
docs: {
path: 'docs',
routeBasePath: '', // change this to any URL route you'd want. For example: `home` - if you want /home/intro.
sidebarPath: './sidebars.js',
// Please change this to your repo.
// Remove this to remove the "edit this page" links.
editUrl:
'https://github.com/ollama4j/ollama4j/blob/main/docs',
'https://github.com/amithkoujalgi/ollama4j/blob/main/docs',
},
blog: {
showReadingTime: true,
// Please change this to your repo.
// Remove this to remove the "edit this page" links.
editUrl:
'https://github.com/ollama4j/ollama4j/blob/main/docs',
'https://github.com/amithkoujalgi/ollama4j/blob/main/docs',
},
theme: {
customCss: './src/css/custom.css',
},
gtag: {
trackingID: 'G-G7FLH6FNDC',
anonymizeIP: false,
},
}),
],
],
@ -84,11 +78,11 @@ const config = {
position: 'left',
label: 'Docs',
},
{to: 'https://ollama4j.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
{to: 'https://ollama4j.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'},
{to: 'https://amithkoujalgi.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
{to: 'https://amithkoujalgi.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'},
{to: '/blog', label: 'Blog', position: 'left'},
{
href: 'https://github.com/ollama4j/ollama4j',
href: 'https://github.com/amithkoujalgi/ollama4j',
label: 'GitHub',
position: 'right',
},
@ -102,7 +96,7 @@ const config = {
items: [
{
label: 'Tutorial',
to: '/intro',
to: '/docs/intro',
},
],
},
@ -128,7 +122,7 @@ const config = {
},
{
label: 'GitHub',
href: 'https://github.com/ollama4j/ollama4j',
href: 'https://github.com/amithkoujalgi/ollama4j',
},
],
},

1952
docs/package-lock.json generated

File diff suppressed because it is too large

View File

@ -14,10 +14,9 @@
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docusaurus/core": "^3.4.0",
"@docusaurus/plugin-google-gtag": "^3.4.0",
"@docusaurus/preset-classic": "^3.4.0",
"@docusaurus/theme-mermaid": "^3.4.0",
"@docusaurus/core": "3.0.1",
"@docusaurus/preset-classic": "3.0.1",
"@docusaurus/theme-mermaid": "^3.0.1",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0",
@ -25,8 +24,8 @@
"react-dom": "^18.0.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "^3.4.0",
"@docusaurus/types": "^3.4.0"
"@docusaurus/module-type-aliases": "3.0.1",
"@docusaurus/types": "3.0.1"
},
"browserslist": {
"production": [

View File

@ -19,7 +19,7 @@ function HomepageHeader() {
<div className={styles.buttons}>
<Link
className="button button--secondary button--lg"
to="/intro">
to="/docs/intro">
Getting Started
</Link>
</div>

114
pom.xml
View File

@ -1,16 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>io.github.ollama4j</groupId>
<groupId>io.github.amithkoujalgi</groupId>
<artifactId>ollama4j</artifactId>
<version>ollama4j-revision</version>
<version>1.0.68</version>
<name>Ollama4j</name>
<description>Java library for interacting with Ollama API.</description>
<url>https://github.com/ollama4j/ollama4j</url>
<packaging>jar</packaging>
<url>https://github.com/amithkoujalgi/ollama4j</url>
<properties>
<maven.compiler.source>11</maven.compiler.source>
@ -33,15 +31,15 @@
<licenses>
<license>
<name>MIT License</name>
<url>https://raw.githubusercontent.com/ollama4j/ollama4j/main/LICENSE</url>
<url>https://raw.githubusercontent.com/amithkoujalgi/ollama4j/main/LICENSE</url>
</license>
</licenses>
<scm>
<connection>scm:git:git@github.com:ollama4j/ollama4j.git</connection>
<developerConnection>scm:git:https://github.com/ollama4j/ollama4j.git</developerConnection>
<url>https://github.com/ollama4j/ollama4j</url>
<tag>ollama4j-revision</tag>
<connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
<developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
<url>https://github.com/amithkoujalgi/ollama4j</url>
<tag>v1.0.68</tag>
</scm>
<build>
@ -72,7 +70,27 @@
</execution>
</executions>
</plugin>
<!-- <plugin>-->
<!-- <groupId>org.apache.maven.plugins</groupId>-->
<!-- <artifactId>maven-gpg-plugin</artifactId>-->
<!-- <version>1.5</version>-->
<!-- <executions>-->
<!-- <execution>-->
<!-- <id>sign-artifacts</id>-->
<!-- <phase>verify</phase>-->
<!-- <goals>-->
<!-- <goal>sign</goal>-->
<!-- </goals>-->
<!-- <configuration>-->
<!-- &lt;!&ndash; This is necessary for gpg to not try to use the pinentry programs &ndash;&gt;-->
<!-- <gpgArguments>-->
<!-- <arg>&#45;&#45;pinentry-mode</arg>-->
<!-- <arg>loopback</arg>-->
<!-- </gpgArguments>-->
<!-- </configuration>-->
<!-- </execution>-->
<!-- </executions>-->
<!-- </plugin>-->
<!-- Surefire Plugin for Unit Tests -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
@ -109,33 +127,18 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<version>1.5</version>
<executions>
<execution>
<id>sign-artifacts</id>
<phase>verify</phase>
<goals>
<goal>sign</goal>
</goals>
</execution>
</executions>
<artifactId>maven-release-plugin</artifactId>
<version>3.0.1</version>
<configuration>
<!-- <goals>install</goals>-->
<tagNameFormat>v@{project.version}</tagNameFormat>
</configuration>
</plugin>
</plugins>
</build>
<repositories>
<repository>
<id>gitea</id>
<url>https://gitea.seeseepuff.be/api/packages/seeseemelk/maven</url>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>org.projectlombok</groupId>
@ -146,17 +149,12 @@
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.17.1</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
<version>2.17.1</version>
<version>2.15.3</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.5.6</version>
<version>1.4.12</version>
<scope>test</scope>
</dependency>
<dependency>
@ -186,42 +184,16 @@
<distributionManagement>
<snapshotRepository>
<id>gitea</id>
<url>https://gitea.seeseepuff.be/api/packages/seeseemelk/maven</url>
<id>ossrh</id>
<url>https://s01.oss.sonatype.org/content/repositories/snapshots</url>
</snapshotRepository>
<repository>
<id>gitea</id>
<url>https://gitea.seeseepuff.be/api/packages/seeseemelk/maven</url>
<id>ossrh</id>
<url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2</url>
</repository>
</distributionManagement>
<profiles>
<profile>
<id>ossrh</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<properties>
<gpg.executable>gpg2</gpg.executable>
<test.env>unit</test.env>
<skipUnitTests>false</skipUnitTests>
<skipIntegrationTests>true</skipIntegrationTests>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.sonatype.central</groupId>
<artifactId>central-publishing-maven-plugin</artifactId>
<version>0.5.0</version>
<extensions>true</extensions>
<configuration>
<publishingServerId>mvn-repo-id</publishingServerId>
<autoPublish>true</autoPublish>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>unit-tests</id>
<properties>
@ -230,7 +202,7 @@
<skipIntegrationTests>true</skipIntegrationTests>
</properties>
<activation>
<activeByDefault>false</activeByDefault>
<activeByDefault>true</activeByDefault>
</activation>
<build>
<plugins>

View File

@ -1,23 +1,17 @@
package io.github.ollama4j;
package io.github.amithkoujalgi.ollama4j.core;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.exceptions.ToolInvocationException;
import io.github.ollama4j.exceptions.ToolNotFoundException;
import io.github.ollama4j.models.chat.OllamaChatMessage;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.models.embeddings.OllamaEmbeddingResponseModel;
import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.models.ps.ModelsProcessResponse;
import io.github.ollama4j.models.request.*;
import io.github.ollama4j.models.response.*;
import io.github.ollama4j.tools.*;
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.Utils;
import lombok.Setter;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.*;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.request.*;
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -31,7 +25,9 @@ import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.time.Duration;
import java.util.*;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
/**
* The base Ollama API class.
@ -41,31 +37,12 @@ public class OllamaAPI {
private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class);
private final String host;
/**
* -- SETTER --
* Set request timeout in seconds. Default is 3 seconds.
*/
@Setter
private long requestTimeoutSeconds = 10;
/**
* -- SETTER --
* Set/unset logging of responses
*/
@Setter
private boolean verbose = true;
private BasicAuth basicAuth;
private final ToolRegistry toolRegistry = new ToolRegistry();
/**
* Instantiates the Ollama API with default Ollama host: <a href="http://localhost:11434">http://localhost:11434</a>
**/
public OllamaAPI() {
this.host = "http://localhost:11434";
}
/**
* Instantiates the Ollama API with specified Ollama host address.
* Instantiates the Ollama API.
*
* @param host the host address of Ollama server
*/
@ -77,6 +54,24 @@ public class OllamaAPI {
}
}
/**
* Set request timeout in seconds. Default is 3 seconds.
*
* @param requestTimeoutSeconds the request timeout in seconds
*/
public void setRequestTimeoutSeconds(long requestTimeoutSeconds) {
this.requestTimeoutSeconds = requestTimeoutSeconds;
}
/**
* Set/unset logging of responses
*
* @param verbose true/false
*/
public void setVerbose(boolean verbose) {
this.verbose = verbose;
}
/**
* Set basic authentication for accessing Ollama server that's behind a reverse-proxy/gateway.
*
@ -118,37 +113,6 @@ public class OllamaAPI {
return statusCode == 200;
}
/**
* Provides a list of running models and details about each model currently loaded into memory.
*
* @return ModelsProcessResponse
*/
public ModelsProcessResponse ps() throws IOException, InterruptedException, OllamaBaseException {
String url = this.host + "/api/ps";
HttpClient httpClient = HttpClient.newHttpClient();
HttpRequest httpRequest = null;
try {
httpRequest =
getRequestBuilderDefault(new URI(url))
.header("Accept", "application/json")
.header("Content-type", "application/json")
.GET()
.build();
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
HttpResponse<String> response = null;
response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
int statusCode = response.statusCode();
String responseString = response.body();
if (statusCode == 200) {
return Utils.getObjectMapper()
.readValue(responseString, ModelsProcessResponse.class);
} else {
throw new OllamaBaseException(statusCode + " - " + responseString);
}
}
/**
* List available models from Ollama server.
*
@ -375,7 +339,6 @@ public class OllamaAPI {
}
}
/**
* Generate response for a question to a model running on Ollama server. This is a sync/blocking
* call.
@ -388,67 +351,23 @@ public class OllamaAPI {
* @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
* @return OllamaResult that includes response text and time taken for response
*/
public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler)
public OllamaResult generate(String model, String prompt, Options options, OllamaStreamHandler streamHandler)
throws OllamaBaseException, IOException, InterruptedException {
OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt);
ollamaRequestModel.setRaw(raw);
OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
ollamaRequestModel.setOptions(options.getOptionsMap());
return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
}
/**
* Generates response using the specified AI model and prompt (in blocking mode).
* Convenience method to call Ollama API without streaming responses.
* <p>
* Uses {@link #generate(String, String, boolean, Options, OllamaStreamHandler)}
*
* @param model The name or identifier of the AI model to use for generating the response.
* @param prompt The input text or prompt to provide to the AI model.
* @param raw In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the raw parameter to disable templating. Also note that raw mode will not return a context.
* @param options Additional options or configurations to use when generating the response.
* @return {@link OllamaResult}
* Uses {@link #generate(String, String, Options, OllamaStreamHandler)}
*/
public OllamaResult generate(String model, String prompt, boolean raw, Options options)
public OllamaResult generate(String model, String prompt, Options options)
throws OllamaBaseException, IOException, InterruptedException {
return generate(model, prompt, raw, options, null);
return generate(model, prompt, options, null);
}
/**
* Generates response using the specified AI model and prompt (in blocking mode), and then invokes a set of tools
* on the generated response.
*
* @param model The name or identifier of the AI model to use for generating the response.
* @param prompt The input text or prompt to provide to the AI model.
* @param options Additional options or configurations to use when generating the response.
* @return {@link OllamaToolsResult} An OllamaToolsResult object containing the response from the AI model and the results of invoking the tools on that output.
* @throws OllamaBaseException If there is an error related to the Ollama API or service.
* @throws IOException If there is an error related to input/output operations.
* @throws InterruptedException If the method is interrupted while waiting for the AI model
* to generate the response or for the tools to be invoked.
*/
public OllamaToolsResult generateWithTools(String model, String prompt, Options options)
throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException {
boolean raw = true;
OllamaToolsResult toolResult = new OllamaToolsResult();
Map<ToolFunctionCallSpec, Object> toolResults = new HashMap<>();
OllamaResult result = generate(model, prompt, raw, options, null);
toolResult.setModelResult(result);
String toolsResponse = result.getResponse();
if (toolsResponse.contains("[TOOL_CALLS]")) {
toolsResponse = toolsResponse.replace("[TOOL_CALLS]", "");
}
List<ToolFunctionCallSpec> toolFunctionCallSpecs = Utils.getObjectMapper().readValue(toolsResponse, Utils.getObjectMapper().getTypeFactory().constructCollectionType(List.class, ToolFunctionCallSpec.class));
for (ToolFunctionCallSpec toolFunctionCallSpec : toolFunctionCallSpecs) {
toolResults.put(toolFunctionCallSpec, invokeTool(toolFunctionCallSpec));
}
toolResult.setToolResults(toolResults);
return toolResult;
}
/**
* Generate response for a question to a model running on Ollama server and get a callback handle
* that can be used to check for status and get the response from the model later. This would be
@ -458,15 +377,15 @@ public class OllamaAPI {
* @param prompt the prompt/question text
* @return the ollama async result callback handle
*/
public OllamaAsyncResultStreamer generateAsync(String model, String prompt, boolean raw) {
OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt);
ollamaRequestModel.setRaw(raw);
public OllamaAsyncResultCallback generateAsync(String model, String prompt) {
OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
URI uri = URI.create(this.host + "/api/generate");
OllamaAsyncResultStreamer ollamaAsyncResultStreamer =
new OllamaAsyncResultStreamer(
OllamaAsyncResultCallback ollamaAsyncResultCallback =
new OllamaAsyncResultCallback(
getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds);
ollamaAsyncResultStreamer.start();
return ollamaAsyncResultStreamer;
ollamaAsyncResultCallback.start();
return ollamaAsyncResultCallback;
}
/**
@ -489,7 +408,7 @@ public class OllamaAPI {
for (File imageFile : imageFiles) {
images.add(encodeFileToBase64(imageFile));
}
OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images);
OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
ollamaRequestModel.setOptions(options.getOptionsMap());
return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
}
@ -525,7 +444,7 @@ public class OllamaAPI {
for (String imageURL : imageURLs) {
images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL)));
}
OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images);
OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
ollamaRequestModel.setOptions(options.getOptionsMap());
return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
}
@ -559,33 +478,33 @@ public class OllamaAPI {
}
/**
* Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}.
* Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
* <p>
* Hint: the OllamaChatRequestModel#getStream() property is not implemented.
*
* @param request request object to be sent to the server
* @return {@link OllamaChatResult}
* @return
* @throws OllamaBaseException any response code than 200 has been returned
* @throws IOException in case the responseStream can not be read
* @throws InterruptedException in case the server is not reachable or network issues happen
*/
public OllamaChatResult chat(OllamaChatRequest request) throws OllamaBaseException, IOException, InterruptedException {
public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException {
return chat(request, null);
}
/**
* Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}.
* Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
* <p>
* Hint: the OllamaChatRequestModel#getStream() property is not implemented.
*
* @param request request object to be sent to the server
* @param streamHandler callback handler to handle the last message from stream (caution: all previous messages from stream will be concatenated)
* @return {@link OllamaChatResult}
* @return
* @throws OllamaBaseException any response code than 200 has been returned
* @throws IOException in case the responseStream can not be read
* @throws InterruptedException in case the server is not reachable or network issues happen
*/
public OllamaChatResult chat(OllamaChatRequest request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
OllamaResult result;
if (streamHandler != null) {
@ -597,10 +516,6 @@ public class OllamaAPI {
return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages());
}
public void registerTool(Tools.ToolSpecification toolSpecification) {
toolRegistry.addFunction(toolSpecification.getFunctionName(), toolSpecification.getToolDefinition());
}
// technical private methods //
private static String encodeFileToBase64(File file) throws IOException {
@ -612,7 +527,7 @@ public class OllamaAPI {
}
private OllamaResult generateSyncForOllamaRequestModel(
OllamaGenerateRequest ollamaRequestModel, OllamaStreamHandler streamHandler)
OllamaGenerateRequestModel ollamaRequestModel, OllamaStreamHandler streamHandler)
throws OllamaBaseException, IOException, InterruptedException {
OllamaGenerateEndpointCaller requestCaller =
new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
@ -661,22 +576,4 @@ public class OllamaAPI {
private boolean isBasicAuthCredentialsSet() {
return basicAuth != null;
}
private Object invokeTool(ToolFunctionCallSpec toolFunctionCallSpec) throws ToolInvocationException {
try {
String methodName = toolFunctionCallSpec.getName();
Map<String, Object> arguments = toolFunctionCallSpec.getArguments();
ToolFunction function = toolRegistry.getFunction(methodName);
if (verbose) {
logger.debug("Invoking function {} with arguments {}", methodName, arguments);
}
if (function == null) {
throw new ToolNotFoundException("No such tool: " + methodName);
}
return function.apply(arguments);
} catch (Exception e) {
throw new ToolInvocationException("Failed to invoke tool: " + toolFunctionCallSpec.getName(), e);
}
}
}

View File

@ -0,0 +1,7 @@
package io.github.amithkoujalgi.ollama4j.core;
import java.util.function.Consumer;
public interface OllamaStreamHandler extends Consumer<String>{
void accept(String message);
}

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.exceptions;
package io.github.amithkoujalgi.ollama4j.core.exceptions;
public class OllamaBaseException extends Exception {

View File

@ -1,6 +1,6 @@
package io.github.ollama4j.impl;
package io.github.amithkoujalgi.ollama4j.core.impl;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
public class ConsoleOutputStreamHandler implements OllamaStreamHandler {
private final StringBuffer response = new StringBuffer();

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.models.request;
package io.github.amithkoujalgi.ollama4j.core.models;
import lombok.AllArgsConstructor;
import lombok.Data;

View File

@ -1,7 +1,6 @@
package io.github.ollama4j.models.response;
package io.github.amithkoujalgi.ollama4j.core.models;
import java.util.List;
import lombok.Data;
@Data

View File

@ -1,10 +1,8 @@
package io.github.ollama4j.models.response;
import java.time.OffsetDateTime;
package io.github.amithkoujalgi.ollama4j.core.models;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.ollama4j.utils.Utils;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;
@Data
@ -13,9 +11,7 @@ public class Model {
private String name;
private String model;
@JsonProperty("modified_at")
private OffsetDateTime modifiedAt;
@JsonProperty("expires_at")
private OffsetDateTime expiresAt;
private String modifiedAt;
private String digest;
private long size;
@JsonProperty("details")

View File

@ -1,9 +1,9 @@
package io.github.ollama4j.models.response;
package io.github.amithkoujalgi.ollama4j.core.models;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.ollama4j.utils.Utils;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;
@Data

View File

@ -1,9 +1,9 @@
package io.github.ollama4j.models.response;
package io.github.amithkoujalgi.ollama4j.core.models;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.ollama4j.utils.Utils;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;
@Data

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.models.response;
package io.github.amithkoujalgi.ollama4j.core.models;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data;

View File

@ -0,0 +1,143 @@
package io.github.amithkoujalgi.ollama4j.core.models;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.LinkedList;
import java.util.Queue;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.Getter;
@Data
@EqualsAndHashCode(callSuper = true)
@SuppressWarnings("unused")
public class OllamaAsyncResultCallback extends Thread {
private final HttpRequest.Builder requestBuilder;
private final OllamaGenerateRequestModel ollamaRequestModel;
private final Queue<String> queue = new LinkedList<>();
private String result;
private boolean isDone;
/**
* -- GETTER -- Returns the status of the request. Indicates if the request was successful or a
* failure. If the request was a failure, the `getResponse()` method will return the error
* message.
*/
@Getter private boolean succeeded;
private long requestTimeoutSeconds;
/**
* -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama
* server.
*/
@Getter private int httpStatusCode;
/** -- GETTER -- Returns the response time in milliseconds. */
@Getter private long responseTime = 0;
public OllamaAsyncResultCallback(
HttpRequest.Builder requestBuilder,
OllamaGenerateRequestModel ollamaRequestModel,
long requestTimeoutSeconds) {
this.requestBuilder = requestBuilder;
this.ollamaRequestModel = ollamaRequestModel;
this.isDone = false;
this.result = "";
this.queue.add("");
this.requestTimeoutSeconds = requestTimeoutSeconds;
}
@Override
public void run() {
HttpClient httpClient = HttpClient.newHttpClient();
try {
long startTime = System.currentTimeMillis();
HttpRequest request =
requestBuilder
.POST(
HttpRequest.BodyPublishers.ofString(
Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)))
.header("Content-Type", "application/json")
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
.build();
HttpResponse<InputStream> response =
httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
int statusCode = response.statusCode();
this.httpStatusCode = statusCode;
InputStream responseBodyStream = response.body();
try (BufferedReader reader =
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
String line;
StringBuilder responseBuffer = new StringBuilder();
while ((line = reader.readLine()) != null) {
if (statusCode == 404) {
OllamaErrorResponseModel ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
queue.add(ollamaResponseModel.getError());
responseBuffer.append(ollamaResponseModel.getError());
} else {
OllamaGenerateResponseModel ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
queue.add(ollamaResponseModel.getResponse());
if (!ollamaResponseModel.isDone()) {
responseBuffer.append(ollamaResponseModel.getResponse());
}
}
}
this.isDone = true;
this.succeeded = true;
this.result = responseBuffer.toString();
long endTime = System.currentTimeMillis();
responseTime = endTime - startTime;
}
if (statusCode != 200) {
throw new OllamaBaseException(this.result);
}
} catch (IOException | InterruptedException | OllamaBaseException e) {
this.isDone = true;
this.succeeded = false;
this.result = "[FAILED] " + e.getMessage();
}
}
/**
* Returns the status of the thread. This does not indicate that the request was successful or a
* failure, rather it is just a status flag to indicate if the thread is active or ended.
*
* @return boolean - status
*/
public boolean isComplete() {
return isDone;
}
/**
* Returns the final completion/response when the execution completes. Does not return intermediate results.
*
* @return String completion/response text
*/
public String getResponse() {
return result;
}
public Queue<String> getStream() {
return queue;
}
public void setRequestTimeoutSeconds(long requestTimeoutSeconds) {
this.requestTimeoutSeconds = requestTimeoutSeconds;
}
}

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.models.request;
package io.github.amithkoujalgi.ollama4j.core.models;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonInclude;
@ -6,13 +6,13 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.github.ollama4j.utils.BooleanToJsonFormatFlagSerializer;
import io.github.ollama4j.utils.Utils;
import io.github.amithkoujalgi.ollama4j.core.utils.BooleanToJsonFormatFlagSerializer;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;
@Data
@JsonInclude(JsonInclude.Include.NON_NULL)
public abstract class OllamaCommonRequest {
public abstract class OllamaCommonRequestModel {
protected String model;
@JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)

View File

@ -1,11 +1,11 @@
package io.github.ollama4j.models.response;
package io.github.amithkoujalgi.ollama4j.core.models;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data;
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class OllamaErrorResponse {
public class OllamaErrorResponseModel {
private String error;
}

View File

@ -1,6 +1,6 @@
package io.github.ollama4j.models.response;
package io.github.amithkoujalgi.ollama4j.core.models;
import static io.github.ollama4j.utils.Utils.getObjectMapper;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.Data;

View File

@ -1,11 +1,11 @@
package io.github.ollama4j.models.chat;
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import static io.github.ollama4j.utils.Utils.getObjectMapper;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.github.ollama4j.utils.FileToBase64Serializer;
import io.github.amithkoujalgi.ollama4j.core.utils.FileToBase64Serializer;
import java.util.List;
import lombok.AllArgsConstructor;

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.models.chat;
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import com.fasterxml.jackson.annotation.JsonValue;

View File

@ -1,9 +1,4 @@
package io.github.ollama4j.models.chat;
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import java.io.File;
import java.io.IOException;
@ -13,29 +8,35 @@ import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
/**
* Helper class for creating {@link OllamaChatRequest} objects using the builder-pattern.
* Helper class for creating {@link OllamaChatRequestModel} objects using the builder-pattern.
*/
public class OllamaChatRequestBuilder {
private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class);
private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages){
request = new OllamaChatRequest(model, messages);
request = new OllamaChatRequestModel(model, messages);
}
private OllamaChatRequest request;
private OllamaChatRequestModel request;
public static OllamaChatRequestBuilder getInstance(String model){
return new OllamaChatRequestBuilder(model, new ArrayList<>());
}
public OllamaChatRequest build() {
public OllamaChatRequestModel build(){
return request;
}
public void reset(){
request = new OllamaChatRequest(request.getModel(), new ArrayList<>());
request = new OllamaChatRequestModel(request.getModel(), new ArrayList<>());
}
public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images){
@ -62,9 +63,11 @@ public class OllamaChatRequestBuilder {
for (String imageUrl : imageUrls) {
try{
binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl));
} catch (URISyntaxException e) {
}
catch (URISyntaxException e){
LOG.warn(String.format("URL '%s' could not be accessed, will not add to message!",imageUrl), e);
} catch (IOException e) {
}
catch (IOException e){
LOG.warn(String.format("Content of URL '%s' could not be read, will not add to message!",imageUrl), e);
}
}
@ -75,7 +78,8 @@ public class OllamaChatRequestBuilder {
}
public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages){
return new OllamaChatRequestBuilder(request.getModel(), messages);
this.request.getMessages().addAll(messages);
return this;
}
public OllamaChatRequestBuilder withOptions(Options options){

View File

@ -1,9 +1,8 @@
package io.github.ollama4j.models.chat;
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import java.util.List;
import io.github.ollama4j.models.request.OllamaCommonRequest;
import io.github.ollama4j.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import lombok.Getter;
import lombok.Setter;
@ -17,20 +16,20 @@ import lombok.Setter;
*/
@Getter
@Setter
public class OllamaChatRequest extends OllamaCommonRequest implements OllamaRequestBody {
public class OllamaChatRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody {
private List<OllamaChatMessage> messages;
public OllamaChatRequest() {}
public OllamaChatRequestModel() {}
public OllamaChatRequest(String model, List<OllamaChatMessage> messages) {
public OllamaChatRequestModel(String model, List<OllamaChatMessage> messages) {
this.model = model;
this.messages = messages;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof OllamaChatRequest)) {
if (!(o instanceof OllamaChatRequestModel)) {
return false;
}

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.models.chat;
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;

View File

@ -1,8 +1,8 @@
package io.github.ollama4j.models.chat;
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import java.util.List;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
/**
* Specific chat-API result that contains the chat history sent to the model and appends the answer as {@link OllamaChatResult} given by the
@ -16,11 +16,17 @@ public class OllamaChatResult extends OllamaResult{
List<OllamaChatMessage> chatHistory) {
super(response, responseTime, httpStatusCode);
this.chatHistory = chatHistory;
appendAnswerToChatHistory(response);
}
public List<OllamaChatMessage> getChatHistory() {
return chatHistory;
}
private void appendAnswerToChatHistory(String answer){
OllamaChatMessage assistantMessage = new OllamaChatMessage(OllamaChatMessageRole.ASSISTANT, answer);
this.chatHistory.add(assistantMessage);
}
}

View File

@ -1,16 +1,18 @@
package io.github.ollama4j.models.chat;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
package io.github.amithkoujalgi.ollama4j.core.models.chat;
import java.util.ArrayList;
import java.util.List;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
public class OllamaChatStreamObserver {
private OllamaStreamHandler streamHandler;
private List<OllamaChatResponseModel> responseParts = new ArrayList<>();
private String message = "";
public OllamaChatStreamObserver(OllamaStreamHandler streamHandler) {
this.streamHandler = streamHandler;
}
@ -21,7 +23,8 @@ public class OllamaChatStreamObserver {
}
protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart){
streamHandler.accept(currentResponsePart.getMessage().getContent());
message = message + currentResponsePart.getMessage().getContent();
streamHandler.accept(message);
}

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.models.embeddings;
package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,6 +1,6 @@
package io.github.ollama4j.models.embeddings;
package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
import io.github.ollama4j.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
public class OllamaEmbeddingsRequestBuilder {

View File

@ -1,6 +1,6 @@
package io.github.ollama4j.models.embeddings;
package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
import static io.github.ollama4j.utils.Utils.getObjectMapper;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;

View File

@ -1,24 +1,24 @@
package io.github.ollama4j.models.generate;
package io.github.amithkoujalgi.ollama4j.core.models.generate;
import io.github.ollama4j.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
/**
* Helper class for creating {@link OllamaGenerateRequest}
* Helper class for creating {@link io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel}
* objects using the builder-pattern.
*/
public class OllamaGenerateRequestBuilder {
private OllamaGenerateRequestBuilder(String model, String prompt){
request = new OllamaGenerateRequest(model, prompt);
request = new OllamaGenerateRequestModel(model, prompt);
}
private OllamaGenerateRequest request;
private OllamaGenerateRequestModel request;
public static OllamaGenerateRequestBuilder getInstance(String model){
return new OllamaGenerateRequestBuilder(model,"");
}
public OllamaGenerateRequest build(){
public OllamaGenerateRequestModel build(){
return request;
}

View File

@ -0,0 +1,46 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
public class OllamaGenerateRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody{
private String prompt;
private List<String> images;
private String system;
private String context;
private boolean raw;
public OllamaGenerateRequestModel() {
}
public OllamaGenerateRequestModel(String model, String prompt) {
this.model = model;
this.prompt = prompt;
}
public OllamaGenerateRequestModel(String model, String prompt, List<String> images) {
this.model = model;
this.prompt = prompt;
this.images = images;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof OllamaGenerateRequestModel)) {
return false;
}
return this.toString().equals(o.toString());
}
}

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.models.generate;
package io.github.amithkoujalgi.ollama4j.core.models.generate;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -1,8 +1,10 @@
package io.github.ollama4j.models.generate;
package io.github.amithkoujalgi.ollama4j.core.models.generate;
import java.util.ArrayList;
import java.util.List;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
public class OllamaGenerateStreamObserver {
private OllamaStreamHandler streamHandler;

View File

@ -1,6 +1,6 @@
package io.github.ollama4j.models.request;
package io.github.amithkoujalgi.ollama4j.core.models.request;
import static io.github.ollama4j.utils.Utils.getObjectMapper;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor;

View File

@ -1,6 +1,6 @@
package io.github.ollama4j.models.request;
package io.github.amithkoujalgi.ollama4j.core.models.request;
import static io.github.ollama4j.utils.Utils.getObjectMapper;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor;

View File

@ -1,6 +1,6 @@
package io.github.ollama4j.models.request;
package io.github.amithkoujalgi.ollama4j.core.models.request;
import static io.github.ollama4j.utils.Utils.getObjectMapper;
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor;

View File

@ -1,13 +1,14 @@
package io.github.ollama4j.models.request;
package io.github.amithkoujalgi.ollama4j.core.models.request;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.models.chat.OllamaChatResponseModel;
import io.github.ollama4j.models.chat.OllamaChatStreamObserver;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.utils.OllamaRequestBody;
import io.github.ollama4j.utils.Utils;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -1,11 +1,12 @@
package io.github.ollama4j.models.request;
package io.github.amithkoujalgi.ollama4j.core.models.request;
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.response.OllamaErrorResponse;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.utils.OllamaRequestBody;
import io.github.ollama4j.utils.Utils;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -77,19 +78,19 @@ public abstract class OllamaEndpointCaller {
while ((line = reader.readLine()) != null) {
if (statusCode == 404) {
LOG.warn("Status code: 404 (Not Found)");
OllamaErrorResponse ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
OllamaErrorResponseModel ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
responseBuffer.append(ollamaResponseModel.getError());
} else if (statusCode == 401) {
LOG.warn("Status code: 401 (Unauthorized)");
OllamaErrorResponse ollamaResponseModel =
OllamaErrorResponseModel ollamaResponseModel =
Utils.getObjectMapper()
.readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponse.class);
.readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class);
responseBuffer.append(ollamaResponseModel.getError());
} else if (statusCode == 400) {
LOG.warn("Status code: 400 (Bad Request)");
OllamaErrorResponse ollamaResponseModel = Utils.getObjectMapper().readValue(line,
OllamaErrorResponse.class);
OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line,
OllamaErrorResponseModel.class);
responseBuffer.append(ollamaResponseModel.getError());
} else {
boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);

View File

@ -0,0 +1,54 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{
private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class);
private OllamaGenerateStreamObserver streamObserver;
public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
super(host, basicAuth, requestTimeoutSeconds, verbose);
}
@Override
protected String getEndpointSuffix() {
return "/api/generate";
}
@Override
protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
try {
OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
responseBuffer.append(ollamaResponseModel.getResponse());
if(streamObserver != null) {
streamObserver.notify(ollamaResponseModel);
}
return ollamaResponseModel.isDone();
} catch (JsonProcessingException e) {
LOG.error("Error parsing the Ollama chat response!",e);
return true;
}
}
public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
throws OllamaBaseException, IOException, InterruptedException {
streamObserver = new OllamaGenerateStreamObserver(streamHandler);
return super.callSync(body);
}
}

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.types;
package io.github.amithkoujalgi.ollama4j.core.types;
/**
* A class to provide constants for all the supported models by Ollama.
@ -9,9 +9,6 @@ package io.github.ollama4j.types;
@SuppressWarnings("ALL")
public class OllamaModelType {
public static final String GEMMA = "gemma";
public static final String GEMMA2 = "gemma2";
public static final String LLAMA2 = "llama2";
public static final String LLAMA3 = "llama3";
public static final String MISTRAL = "mistral";
@ -33,8 +30,6 @@ public class OllamaModelType {
public static final String ZEPHYR = "zephyr";
public static final String OPENHERMES = "openhermes";
public static final String QWEN = "qwen";
public static final String QWEN2 = "qwen2";
public static final String WIZARDCODER = "wizardcoder";
public static final String LLAMA2_CHINESE = "llama2-chinese";
public static final String TINYLLAMA = "tinyllama";
@ -84,5 +79,4 @@ public class OllamaModelType {
public static final String NOTUS = "notus";
public static final String DUCKDB_NSQL = "duckdb-nsql";
public static final String ALL_MINILM = "all-minilm";
public static final String CODESTRAL = "codestral";
}

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.utils;
package io.github.amithkoujalgi.ollama4j.core.utils;
import java.io.IOException;

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.utils;
package io.github.amithkoujalgi.ollama4j.core.utils;
import java.io.IOException;
import java.util.Base64;

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.utils;
package io.github.amithkoujalgi.ollama4j.core.utils;
import java.net.http.HttpRequest.BodyPublisher;
import java.net.http.HttpRequest.BodyPublishers;

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.utils;
package io.github.amithkoujalgi.ollama4j.core.utils;
import java.util.Map;
import lombok.Data;

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.utils;
package io.github.amithkoujalgi.ollama4j.core.utils;
import java.util.HashMap;

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.utils;
package io.github.amithkoujalgi.ollama4j.core.utils;
/**
* The {@code PromptBuilder} class is used to construct prompt texts for language models (LLMs). It

View File

@ -1,6 +1,6 @@
package io.github.ollama4j.utils;
package io.github.amithkoujalgi.ollama4j.core.utils;
import io.github.ollama4j.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import java.io.InputStream;
import java.util.Scanner;

View File

@ -1,4 +1,4 @@
package io.github.ollama4j.utils;
package io.github.amithkoujalgi.ollama4j.core.utils;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
@ -8,18 +8,10 @@ import java.net.URISyntaxException;
import java.net.URL;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
public class Utils {
private static ObjectMapper objectMapper;
public static ObjectMapper getObjectMapper() {
if(objectMapper == null) {
objectMapper = new ObjectMapper();
objectMapper.registerModule(new JavaTimeModule());
}
return objectMapper;
return new ObjectMapper();
}
public static byte[] loadImageBytesFromUrl(String imageUrl)
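
The two sides of this hunk differ in how the shared Jackson mapper is built: one caches a single ObjectMapper and registers JavaTimeModule, the other returns a fresh mapper on every call. Registering the module is what lets java.time fields round-trip through Jackson; a small illustration of that point (the ModifiedAtHolder type exists only for this sketch):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import java.time.OffsetDateTime;

public class ObjectMapperSketch {
    // Hypothetical holder type, used only to demonstrate java.time handling.
    public static class ModifiedAtHolder {
        public OffsetDateTime modified_at;
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        mapper.registerModule(new JavaTimeModule()); // without this, OffsetDateTime fields fail to deserialize
        ModifiedAtHolder holder = mapper.readValue(
                "{\"modified_at\":\"2024-01-01T10:00:00Z\"}", ModifiedAtHolder.class);
        System.out.println(holder.modified_at);
    }
}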

View File

@ -1,8 +0,0 @@
package io.github.ollama4j.exceptions;
public class ToolInvocationException extends Exception {
public ToolInvocationException(String s, Exception e) {
super(s, e);
}
}

View File

@ -1,8 +0,0 @@
package io.github.ollama4j.exceptions;
public class ToolNotFoundException extends Exception {
public ToolNotFoundException(String s) {
super(s);
}
}

View File

@ -1,46 +0,0 @@
package io.github.ollama4j.models.generate;
import io.github.ollama4j.models.request.OllamaCommonRequest;
import io.github.ollama4j.utils.OllamaRequestBody;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
public class OllamaGenerateRequest extends OllamaCommonRequest implements OllamaRequestBody{
private String prompt;
private List<String> images;
private String system;
private String context;
private boolean raw;
public OllamaGenerateRequest() {
}
public OllamaGenerateRequest(String model, String prompt) {
this.model = model;
this.prompt = prompt;
}
public OllamaGenerateRequest(String model, String prompt, List<String> images) {
this.model = model;
this.prompt = prompt;
this.images = images;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof OllamaGenerateRequest)) {
return false;
}
return this.toString().equals(o.toString());
}
}

View File

@ -1,7 +0,0 @@
package io.github.ollama4j.models.generate;
import java.util.function.Consumer;
public interface OllamaStreamHandler extends Consumer<String> {
void accept(String message);
}

View File

@ -1,63 +0,0 @@
package io.github.ollama4j.models.ps;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
@Data
@NoArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public class ModelsProcessResponse {
@JsonProperty("models")
private List<ModelProcess> models;
@Data
@NoArgsConstructor
public static class ModelProcess {
@JsonProperty("name")
private String name;
@JsonProperty("model")
private String model;
@JsonProperty("size")
private long size;
@JsonProperty("digest")
private String digest;
@JsonProperty("details")
private ModelDetails details;
@JsonProperty("expires_at")
private String expiresAt; // Consider using LocalDateTime if you need to process date/time
@JsonProperty("size_vram")
private long sizeVram;
}
@Data
@NoArgsConstructor
public static class ModelDetails {
@JsonProperty("parent_model")
private String parentModel;
@JsonProperty("format")
private String format;
@JsonProperty("family")
private String family;
@JsonProperty("families")
private List<String> families;
@JsonProperty("parameter_size")
private String parameterSize;
@JsonProperty("quantization_level")
private String quantizationLevel;
}
}
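
The class above maps Ollama's list of running models (fields such as expires_at and size_vram). A minimal deserialization sketch against the @JsonProperty names declared above; the payload values are made up:

import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.ollama4j.models.ps.ModelsProcessResponse;

public class RunningModelsSketch {
    public static void main(String[] args) throws Exception {
        // Payload shaped after the annotated fields above; values are placeholders.
        String json = "{\"models\":[{\"name\":\"llama3:latest\",\"model\":\"llama3:latest\","
                + "\"size\":4661224676,\"digest\":\"abc123\",\"size_vram\":4661224676,"
                + "\"expires_at\":\"2024-06-04T14:38:31Z\","
                + "\"details\":{\"format\":\"gguf\",\"family\":\"llama\","
                + "\"parameter_size\":\"8B\",\"quantization_level\":\"Q4_0\"}}]}";
        ModelsProcessResponse response =
                new ObjectMapper().readValue(json, ModelsProcessResponse.class);
        System.out.println(response.getModels().get(0).getName());
        System.out.println(response.getModels().get(0).getDetails().getQuantizationLevel());
    }
}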

View File

@ -1,51 +0,0 @@
package io.github.ollama4j.models.request;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.utils.OllamaRequestBody;
import io.github.ollama4j.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {
private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class);
private OllamaGenerateStreamObserver streamObserver;
public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
super(host, basicAuth, requestTimeoutSeconds, verbose);
}
@Override
protected String getEndpointSuffix() {
return "/api/generate";
}
@Override
protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
try {
OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
responseBuffer.append(ollamaResponseModel.getResponse());
if (streamObserver != null) {
streamObserver.notify(ollamaResponseModel);
}
return ollamaResponseModel.isDone();
} catch (JsonProcessingException e) {
LOG.error("Error parsing the Ollama chat response!", e);
return true;
}
}
public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
throws OllamaBaseException, IOException, InterruptedException {
streamObserver = new OllamaGenerateStreamObserver(streamHandler);
return super.callSync(body);
}
}

View File

@ -1,123 +0,0 @@
package io.github.ollama4j.models.response;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
import io.github.ollama4j.utils.Utils;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
@Data
@EqualsAndHashCode(callSuper = true)
@SuppressWarnings("unused")
public class OllamaAsyncResultStreamer extends Thread {
private final HttpRequest.Builder requestBuilder;
private final OllamaGenerateRequest ollamaRequestModel;
private final OllamaResultStream stream = new OllamaResultStream();
private String completeResponse;
/**
* -- GETTER -- Returns the status of the request. Indicates if the request was successful or a
* failure. If the request was a failure, the `getResponse()` method will return the error
* message.
*/
@Getter
private boolean succeeded;
@Setter
private long requestTimeoutSeconds;
/**
* -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama
* server.
*/
@Getter
private int httpStatusCode;
/**
* -- GETTER -- Returns the response time in milliseconds.
*/
@Getter
private long responseTime = 0;
public OllamaAsyncResultStreamer(
HttpRequest.Builder requestBuilder,
OllamaGenerateRequest ollamaRequestModel,
long requestTimeoutSeconds) {
this.requestBuilder = requestBuilder;
this.ollamaRequestModel = ollamaRequestModel;
this.completeResponse = "";
this.stream.add("");
this.requestTimeoutSeconds = requestTimeoutSeconds;
}
@Override
public void run() {
ollamaRequestModel.setStream(true);
HttpClient httpClient = HttpClient.newHttpClient();
try {
long startTime = System.currentTimeMillis();
HttpRequest request =
requestBuilder
.POST(
HttpRequest.BodyPublishers.ofString(
Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)))
.header("Content-Type", "application/json")
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
.build();
HttpResponse<InputStream> response =
httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
int statusCode = response.statusCode();
this.httpStatusCode = statusCode;
InputStream responseBodyStream = response.body();
try (BufferedReader reader =
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
String line;
StringBuilder responseBuffer = new StringBuilder();
while ((line = reader.readLine()) != null) {
if (statusCode == 404) {
OllamaErrorResponse ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
stream.add(ollamaResponseModel.getError());
responseBuffer.append(ollamaResponseModel.getError());
} else {
OllamaGenerateResponseModel ollamaResponseModel =
Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
String res = ollamaResponseModel.getResponse();
stream.add(res);
if (!ollamaResponseModel.isDone()) {
responseBuffer.append(res);
}
}
}
this.succeeded = true;
this.completeResponse = responseBuffer.toString();
long endTime = System.currentTimeMillis();
responseTime = endTime - startTime;
}
if (statusCode != 200) {
throw new OllamaBaseException(this.completeResponse);
}
} catch (IOException | InterruptedException | OllamaBaseException e) {
this.succeeded = false;
this.completeResponse = "[FAILED] " + e.getMessage();
}
}
}
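
OllamaAsyncResultStreamer is a Thread: run() posts the request, reads the response line by line, and pushes tokens into the stream while tracking status and timing. A usage sketch; the host, model, and prompt are placeholders, and in normal use the request builder would be wired up by the API class rather than by hand:

import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
import java.net.URI;
import java.net.http.HttpRequest;

public class AsyncStreamerSketch {
    public static void main(String[] args) throws InterruptedException {
        HttpRequest.Builder requestBuilder =
                HttpRequest.newBuilder(URI.create("http://localhost:11434/api/generate"));
        OllamaGenerateRequest request = new OllamaGenerateRequest("llama3", "Why is the sky blue?");

        OllamaAsyncResultStreamer streamer =
                new OllamaAsyncResultStreamer(requestBuilder, request, 60);
        streamer.start(); // run() streams the response in the background

        while (streamer.isAlive()) {
            System.out.print(streamer.getStream().poll()); // drain tokens received so far
            Thread.sleep(250);
        }
        System.out.print(streamer.getStream().poll()); // pick up anything left after the thread exits
        System.out.println();
        System.out.println("HTTP " + streamer.getHttpStatusCode() + ", succeeded=" + streamer.isSucceeded());
    }
}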

View File

@ -1,18 +0,0 @@
package io.github.ollama4j.models.response;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Queue;
public class OllamaResultStream extends LinkedList<String> implements Queue<String> {
@Override
public String poll() {
StringBuilder tokens = new StringBuilder();
Iterator<String> iterator = this.listIterator();
while (iterator.hasNext()) {
tokens.append(iterator.next());
iterator.remove();
}
return tokens.toString();
}
}
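
Note that poll() here departs from the usual Queue contract: instead of removing a single element it drains everything accumulated so far and returns the concatenation, so a consumer can periodically collect whatever tokens have arrived. A small sketch:

import io.github.ollama4j.models.response.OllamaResultStream;

public class ResultStreamSketch {
    public static void main(String[] args) {
        OllamaResultStream stream = new OllamaResultStream();
        // A producer (such as the async streamer above) adds tokens as they arrive.
        stream.add("The sky ");
        stream.add("is blue.");
        System.out.println(stream.poll());    // "The sky is blue."
        System.out.println(stream.isEmpty()); // true: poll() emptied the queue
    }
}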

View File

@ -1,35 +0,0 @@
package io.github.ollama4j.tools;
import io.github.ollama4j.models.response.OllamaResult;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class OllamaToolsResult {
private OllamaResult modelResult;
private Map<ToolFunctionCallSpec, Object> toolResults;
public List<ToolResult> getToolResults() {
List<ToolResult> results = new ArrayList<>();
for (Map.Entry<ToolFunctionCallSpec, Object> r : this.toolResults.entrySet()) {
results.add(new ToolResult(r.getKey().getName(), r.getKey().getArguments(), r.getValue()));
}
return results;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class ToolResult {
private String functionName;
private Map<String, Object> functionArguments;
private Object result;
}
}

View File

@ -1,8 +0,0 @@
package io.github.ollama4j.tools;
import java.util.Map;
@FunctionalInterface
public interface ToolFunction {
Object apply(Map<String, Object> arguments);
}

View File

@ -1,16 +0,0 @@
package io.github.ollama4j.tools;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Map;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class ToolFunctionCallSpec {
private String name;
private Map<String, Object> arguments;
}

View File

@ -1,16 +0,0 @@
package io.github.ollama4j.tools;
import java.util.HashMap;
import java.util.Map;
public class ToolRegistry {
private final Map<String, ToolFunction> functionMap = new HashMap<>();
public ToolFunction getFunction(String name) {
return functionMap.get(name);
}
public void addFunction(String name, ToolFunction function) {
functionMap.put(name, function);
}
}
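
ToolFunction is a functional interface (a Map of arguments in, an Object out), and ToolRegistry is a plain name-to-function lookup. A small sketch of registering and invoking a function; the tool name and behaviour are illustrative:

import io.github.ollama4j.tools.ToolFunction;
import io.github.ollama4j.tools.ToolRegistry;
import java.util.Map;

public class ToolRegistrySketch {
    public static void main(String[] args) {
        ToolRegistry registry = new ToolRegistry();
        // A lambda satisfies ToolFunction since it is a @FunctionalInterface.
        ToolFunction weather = arguments -> "It is sunny in " + arguments.get("city");
        registry.addFunction("get-weather", weather);

        Object result = registry.getFunction("get-weather").apply(Map.of("city", "Paris"));
        System.out.println(result); // It is sunny in Paris
    }
}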

View File

@ -1,113 +0,0 @@
package io.github.ollama4j.tools;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.ollama4j.utils.Utils;
import lombok.Builder;
import lombok.Data;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class Tools {
@Data
@Builder
public static class ToolSpecification {
private String functionName;
private String functionDescription;
private Map<String, PromptFuncDefinition.Property> properties;
private ToolFunction toolDefinition;
}
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class PromptFuncDefinition {
private String type;
private PromptFuncSpec function;
@Data
public static class PromptFuncSpec {
private String name;
private String description;
private Parameters parameters;
}
@Data
public static class Parameters {
private String type;
private Map<String, Property> properties;
private List<String> required;
}
@Data
@Builder
public static class Property {
private String type;
private String description;
@JsonProperty("enum")
@JsonInclude(JsonInclude.Include.NON_NULL)
private List<String> enumValues;
@JsonIgnore
private boolean required;
}
}
public static class PropsBuilder {
private final Map<String, PromptFuncDefinition.Property> props = new HashMap<>();
public PropsBuilder withProperty(String key, PromptFuncDefinition.Property property) {
props.put(key, property);
return this;
}
public Map<String, PromptFuncDefinition.Property> build() {
return props;
}
}
public static class PromptBuilder {
private final List<PromptFuncDefinition> tools = new ArrayList<>();
private String promptText;
public String build() throws JsonProcessingException {
return "[AVAILABLE_TOOLS] " + Utils.getObjectMapper().writeValueAsString(tools) + "[/AVAILABLE_TOOLS][INST] " + promptText + " [/INST]";
}
public PromptBuilder withPrompt(String prompt) throws JsonProcessingException {
promptText = prompt;
return this;
}
public PromptBuilder withToolSpecification(ToolSpecification spec) {
PromptFuncDefinition def = new PromptFuncDefinition();
def.setType("function");
PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec();
functionDetail.setName(spec.getFunctionName());
functionDetail.setDescription(spec.getFunctionDescription());
PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters();
parameters.setType("object");
parameters.setProperties(spec.getProperties());
List<String> requiredValues = new ArrayList<>();
for (Map.Entry<String, PromptFuncDefinition.Property> p : spec.getProperties().entrySet()) {
if (p.getValue().isRequired()) {
requiredValues.add(p.getKey());
}
}
parameters.setRequired(requiredValues);
functionDetail.setParameters(parameters);
def.setFunction(functionDetail);
tools.add(def);
return this;
}
}
}
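
Putting the pieces above together: a ToolSpecification describes one function and its properties, PropsBuilder collects the property map, and Tools.PromptBuilder wraps the serialized specifications in [AVAILABLE_TOOLS]...[/AVAILABLE_TOOLS] ahead of the [INST] prompt. A sketch with an illustrative function name and property:

import io.github.ollama4j.tools.Tools;

public class ToolsPromptSketch {
    public static void main(String[] args) throws Exception {
        Tools.ToolSpecification spec = Tools.ToolSpecification.builder()
                .functionName("get-weather")
                .functionDescription("Returns the current weather for a city")
                .toolDefinition(arguments -> "It is sunny in " + arguments.get("city"))
                .properties(new Tools.PropsBuilder()
                        .withProperty("city", Tools.PromptFuncDefinition.Property.builder()
                                .type("string")
                                .description("Name of the city")
                                .required(true)
                                .build())
                        .build())
                .build();

        String prompt = new Tools.PromptBuilder()
                .withToolSpecification(spec)
                .withPrompt("What is the weather in Paris?")
                .build();
        System.out.println(prompt); // [AVAILABLE_TOOLS] [...] [/AVAILABLE_TOOLS][INST] ... [/INST]
    }
}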

View File

@ -0,0 +1,393 @@
package io.github.amithkoujalgi.ollama4j.integrationtests;
import static org.junit.jupiter.api.Assertions.*;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.ConnectException;
import java.net.URISyntaxException;
import java.net.http.HttpConnectTimeoutException;
import java.util.List;
import java.util.Objects;
import java.util.Properties;
import lombok.Data;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
class TestRealAPIs {
private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class);
OllamaAPI ollamaAPI;
Config config;
private File getImageFileFromClasspath(String fileName) {
ClassLoader classLoader = getClass().getClassLoader();
return new File(Objects.requireNonNull(classLoader.getResource(fileName)).getFile());
}
@BeforeEach
void setUp() {
config = new Config();
ollamaAPI = new OllamaAPI(config.getOllamaURL());
ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds());
}
@Test
@Order(1)
void testWrongEndpoint() {
OllamaAPI ollamaAPI = new OllamaAPI("http://wrong-host:11434");
assertThrows(ConnectException.class, ollamaAPI::listModels);
}
@Test
@Order(1)
void testEndpointReachability() {
try {
assertNotNull(ollamaAPI.listModels());
} catch (HttpConnectTimeoutException e) {
fail(e.getMessage());
} catch (Exception e) {
fail(e);
}
}
@Test
@Order(2)
void testListModels() {
testEndpointReachability();
try {
assertNotNull(ollamaAPI.listModels());
ollamaAPI.listModels().forEach(System.out::println);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
fail(e);
}
}
@Test
@Order(2)
void testPullModel() {
testEndpointReachability();
try {
ollamaAPI.pullModel(config.getModel());
boolean found =
ollamaAPI.listModels().stream()
.anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel()));
assertTrue(found);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
fail(e);
}
}
@Test
@Order(3)
void testListDetails() {
testEndpointReachability();
try {
ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel());
assertNotNull(modelDetails);
System.out.println(modelDetails);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
fail(e);
}
}
@Test
@Order(3)
void testAskModelWithDefaultOptions() {
testEndpointReachability();
try {
OllamaResult result =
ollamaAPI.generate(
config.getModel(),
"What is the capital of France? And what's France's connection with Mona Lisa?",
new OptionsBuilder().build());
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testAskModelWithDefaultOptionsStreamed() {
testEndpointReachability();
try {
StringBuffer sb = new StringBuffer("");
OllamaResult result = ollamaAPI.generate(config.getModel(),
"What is the capital of France? And what's France's connection with Mona Lisa?",
new OptionsBuilder().build(), (s) -> {
LOG.info(s);
String substring = s.substring(sb.toString().length(), s.length());
LOG.info(substring);
sb.append(substring);
});
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
assertEquals(sb.toString().trim(), result.getResponse().trim());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testAskModelWithOptions() {
testEndpointReachability();
try {
OllamaResult result =
ollamaAPI.generate(
config.getModel(),
"What is the capital of France? And what's France's connection with Mona Lisa?",
new OptionsBuilder().setTemperature(0.9f).build());
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testChat() {
testEndpointReachability();
try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
.withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!")
.withMessage(OllamaChatMessageRole.USER,"And what is the second larges city?")
.build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
assertNotNull(chatResult);
assertFalse(chatResult.getResponse().isBlank());
assertEquals(4,chatResult.getChatHistory().size());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testChatWithSystemPrompt() {
testEndpointReachability();
try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM,
"You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
.withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?")
.build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
assertNotNull(chatResult);
assertFalse(chatResult.getResponse().isBlank());
assertTrue(chatResult.getResponse().startsWith("NI"));
assertEquals(3, chatResult.getChatHistory().size());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testChatWithStream() {
testEndpointReachability();
try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?")
.build();
StringBuffer sb = new StringBuffer("");
OllamaChatResult chatResult = ollamaAPI.chat(requestModel,(s) -> {
LOG.info(s);
String substring = s.substring(sb.toString().length(), s.length());
LOG.info(substring);
sb.append(substring);
});
assertNotNull(chatResult);
assertEquals(sb.toString().trim(), chatResult.getResponse().trim());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testChatWithImageFromFileWithHistoryRecognition() {
testEndpointReachability();
try {
OllamaChatRequestBuilder builder =
OllamaChatRequestBuilder.getInstance(config.getImageModel());
OllamaChatRequestModel requestModel =
builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
assertNotNull(chatResult);
assertNotNull(chatResult.getResponse());
builder.reset();
requestModel =
builder.withMessages(chatResult.getChatHistory())
.withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();
chatResult = ollamaAPI.chat(requestModel);
assertNotNull(chatResult);
assertNotNull(chatResult.getResponse());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testChatWithImageFromURL() {
testEndpointReachability();
try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel());
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")
.build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
assertNotNull(chatResult);
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testAskModelWithOptionsAndImageFiles() {
testEndpointReachability();
File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
try {
OllamaResult result =
ollamaAPI.generateWithImageFiles(
config.getImageModel(),
"What is in this image?",
List.of(imageFile),
new OptionsBuilder().build());
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testAskModelWithOptionsAndImageFilesStreamed() {
testEndpointReachability();
File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
try {
StringBuffer sb = new StringBuffer("");
OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(),
"What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> {
LOG.info(s);
String substring = s.substring(sb.toString().length(), s.length());
LOG.info(substring);
sb.append(substring);
});
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
assertEquals(sb.toString().trim(), result.getResponse().trim());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testAskModelWithOptionsAndImageURLs() {
testEndpointReachability();
try {
OllamaResult result =
ollamaAPI.generateWithImageURLs(
config.getImageModel(),
"What is in this image?",
List.of(
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
new OptionsBuilder().build());
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
fail(e);
}
}
@Test
@Order(3)
public void testEmbedding() {
testEndpointReachability();
try {
OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder
.getInstance(config.getModel(), "What is the capital of France?").build();
List<Double> embeddings = ollamaAPI.generateEmbeddings(request);
assertNotNull(embeddings);
assertFalse(embeddings.isEmpty());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
}
@Data
class Config {
private String ollamaURL;
private String model;
private String imageModel;
private int requestTimeoutSeconds;
public Config() {
Properties properties = new Properties();
try (InputStream input =
getClass().getClassLoader().getResourceAsStream("test-config.properties")) {
if (input == null) {
throw new RuntimeException("Sorry, unable to find test-config.properties");
}
properties.load(input);
this.ollamaURL = properties.getProperty("ollama.url");
this.model = properties.getProperty("ollama.model");
this.imageModel = properties.getProperty("ollama.model.image");
this.requestTimeoutSeconds =
Integer.parseInt(properties.getProperty("ollama.request-timeout-seconds"));
} catch (IOException e) {
throw new RuntimeException("Error loading properties", e);
}
}
}
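
The Config class above pulls the integration-test settings from a test-config.properties file on the classpath. A matching file, with placeholder values, would look like:

ollama.url=http://localhost:11434
ollama.model=llama2
ollama.model.image=llava
ollama.request-timeout-seconds=120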

View File

@ -0,0 +1,163 @@
package io.github.amithkoujalgi.ollama4j.unittests;
import static org.mockito.Mockito.*;
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultCallback;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
class TestMockedAPIs {
@Test
void testPullModel() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
try {
doNothing().when(ollamaAPI).pullModel(model);
ollamaAPI.pullModel(model);
verify(ollamaAPI, times(1)).pullModel(model);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
throw new RuntimeException(e);
}
}
@Test
void testListModels() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
try {
when(ollamaAPI.listModels()).thenReturn(new ArrayList<>());
ollamaAPI.listModels();
verify(ollamaAPI, times(1)).listModels();
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
throw new RuntimeException(e);
}
}
@Test
void testCreateModel() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
String modelFilePath = "FROM llama2\nSYSTEM You are mario from Super Mario Bros.";
try {
doNothing().when(ollamaAPI).createModelWithModelFileContents(model, modelFilePath);
ollamaAPI.createModelWithModelFileContents(model, modelFilePath);
verify(ollamaAPI, times(1)).createModelWithModelFileContents(model, modelFilePath);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
throw new RuntimeException(e);
}
}
@Test
void testDeleteModel() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
try {
doNothing().when(ollamaAPI).deleteModel(model, true);
ollamaAPI.deleteModel(model, true);
verify(ollamaAPI, times(1)).deleteModel(model, true);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
throw new RuntimeException(e);
}
}
@Test
void testGetModelDetails() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
try {
when(ollamaAPI.getModelDetails(model)).thenReturn(new ModelDetail());
ollamaAPI.getModelDetails(model);
verify(ollamaAPI, times(1)).getModelDetails(model);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
throw new RuntimeException(e);
}
}
@Test
void testGenerateEmbeddings() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
String prompt = "some prompt text";
try {
when(ollamaAPI.generateEmbeddings(model, prompt)).thenReturn(new ArrayList<>());
ollamaAPI.generateEmbeddings(model, prompt);
verify(ollamaAPI, times(1)).generateEmbeddings(model, prompt);
} catch (IOException | OllamaBaseException | InterruptedException e) {
throw new RuntimeException(e);
}
}
@Test
void testAsk() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
String prompt = "some prompt text";
OptionsBuilder optionsBuilder = new OptionsBuilder();
try {
when(ollamaAPI.generate(model, prompt, optionsBuilder.build()))
.thenReturn(new OllamaResult("", 0, 200));
ollamaAPI.generate(model, prompt, optionsBuilder.build());
verify(ollamaAPI, times(1)).generate(model, prompt, optionsBuilder.build());
} catch (IOException | OllamaBaseException | InterruptedException e) {
throw new RuntimeException(e);
}
}
@Test
void testAskWithImageFiles() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
String prompt = "some prompt text";
try {
when(ollamaAPI.generateWithImageFiles(
model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
.thenReturn(new OllamaResult("", 0, 200));
ollamaAPI.generateWithImageFiles(
model, prompt, Collections.emptyList(), new OptionsBuilder().build());
verify(ollamaAPI, times(1))
.generateWithImageFiles(
model, prompt, Collections.emptyList(), new OptionsBuilder().build());
} catch (IOException | OllamaBaseException | InterruptedException e) {
throw new RuntimeException(e);
}
}
@Test
void testAskWithImageURLs() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
String prompt = "some prompt text";
try {
when(ollamaAPI.generateWithImageURLs(
model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
.thenReturn(new OllamaResult("", 0, 200));
ollamaAPI.generateWithImageURLs(
model, prompt, Collections.emptyList(), new OptionsBuilder().build());
verify(ollamaAPI, times(1))
.generateWithImageURLs(
model, prompt, Collections.emptyList(), new OptionsBuilder().build());
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
throw new RuntimeException(e);
}
}
@Test
void testAskAsync() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
String prompt = "some prompt text";
when(ollamaAPI.generateAsync(model, prompt))
.thenReturn(new OllamaAsyncResultCallback(null, null, 3));
ollamaAPI.generateAsync(model, prompt);
verify(ollamaAPI, times(1)).generateAsync(model, prompt);
}
}

View File

@ -1,35 +1,35 @@
package io.github.ollama4j.unittests.jackson;
package io.github.amithkoujalgi.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.ollama4j.utils.Utils;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
public abstract class AbstractSerializationTest<T> {
public abstract class AbstractRequestSerializationTest<T> {
protected ObjectMapper mapper = Utils.getObjectMapper();
protected String serialize(T obj) {
protected String serializeRequest(T req) {
try {
return mapper.writeValueAsString(obj);
return mapper.writeValueAsString(req);
} catch (JsonProcessingException e) {
fail("Could not serialize request!", e);
return null;
}
}
protected T deserialize(String jsonObject, Class<T> deserializationClass) {
protected T deserializeRequest(String jsonRequest, Class<T> requestClass) {
try {
return mapper.readValue(jsonObject, deserializationClass);
return mapper.readValue(jsonRequest, requestClass);
} catch (JsonProcessingException e) {
fail("Could not deserialize jsonObject!", e);
fail("Could not deserialize jsonRequest!", e);
return null;
}
}
protected void assertEqualsAfterUnmarshalling(T unmarshalledObject,
protected void assertEqualsAfterUnmarshalling(T unmarshalledRequest,
T req) {
assertEquals(req, unmarshalledObject);
assertEquals(req, unmarshalledRequest);
}
}

View File

@ -1,20 +1,20 @@
package io.github.ollama4j.unittests.jackson;
package io.github.amithkoujalgi.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.File;
import java.util.List;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import org.json.JSONObject;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.utils.OptionsBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
public class TestChatRequestSerialization extends AbstractSerializationTest<OllamaChatRequest> {
public class TestChatRequestSerialization extends AbstractRequestSerializationTest<OllamaChatRequestModel>{
private OllamaChatRequestBuilder builder;
@ -25,32 +25,32 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
@Test
public void testRequestOnlyMandatoryFields() {
OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build();
String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build();
String jsonRequest = serializeRequest(req);
assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
}
@Test
public void testRequestMultipleMessages() {
OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt")
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt")
.withMessage(OllamaChatMessageRole.USER, "Some prompt")
.build();
String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
String jsonRequest = serializeRequest(req);
assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
}
@Test
public void testRequestWithMessageAndImage() {
OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
String jsonRequest = serializeRequest(req);
assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
}
@Test
public void testRequestWithOptions() {
OptionsBuilder b = new OptionsBuilder();
OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
.withOptions(b.setMirostat(1).build())
.withOptions(b.setTemperature(1L).build())
.withOptions(b.setMirostatEta(1L).build())
@ -61,8 +61,8 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
.withOptions(b.setTopP(1).build())
.build();
String jsonRequest = serialize(req);
OllamaChatRequest deserializeRequest = deserialize(jsonRequest, OllamaChatRequest.class);
String jsonRequest = serializeRequest(req);
OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaChatRequestModel.class);
assertEqualsAfterUnmarshalling(deserializeRequest, req);
assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
assertEquals(1.0, deserializeRequest.getOptions().get("temperature"));
@ -76,10 +76,10 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
@Test
public void testWithJsonFormat() {
OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
.withGetJsonResponse().build();
String jsonRequest = serialize(req);
String jsonRequest = serializeRequest(req);
// no jackson deserialization as format property is not boolean ==> omit as deserialization
// of request is never used in real code anyways
JSONObject jsonObject = new JSONObject(jsonRequest);
@ -89,25 +89,25 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
@Test
public void testWithTemplate() {
OllamaChatRequest req = builder.withTemplate("System Template")
OllamaChatRequestModel req = builder.withTemplate("System Template")
.build();
String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
String jsonRequest = serializeRequest(req);
assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaChatRequestModel.class), req);
}
@Test
public void testWithStreaming() {
OllamaChatRequest req = builder.withStreaming().build();
String jsonRequest = serialize(req);
assertEquals(deserialize(jsonRequest, OllamaChatRequest.class).isStream(), true);
OllamaChatRequestModel req = builder.withStreaming().build();
String jsonRequest = serializeRequest(req);
assertEquals(deserializeRequest(jsonRequest, OllamaChatRequestModel.class).isStream(), true);
}
@Test
public void testWithKeepAlive() {
String expectedKeepAlive = "5m";
OllamaChatRequest req = builder.withKeepAlive(expectedKeepAlive)
OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive)
.build();
String jsonRequest = serialize(req);
assertEquals(deserialize(jsonRequest, OllamaChatRequest.class).getKeepAlive(), expectedKeepAlive);
String jsonRequest = serializeRequest(req);
assertEquals(deserializeRequest(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive);
}
}

View File

@ -1,13 +1,13 @@
package io.github.ollama4j.unittests.jackson;
package io.github.amithkoujalgi.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.ollama4j.utils.OptionsBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
public class TestEmbeddingsRequestSerialization extends AbstractSerializationTest<OllamaEmbeddingsRequestModel> {
public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializationTest<OllamaEmbeddingsRequestModel>{
private OllamaEmbeddingsRequestBuilder builder;
@ -19,8 +19,8 @@ public class TestEmbeddingsRequestSerialization extends AbstractSerializationTes
@Test
public void testRequestOnlyMandatoryFields() {
OllamaEmbeddingsRequestModel req = builder.build();
String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class), req);
String jsonRequest = serializeRequest(req);
assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class), req);
}
@Test
@ -29,8 +29,8 @@ public class TestEmbeddingsRequestSerialization extends AbstractSerializationTes
OllamaEmbeddingsRequestModel req = builder
.withOptions(b.setMirostat(1).build()).build();
String jsonRequest = serialize(req);
OllamaEmbeddingsRequestModel deserializeRequest = deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class);
String jsonRequest = serializeRequest(req);
OllamaEmbeddingsRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class);
assertEqualsAfterUnmarshalling(deserializeRequest, req);
assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
}

View File

@ -1,17 +1,17 @@
package io.github.ollama4j.unittests.jackson;
package io.github.amithkoujalgi.ollama4j.unittests.jackson;
import static org.junit.jupiter.api.Assertions.assertEquals;
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import org.json.JSONObject;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
import io.github.ollama4j.utils.OptionsBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
public class TestGenerateRequestSerialization extends AbstractSerializationTest<OllamaGenerateRequest> {
public class TestGenerateRequestSerialization extends AbstractRequestSerializationTest<OllamaGenerateRequestModel>{
private OllamaGenerateRequestBuilder builder;
@ -22,30 +22,30 @@ public class TestGenerateRequestSerialization extends AbstractSerializationTest<
@Test
public void testRequestOnlyMandatoryFields() {
OllamaGenerateRequest req = builder.withPrompt("Some prompt").build();
OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build();
String jsonRequest = serialize(req);
assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequest.class), req);
String jsonRequest = serializeRequest(req);
assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class), req);
}
@Test
public void testRequestWithOptions() {
OptionsBuilder b = new OptionsBuilder();
OllamaGenerateRequest req =
OllamaGenerateRequestModel req =
builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build();
String jsonRequest = serialize(req);
OllamaGenerateRequest deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequest.class);
String jsonRequest = serializeRequest(req);
OllamaGenerateRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class);
assertEqualsAfterUnmarshalling(deserializeRequest, req);
assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
}
@Test
public void testWithJsonFormat() {
OllamaGenerateRequest req =
OllamaGenerateRequestModel req =
builder.withPrompt("Some prompt").withGetJsonResponse().build();
String jsonRequest = serialize(req);
String jsonRequest = serializeRequest(req);
// no jackson deserialization as format property is not boolean ==> omit as deserialization
// of request is never used in real code anyways
JSONObject jsonObject = new JSONObject(jsonRequest);

View File

@ -1,395 +0,0 @@
package io.github.ollama4j.integrationtests;
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.response.ModelDetail;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.ollama4j.utils.OptionsBuilder;
import lombok.Data;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.ConnectException;
import java.net.URISyntaxException;
import java.net.http.HttpConnectTimeoutException;
import java.util.List;
import java.util.Objects;
import java.util.Properties;
import static org.junit.jupiter.api.Assertions.*;
class TestRealAPIs {
private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class);
OllamaAPI ollamaAPI;
Config config;
private File getImageFileFromClasspath(String fileName) {
ClassLoader classLoader = getClass().getClassLoader();
return new File(Objects.requireNonNull(classLoader.getResource(fileName)).getFile());
}
@BeforeEach
void setUp() {
config = new Config();
ollamaAPI = new OllamaAPI(config.getOllamaURL());
ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds());
}
@Test
@Order(1)
void testWrongEndpoint() {
OllamaAPI ollamaAPI = new OllamaAPI("http://wrong-host:11434");
assertThrows(ConnectException.class, ollamaAPI::listModels);
}
@Test
@Order(1)
void testEndpointReachability() {
try {
assertNotNull(ollamaAPI.listModels());
} catch (HttpConnectTimeoutException e) {
fail(e.getMessage());
} catch (Exception e) {
fail(e);
}
}
@Test
@Order(2)
void testListModels() {
testEndpointReachability();
try {
assertNotNull(ollamaAPI.listModels());
ollamaAPI.listModels().forEach(System.out::println);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
fail(e);
}
}
@Test
@Order(2)
void testPullModel() {
testEndpointReachability();
try {
ollamaAPI.pullModel(config.getModel());
boolean found =
ollamaAPI.listModels().stream()
.anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel()));
assertTrue(found);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
fail(e);
}
}
@Test
@Order(3)
void testListDetails() {
testEndpointReachability();
try {
ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel());
assertNotNull(modelDetails);
System.out.println(modelDetails);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
fail(e);
}
}
@Test
@Order(3)
void testAskModelWithDefaultOptions() {
testEndpointReachability();
try {
OllamaResult result =
ollamaAPI.generate(
config.getModel(),
"What is the capital of France? And what's France's connection with Mona Lisa?",
false,
new OptionsBuilder().build());
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testAskModelWithDefaultOptionsStreamed() {
testEndpointReachability();
try {
StringBuffer sb = new StringBuffer("");
OllamaResult result = ollamaAPI.generate(config.getModel(),
"What is the capital of France? And what's France's connection with Mona Lisa?",
false,
new OptionsBuilder().build(), (s) -> {
LOG.info(s);
String substring = s.substring(sb.toString().length(), s.length());
LOG.info(substring);
sb.append(substring);
});
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
assertEquals(sb.toString().trim(), result.getResponse().trim());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testAskModelWithOptions() {
testEndpointReachability();
try {
OllamaResult result =
ollamaAPI.generate(
config.getModel(),
"What is the capital of France? And what's France's connection with Mona Lisa?",
true,
new OptionsBuilder().setTemperature(0.9f).build());
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testChat() {
testEndpointReachability();
try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
.withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!")
.withMessage(OllamaChatMessageRole.USER, "And what is the second larges city?")
.build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
assertNotNull(chatResult);
assertFalse(chatResult.getResponse().isBlank());
assertEquals(4, chatResult.getChatHistory().size());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testChatWithSystemPrompt() {
testEndpointReachability();
try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM,
"You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
.withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?")
.build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
assertNotNull(chatResult);
assertFalse(chatResult.getResponse().isBlank());
assertTrue(chatResult.getResponse().startsWith("NI"));
assertEquals(3, chatResult.getChatHistory().size());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testChatWithStream() {
testEndpointReachability();
try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER,
"What is the capital of France? And what's France's connection with Mona Lisa?")
.build();
StringBuffer sb = new StringBuffer("");
OllamaChatResult chatResult = ollamaAPI.chat(requestModel, (s) -> {
                // The stream handler receives the full response accumulated so far;
                // log and append only the newly streamed portion.
                LOG.info(s);
                String delta = s.substring(sb.length());
                LOG.info(delta);
                sb.append(delta);
});
assertNotNull(chatResult);
assertEquals(sb.toString().trim(), chatResult.getResponse().trim());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testChatWithImageFromFileWithHistoryRecognition() {
testEndpointReachability();
try {
OllamaChatRequestBuilder builder =
OllamaChatRequestBuilder.getInstance(config.getImageModel());
OllamaChatRequest requestModel =
builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
assertNotNull(chatResult);
assertNotNull(chatResult.getResponse());
builder.reset();
requestModel =
builder.withMessages(chatResult.getChatHistory())
.withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();
chatResult = ollamaAPI.chat(requestModel);
assertNotNull(chatResult);
assertNotNull(chatResult.getResponse());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testChatWithImageFromURL() {
testEndpointReachability();
try {
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel());
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")
.build();
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
assertNotNull(chatResult);
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testAskModelWithOptionsAndImageFiles() {
testEndpointReachability();
File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
try {
OllamaResult result =
ollamaAPI.generateWithImageFiles(
config.getImageModel(),
"What is in this image?",
List.of(imageFile),
new OptionsBuilder().build());
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testAskModelWithOptionsAndImageFilesStreamed() {
testEndpointReachability();
File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
try {
StringBuffer sb = new StringBuffer("");
OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(),
"What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> {
                        // The stream handler receives the full response accumulated so far;
                        // log and append only the newly streamed portion.
                        LOG.info(s);
                        String delta = s.substring(sb.length());
                        LOG.info(delta);
                        sb.append(delta);
});
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
assertEquals(sb.toString().trim(), result.getResponse().trim());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
@Test
@Order(3)
void testAskModelWithOptionsAndImageURLs() {
testEndpointReachability();
try {
OllamaResult result =
ollamaAPI.generateWithImageURLs(
config.getImageModel(),
"What is in this image?",
List.of(
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
new OptionsBuilder().build());
assertNotNull(result);
assertNotNull(result.getResponse());
assertFalse(result.getResponse().isEmpty());
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
fail(e);
}
}
@Test
@Order(3)
public void testEmbedding() {
testEndpointReachability();
try {
OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder
.getInstance(config.getModel(), "What is the capital of France?").build();
List<Double> embeddings = ollamaAPI.generateEmbeddings(request);
assertNotNull(embeddings);
assertFalse(embeddings.isEmpty());
} catch (IOException | OllamaBaseException | InterruptedException e) {
fail(e);
}
}
}
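// Test configuration loaded from test-config.properties on the test classpath;
// Lombok's @Data generates the getters used by the tests above.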
@Data
class Config {
private String ollamaURL;
private String model;
private String imageModel;
private int requestTimeoutSeconds;
public Config() {
Properties properties = new Properties();
try (InputStream input =
getClass().getClassLoader().getResourceAsStream("test-config.properties")) {
if (input == null) {
throw new RuntimeException("Sorry, unable to find test-config.properties");
}
properties.load(input);
this.ollamaURL = properties.getProperty("ollama.url");
this.model = properties.getProperty("ollama.model");
this.imageModel = properties.getProperty("ollama.model.image");
this.requestTimeoutSeconds =
Integer.parseInt(properties.getProperty("ollama.request-timeout-seconds"));
} catch (IOException e) {
throw new RuntimeException("Error loading properties", e);
}
}
}
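For reference, a minimal sketch of the test-config.properties file that the Config class above expects on the test classpath. The property keys are the ones read by the loader; the values shown here (a local Ollama endpoint on the default 11434 port, example model names, a timeout) are placeholder assumptions, not taken from the repository:

# test-config.properties -- placeholder values, adjust to your environment
ollama.url=http://localhost:11434
ollama.model=llama2
ollama.model.image=llava
ollama.request-timeout-seconds=120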

View File

@ -1,164 +0,0 @@
package io.github.ollama4j.unittests;
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.response.ModelDetail;
import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import static org.mockito.Mockito.*;
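// These tests stub OllamaAPI with Mockito and only verify that the expected calls are
// made with the expected arguments; no running Ollama server is required.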
class TestMockedAPIs {
@Test
void testPullModel() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
try {
doNothing().when(ollamaAPI).pullModel(model);
ollamaAPI.pullModel(model);
verify(ollamaAPI, times(1)).pullModel(model);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
throw new RuntimeException(e);
}
}
@Test
void testListModels() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
try {
when(ollamaAPI.listModels()).thenReturn(new ArrayList<>());
ollamaAPI.listModels();
verify(ollamaAPI, times(1)).listModels();
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
throw new RuntimeException(e);
}
}
@Test
void testCreateModel() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
String modelFilePath = "FROM llama2\nSYSTEM You are mario from Super Mario Bros.";
try {
doNothing().when(ollamaAPI).createModelWithModelFileContents(model, modelFilePath);
ollamaAPI.createModelWithModelFileContents(model, modelFilePath);
verify(ollamaAPI, times(1)).createModelWithModelFileContents(model, modelFilePath);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
throw new RuntimeException(e);
}
}
@Test
void testDeleteModel() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
try {
doNothing().when(ollamaAPI).deleteModel(model, true);
ollamaAPI.deleteModel(model, true);
verify(ollamaAPI, times(1)).deleteModel(model, true);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
throw new RuntimeException(e);
}
}
@Test
void testGetModelDetails() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
try {
when(ollamaAPI.getModelDetails(model)).thenReturn(new ModelDetail());
ollamaAPI.getModelDetails(model);
verify(ollamaAPI, times(1)).getModelDetails(model);
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
throw new RuntimeException(e);
}
}
@Test
void testGenerateEmbeddings() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
String prompt = "some prompt text";
try {
when(ollamaAPI.generateEmbeddings(model, prompt)).thenReturn(new ArrayList<>());
ollamaAPI.generateEmbeddings(model, prompt);
verify(ollamaAPI, times(1)).generateEmbeddings(model, prompt);
} catch (IOException | OllamaBaseException | InterruptedException e) {
throw new RuntimeException(e);
}
}
@Test
void testAsk() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
String prompt = "some prompt text";
OptionsBuilder optionsBuilder = new OptionsBuilder();
try {
when(ollamaAPI.generate(model, prompt, false, optionsBuilder.build()))
.thenReturn(new OllamaResult("", 0, 200));
ollamaAPI.generate(model, prompt, false, optionsBuilder.build());
verify(ollamaAPI, times(1)).generate(model, prompt, false, optionsBuilder.build());
} catch (IOException | OllamaBaseException | InterruptedException e) {
throw new RuntimeException(e);
}
}
@Test
void testAskWithImageFiles() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
String prompt = "some prompt text";
try {
when(ollamaAPI.generateWithImageFiles(
model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
.thenReturn(new OllamaResult("", 0, 200));
ollamaAPI.generateWithImageFiles(
model, prompt, Collections.emptyList(), new OptionsBuilder().build());
verify(ollamaAPI, times(1))
.generateWithImageFiles(
model, prompt, Collections.emptyList(), new OptionsBuilder().build());
} catch (IOException | OllamaBaseException | InterruptedException e) {
throw new RuntimeException(e);
}
}
@Test
void testAskWithImageURLs() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
String prompt = "some prompt text";
try {
when(ollamaAPI.generateWithImageURLs(
model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
.thenReturn(new OllamaResult("", 0, 200));
ollamaAPI.generateWithImageURLs(
model, prompt, Collections.emptyList(), new OptionsBuilder().build());
verify(ollamaAPI, times(1))
.generateWithImageURLs(
model, prompt, Collections.emptyList(), new OptionsBuilder().build());
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
throw new RuntimeException(e);
}
}
@Test
void testAskAsync() {
OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
String model = OllamaModelType.LLAMA2;
String prompt = "some prompt text";
when(ollamaAPI.generateAsync(model, prompt, false))
.thenReturn(new OllamaAsyncResultStreamer(null, null, 3));
ollamaAPI.generateAsync(model, prompt, false);
verify(ollamaAPI, times(1)).generateAsync(model, prompt, false);
}
}

Some files were not shown because too many files have changed in this diff.