forked from Mirror/ollama4j
Compare commits
134 Commits
Commit SHA1 hashes in this comparison:

3ab9e4c283, 2db6a22cc7, cc69341620, 4589a9032c, da273402b5, cfa8aa14d7, bc4e8303aa, f2f740a2a0, 4cbb783a61, 5c9e0b7d8a,
2f8577a24d, 02116b7025, f3778f8786, c6141634db, d9f98ad901, 79d97445b8, 1c40697c96, f03026abb3, 63a6e81ac2, 76cad0f584,
bee2908d1e, 8a4c9fd969, d470f940b0, df402efaba, 677362abbf, 81689be194, fd93036d08, c9b05a725b, a4e1b4afe9, 3d21813abb,
383d0f56ca, af1b213a76, fed89a9643, fd32aa33ff, b8a13e89b1, c8f27edd6e, 5a936d8174, 9b5ddbf4c4, 7c233d5734, e85aeae6e0,
a05052e095, 10eb803e26, bd2da8fdda, b0bb082bec, 81f564ef7f, 006b52f3db, 16634e60e4, db8b73075b, dc9f79959a, 88f6d00763,
fd3a989a49, 7580c6a549, 9e6503d84b, ee21f7fdd8, ecc295f484, c528fef5fc, 38f1bda105, d8a703503a, dd9ba7c937, cf52c9610c,
e8d709e99a, 51fbedad69, 953605fa73, 30bfdd9c6d, 91ee6cb4c1, 8ef6fac28e, d9e3860123, 515d1f0399, be549430c5, 4744315d45,
8eea19a539, b5801d84e0, 165d04b1bb, 16d2160b52, e39c47b8e1, bb0785140b, e33ad1a1e3, cd60c506cb, b55925df28, 3a9b8c309d,
bf07159522, f8ca4d041d, 9c6a55f7b0, 2866d83a2f, 45e5d07581, 3a264cb6bb, e1b9d42771, 1a086c37c0, 54edba144c, 3ed3187ba9,
b7cd81a7f5, e750c2d7f9, 62f16131f3, 2cbaf12d7c, e2d555d404, c296b34174, e8f99f28ec, 250b1abc79, 42b15ad93f, 6f7a714bae,
92618e5084, 391a9242c3, e1b6dc3b54, 04124cf978, e4e717b747, 10d2a8f5ff, 899fa38805, 2df878c953, 78a5eedc8f, 364f961ee2,
b21aa6add2, ec4abd1c2d, 9900ae92fb, fa20daf6e5, 44949c0559, e88711a017, 32169ded18, 4b2d566fd9, fb4b7a7ce5, 18f27775b0,
cb462ad05a, 1eec22ca1a, c1f3c51f88, 7dd556293f, ee50131ce4, 2cd47dbfaa, e5296c1067, 0f00f05e3d, 976a3b82e5, ba26d620c4,
e45246a767, 7336668f0c, 11701fb222, 9224d2da06
58 .github/workflows/gh-mvn-publish.yml vendored — new file

@@ -0,0 +1,58 @@

```yaml
name: Release Artifacts to GitHub Maven Packages

on:
  release:
    types: [ created ]

jobs:
  build:

    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          java-version: '17'
          distribution: 'temurin'
          server-id: github
          settings-path: ${{ github.workspace }}

      - name: maven-settings-xml-action
        uses: whelk-io/maven-settings-xml-action@v22
        with:
          servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]'

      - name: Find and Replace
        uses: jacobtomlinson/gha-find-replace@v3
        with:
          find: "ollama4j-revision"
          replace: ${{ github.ref_name }}
          regex: false

      - name: Find and Replace
        uses: jacobtomlinson/gha-find-replace@v3
        with:
          find: "mvn-repo-id"
          replace: github
          regex: false

      - name: Import GPG key
        uses: crazy-max/ghaction-import-gpg@v6
        with:
          gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
          passphrase: ${{ secrets.GPG_PASSPHRASE }}
      - name: List keys
        run: gpg -K

      - name: Build with Maven
        run: mvn --file pom.xml -U clean package -Punit-tests

      - name: Publish to GitHub Packages Apache Maven
        run: mvn deploy -Punit-tests -s $GITHUB_WORKSPACE/settings.xml -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=github -Drepo.user=${{ secrets.GH_MVN_USER }} -Drepo.pass=${{ secrets.GH_MVN_PASS }} -DaltDeploymentRepository=github::default::https://maven.pkg.github.com/ollama4j/ollama4j
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
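The two Find and Replace steps above only work if the literal placeholders `ollama4j-revision` and `mvn-repo-id` actually appear in the project's build files. The project's `pom.xml` is not part of this changeset, so the following is purely an illustrative sketch (an assumption, not the real file) of how such placeholders could be declared so that the workflow can pin the release tag and the target repository at publish time:

```xml
<!-- Illustrative sketch only: the real pom.xml is not shown in this diff. -->
<project>
    <groupId>io.github.ollama4j</groupId>
    <artifactId>ollama4j</artifactId>
    <!-- rewritten to the release tag (e.g. 1.0.78) by the "ollama4j-revision" Find and Replace step -->
    <version>ollama4j-revision</version>

    <distributionManagement>
        <repository>
            <!-- rewritten to "github" here (or "central" in maven-publish.yml) by the "mvn-repo-id" step -->
            <id>mvn-repo-id</id>
            <url>https://maven.pkg.github.com/ollama4j/ollama4j</url>
        </repository>
    </distributionManagement>
</project>
```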
121 .github/workflows/maven-publish.yml vendored

````diff
@@ -1,68 +1,95 @@
 # This workflow will build a package using Maven and then publish it to GitHub packages when a release is created
 # For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path
 
-name: Test and Publish Package
-
-#on:
-#  release:
-#    types: [ "created" ]
+name: Release Artifacts to Maven Central
 
 on:
-  push:
-    branches: [ "main" ]
-  workflow_dispatch:
+  release:
+    types: [ created ]
+
+#on:
+#  pull_request:
+#    types: [ opened, reopened ]
+#    branches: [ "main" ]
 
 jobs:
   build:
 
     runs-on: ubuntu-latest
 
     permissions:
       contents: write
       packages: write
 
     steps:
       - uses: actions/checkout@v3
-      - name: Set up JDK 11
+      - name: Set up JDK 17
         uses: actions/setup-java@v3
         with:
-          java-version: '11'
-          distribution: 'adopt-hotspot'
+          java-version: '17'
+          distribution: 'temurin'
           server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
           settings-path: ${{ github.workspace }} # location for the settings.xml file
-      - name: Build with Maven
-        run: mvn --file pom.xml -U clean package -Punit-tests
-      - name: Set up Apache Maven Central (Overwrite settings.xml)
-        uses: actions/setup-java@v3
-        with: # running setup-java again overwrites the settings.xml
-          java-version: '11'
-          distribution: 'adopt-hotspot'
-          cache: 'maven'
-          server-id: ossrh
-          server-username: MAVEN_USERNAME
-          server-password: MAVEN_PASSWORD
-          gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
-          gpg-passphrase: MAVEN_GPG_PASSPHRASE
-      - name: Set up Maven cache
-        uses: actions/cache@v3
+      - name: maven-settings-xml-action
+        uses: whelk-io/maven-settings-xml-action@v22
         with:
-          path: ~/.m2/repository
-          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
-          restore-keys: |
-            ${{ runner.os }}-maven-
-      - name: Build
-        run: mvn -B -ntp clean install
-      - name: Upload coverage reports to Codecov
-        uses: codecov/codecov-action@v3
-        env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
-      - name: Publish to GitHub Packages Apache Maven
-        # if: >
-        #   github.event_name != 'pull_request' &&
-        #   github.ref_name == 'main' &&
-        #   contains(github.event.head_commit.message, 'release')
-        run: |
-          git config --global user.email "koujalgi.amith@gmail.com"
-          git config --global user.name "amithkoujalgi"
-          mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform
-        env:
-          MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
-          MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
-          MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
+          servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]'
+
+      - name: Import GPG key
+        uses: crazy-max/ghaction-import-gpg@v6
+        with:
+          gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
+          passphrase: ${{ secrets.GPG_PASSPHRASE }}
+      - name: List keys
+        run: gpg -K
+
+      - name: Find and Replace
+        uses: jacobtomlinson/gha-find-replace@v3
+        with:
+          find: "ollama4j-revision"
+          replace: ${{ github.ref_name }}
+          regex: false
+
+      - name: Find and Replace
+        uses: jacobtomlinson/gha-find-replace@v3
+        with:
+          find: "mvn-repo-id"
+          replace: central
+          regex: false
+
+      - name: Publish to Maven Central
+        run: mvn deploy -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=central -Drepo.user=${{ secrets.MVN_USER }} -Drepo.pass=${{ secrets.MVN_PASS }}
+
+      - name: Upload Release Asset - JAR
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ github.event.release.upload_url }}
+          asset_path: target/ollama4j-${{ github.ref_name }}.jar
+          asset_name: ollama4j-${{ github.ref_name }}.jar
+          asset_content_type: application/x-jar
+
+      - name: Upload Release Asset - Javadoc JAR
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ github.event.release.upload_url }}
+          asset_path: target/ollama4j-${{ github.ref_name }}-javadoc.jar
+          asset_name: ollama4j-${{ github.ref_name }}-javadoc.jar
+          asset_content_type: application/x-jar
+
+      - name: Upload Release Asset - Sources JAR
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ github.event.release.upload_url }}
+          asset_path: target/ollama4j-${{ github.ref_name }}-sources.jar
+          asset_name: ollama4j-${{ github.ref_name }}-sources.jar
+          asset_content_type: application/x-jar
````
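For orientation, the `whelk-io/maven-settings-xml-action` step above turns its `servers` JSON into a `settings.xml` that the later `mvn deploy` call reads; the generated file would look roughly like the sketch below (illustrative only — the `${repo.*}` placeholders are resolved by Maven from the `-Drepo.id`, `-Drepo.user` and `-Drepo.pass` properties passed on the command line):

```xml
<!-- Illustrative sketch of the settings.xml written by the maven-settings-xml-action step. -->
<settings>
    <servers>
        <server>
            <id>${repo.id}</id>
            <username>${repo.user}</username>
            <password>${repo.pass}</password>
        </server>
    </servers>
</settings>
```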
12 .github/workflows/publish-docs.yml vendored

````diff
@@ -2,9 +2,8 @@
 name: Deploy Docs to GH Pages
 
 on:
-  # Runs on pushes targeting the default branch
-  push:
-    branches: [ "main" ]
+  release:
+    types: [ created ]
 
   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:
@@ -47,6 +46,13 @@ jobs:
       - run: cd docs && npm ci
       - run: cd docs && npm run build
 
+      - name: Find and Replace
+        uses: jacobtomlinson/gha-find-replace@v3
+        with:
+          find: "ollama4j-revision"
+          replace: ${{ github.ref_name }}
+          regex: false
+
       - name: Build with Maven
         run: mvn --file pom.xml -U clean package && cp -r ./target/apidocs/. ./docs/build/apidocs
 
````
52 .github/workflows/publish-javadoc.yml vendored — file removed

@@ -1,52 +0,0 @@

```yaml
# Simple workflow for deploying static content to GitHub Pages
name: Deploy Javadoc content to Pages

on:
  # Runs on pushes targeting the default branch
  push:
    branches: [ "none" ]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write
  packages: write
# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  # Single deploy job since we're just deploying
  deploy:
    runs-on: ubuntu-latest

    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 11
        uses: actions/setup-java@v3
        with:
          java-version: '11'
          distribution: 'adopt-hotspot'
          server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
          settings-path: ${{ github.workspace }} # location for the settings.xml file
      - name: Build with Maven
        run: mvn --file pom.xml -U clean package
      - name: Setup Pages
        uses: actions/configure-pages@v3
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v2
        with:
          # Upload entire repository
          path: './target/apidocs/.'
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v2
```
128 CODE_OF_CONDUCT.md — new file

@@ -0,0 +1,128 @@

````markdown
# Contributor Covenant Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience
* Focusing on what is best not just for us as individuals, but for the overall community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful.

Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at koujalgi.amith@gmail.com. All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series of actions.

**Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.

Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity).

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see the FAQ at https://www.contributor-covenant.org/faq. Translations are available at https://www.contributor-covenant.org/translations.
````
4 Makefile

````diff
@@ -1,10 +1,10 @@
 build:
 	mvn -B clean install
 
-ut:
+unit-tests:
 	mvn clean test -Punit-tests
 
-it:
+integration-tests:
 	mvn clean verify -Pintegration-tests
 
 doxygen:
````
221 README.md

````diff
@@ -1,31 +1,46 @@
 ### Ollama4j
 
-<img src='https://raw.githubusercontent.com/amithkoujalgi/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon">
+<p align="center">
+  <img src='https://raw.githubusercontent.com/ollama4j/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon">
+</p>
 
 A Java library (wrapper/binding) for [Ollama](https://ollama.ai/) server.
 
-Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/).
+Find more details on the [website](https://ollama4j.github.io/ollama4j/).
 
 
-
-
-
-
-
-
-
-[](https://codecov.io/gh/amithkoujalgi/ollama4j)
-
-
+
+
+
+
+[//]: # ()
+
+[//]: # ()
+
+[//]: # ()
+[//]: # ()
+[//]: # ()
+
+
+
+[](https://codecov.io/gh/ollama4j/ollama4j)
+
+
+[//]: # ()
+
+[//]: # ()
 
 ## Table of Contents
 
 - [How does it work?](#how-does-it-work)
 - [Requirements](#requirements)
 - [Installation](#installation)
-- [API Spec](#api-spec)
-- [Demo APIs](#try-out-the-apis-with-ollama-server)
+- [API Spec](https://ollama4j.github.io/ollama4j/docs/category/apis---model-management)
+- [Javadoc](https://ollama4j.github.io/ollama4j/apidocs/)
 - [Development](#development)
 - [Contributions](#get-involved)
 - [References](#references)
@@ -46,44 +61,132 @@ Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/).
 
 #### Requirements
 
 
 
-[![][ollama-shield]][ollama] **Or** [![][ollama-docker-shield]][ollama-docker]
+[![][ollama-shield]][ollama-link] **Or** [![][ollama-docker-shield]][ollama-docker]
 
-[ollama]: https://ollama.ai/
+[ollama-link]: https://ollama.ai/
 
-[ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=just-the-message&labelColor=gray
+[ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=for-the-badge&labelColor=gray
 
 [ollama-docker]: https://hub.docker.com/r/ollama/ollama
 
-[ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=just-the-message&labelColor=gray
+[ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=for-the-badge&labelColor=gray
 
-#### Installation
+## Installation
 
-In your Maven project, add this dependency:
+> [!NOTE]
+> We have migrated the package repository from Maven Central to GitHub package repository due to technical issues with
+> publishing. Please update your repository settings to get latest version of Ollama4j.
+>
+> Track the releases [here](https://github.com/ollama4j/ollama4j/releases) and update the dependency version
+> according to your requirements.
+
+### For Maven
+
+#### Using [Maven Central](https://central.sonatype.com/)
+
+[![][ollama4j-mvn-releases-shield]][ollama4j-mvn-releases-link]
+
+[ollama4j-mvn-releases-link]: https://github.com/ollama4j/ollama4j/releases
+
+[ollama4j-mvn-releases-shield]: https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20Maven%20Central%20
+
+In your Maven project, add this dependency:
 
 ```xml
 
 <dependency>
-    <groupId>io.github.amithkoujalgi</groupId>
+    <groupId>io.github.ollama4j</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.57</version>
+    <version>1.0.78</version>
 </dependency>
 ```
 
-Latest release:
-
-
-
-[![][lib-shield]][lib]
-
-[lib]: https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j
+#### Using GitHub's Maven Package Repository
+
+[![][ollama4j-releases-shield]][ollama4j-releases-link]
+
+[ollama4j-releases-link]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j/overview
+
+[ollama4j-releases-shield]: https://img.shields.io/github/v/release/ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20GitHub%20Packages%20
+
+1. Add `GitHub Maven Packages` repository to your project's `pom.xml` or your `settings.xml`:
+
+```xml
+
+<repositories>
+    <repository>
+        <id>github</id>
+        <name>GitHub Apache Maven Packages</name>
+        <url>https://maven.pkg.github.com/ollama4j/ollama4j</url>
+        <releases>
+            <enabled>true</enabled>
+        </releases>
+        <snapshots>
+            <enabled>true</enabled>
+        </snapshots>
+    </repository>
+</repositories>
+```
+
+2. Add `GitHub` server to settings.xml. (Usually available at ~/.m2/settings.xml)
+
+```xml
+
+<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
+                      http://maven.apache.org/xsd/settings-1.0.0.xsd">
+    <servers>
+        <server>
+            <id>github</id>
+            <username>YOUR-USERNAME</username>
+            <password>YOUR-TOKEN</password>
+        </server>
+    </servers>
+</settings>
+```
+
+3. In your Maven project, add this dependency:
+
+```xml
+
+<dependency>
+    <groupId>io.github.ollama4j</groupId>
+    <artifactId>ollama4j</artifactId>
+    <version>1.0.78</version>
+</dependency>
+```
+
+##### For Gradle
+
+1. Add the dependency
+
+```groovy
+dependencies {
+    implementation 'com.github.ollama4j:ollama4j:1.0.78'
+}
+```
+
+[//]: # (Latest release:)
+
+[//]: # ()
+
+[//]: # ()
+
+[//]: # ([![][lib-shield]][lib])
+
+[lib]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j
 
 [lib-shield]: https://img.shields.io/badge/ollama4j-get_latest_version-blue.svg?style=just-the-message&labelColor=gray
 
 #### API Spec
 
-Find the full API specifications on the [website](https://amithkoujalgi.github.io/ollama4j/).
+> [!TIP]
+> Find the full API specifications on the [website](https://ollama4j.github.io/ollama4j/).
 
 #### Development
 
@@ -96,23 +199,32 @@ make build
 Run unit tests:
 
 ```shell
-make ut
+make unit-tests
 ```
 
 Run integration tests:
 
 ```shell
-make it
+make integration-tests
 ```
 
 #### Releases
 
-Releases (newer artifact versions) are done automatically on pushing the code to the `main` branch through GitHub
-Actions CI workflow.
+Newer artifacts are published via GitHub Actions CI workflow when a new release is created from `main` branch.
+
+#### Who's using Ollama4j?
+
+- `Datafaker`: a library to generate fake data
+    - https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api
+- `Vaadin Web UI`: UI-Tester for Interactions with Ollama via ollama4j
+    - https://github.com/TEAMPB/ollama4j-vaadin-ui
+- `ollama-translator`: Minecraft 1.20.6 spigot plugin allows to easily break language barriers by using ollama on the
+  server to translate all messages into a specific target language.
+    - https://github.com/liebki/ollama-translator
 
 #### Traction
 
-[](https://star-history.com/#amithkoujalgi/ollama4j&Date)
+[](https://star-history.com/#ollama4j/ollama4j&Date)
@@ -124,6 +236,9 @@ Actions CI workflow.
 - [x] Use lombok
 - [x] Update request body creation with Java objects
 - [ ] Async APIs for images
+- [ ] Support for function calling with models like Mistral
+    - [x] generate in sync mode
+    - [ ] generate in async mode
 - [ ] Add custom headers to requests
 - [x] Add additional params for `ask` APIs such as:
     - [x] `options`: additional model parameters for the Modelfile such as `temperature` -
@@ -139,15 +254,51 @@ Actions CI workflow.
 
 ### Get Involved
 
+<div align="center">
+
+<a href=""></a>
+<a href=""></a>
+<a href=""></a>
+<a href=""></a>
+<a href=""></a>
+
+</div>
+
+[//]: # ()
+[//]: # ()
+[//]: # ()
+[//]: # ()
+[//]: # ()
+
 Contributions are most welcome! Whether it's reporting a bug, proposing an enhancement, or helping
 with code - any sort
 of contribution is much appreciated.
 
+### References
+
+- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)
+
 ### Credits
 
 The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/)
 project.
 
-### References
+**Thanks to the amazing contributors**
 
-- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)
+<p align="center">
+  <a href="https://github.com/ollama4j/ollama4j/graphs/contributors">
+    <img src="https://contrib.rocks/image?repo=ollama4j/ollama4j" />
+  </a>
+</p>
+
+### Appreciate my work?
+
+<p align="center">
+  <a href="https://www.buymeacoffee.com/amithkoujalgi" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a>
+</p>
````
````diff
@@ -11,7 +11,7 @@ Hey there, my fellow Java Developers! 🚀
 I am glad to announce the release of Ollama4j, a library that unites Ollama (an LLM manager and runner) and your Java
 applications! 🌐🚀
 
-👉 GitHub Repository: Ollama4j on GitHub (https://github.com/amithkoujalgi/ollama4j)
+👉 GitHub Repository: Ollama4j on GitHub (https://github.com/ollama4j/ollama4j)
 
 🌟 Key Features:
 
````

````diff
@@ -31,7 +31,7 @@ Link to [source](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md
 
 Also, see how to set those Ollama parameters using
 the `OptionsBuilder`
-from [javadoc](https://amithkoujalgi.github.io/ollama4j/apidocs/io/github/amithkoujalgi/ollama4j/core/utils/OptionsBuilder.html).
+from [javadoc](https://ollama4j.github.io/ollama4j/apidocs/io/github/ollama4j/ollama4j/core/utils/OptionsBuilder.html).
 
 ## Build an empty `Options` object
 
````
````diff
@@ -4,7 +4,7 @@ sidebar_position: 7
 
 # Chat
 
 This API lets you create a conversation with LLMs. Using this API enables you to ask questions to the model including
 information using the history of already asked questions and the respective answers.
 
 ## Create a new conversation and use chat history to augment follow up questions
@@ -20,8 +20,8 @@ public class Main {
         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
 
         // create first user question
-        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,"What is the capital of France?")
+        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
                 .build();
 
         // start conversation with model
         OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
@@ -29,7 +29,7 @@ public class Main {
         System.out.println("First answer: " + chatResult.getResponse());
 
         // create next userQuestion
-        requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER,"And what is the second largest city?").build();
+        requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?").build();
 
         // "continue" conversation with model
         chatResult = ollamaAPI.chat(requestModel);
@@ -41,32 +41,38 @@ public class Main {
     }
 }
 ```
 
 You will get a response similar to:
 
 > First answer: Should be Paris!
 >
 > Second answer: Marseille.
 >
 > Chat History:
 
 ```json
-[ {
-  "role" : "user",
-  "content" : "What is the capital of France?",
-  "images" : [ ]
-}, {
-  "role" : "assistant",
-  "content" : "Should be Paris!",
-  "images" : [ ]
-}, {
-  "role" : "user",
-  "content" : "And what is the second largest city?",
-  "images" : [ ]
-}, {
-  "role" : "assistant",
-  "content" : "Marseille.",
-  "images" : [ ]
-} ]
+[
+  {
+    "role": "user",
+    "content": "What is the capital of France?",
+    "images": []
+  },
+  {
+    "role": "assistant",
+    "content": "Should be Paris!",
+    "images": []
+  },
+  {
+    "role": "user",
+    "content": "And what is the second largest city?",
+    "images": []
+  },
+  {
+    "role": "assistant",
+    "content": "Marseille.",
+    "images": []
+  }
+]
 ```
 
 ## Create a conversation where the answer is streamed
@@ -81,30 +87,50 @@ public class Main {
         OllamaAPI ollamaAPI = new OllamaAPI(host);
         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
                         "What is the capital of France? And what's France's connection with Mona Lisa?")
                 .build();
 
         // define a handler (Consumer<String>)
         OllamaStreamHandler streamHandler = (s) -> {
             System.out.println(s);
         };
 
-        OllamaChatResult chatResult = ollamaAPI.chat(requestModel,streamHandler);
+        OllamaChatResult chatResult = ollamaAPI.chat(requestModel, streamHandler);
     }
 }
 ```
 
 You will get a response similar to:
 
 > The
 > The capital
 > The capital of
 > The capital of France
 > The capital of France is
 > The capital of France is Paris
 > The capital of France is Paris.
 
+## Use a simple Console Output Stream Handler
+
+```java
+import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler;
+
+public class Main {
+    public static void main(String[] args) throws Exception {
+        String host = "http://localhost:11434/";
+        OllamaAPI ollamaAPI = new OllamaAPI(host);
+
+        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
+        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
+                .build();
+        OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
+        ollamaAPI.chat(requestModel, streamHandler);
+    }
+}
+```
+
 ## Create a new conversation with individual system prompt
 
 ```java
 public class Main {
 
@@ -117,8 +143,8 @@ public class Main {
 
         // create request with system-prompt (overriding the model defaults) and user question
         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
-                .withMessage(OllamaChatMessageRole.USER,"What is the capital of France? And what's France's connection with Mona Lisa?")
+                .withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
                 .build();
 
         // start conversation with model
         OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
@@ -128,6 +154,7 @@ public class Main {
     }
 }
 ```
+
 You will get a response similar to:
 
 > NI.
@@ -139,34 +166,40 @@ public class Main {
 
     public static void main(String[] args) {
 
         String host = "http://localhost:11434/";
 
         OllamaAPI ollamaAPI = new OllamaAPI(host);
         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA);
 
         // Load Image from File and attach to user message (alternatively images could also be added via URL)
         OllamaChatRequestModel requestModel =
                 builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
                         List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
 
         OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
         System.out.println("First answer: " + chatResult.getResponse());
 
         builder.reset();
 
         // Use history to ask further questions about the image or assistant answer
         requestModel =
                 builder.withMessages(chatResult.getChatHistory())
                         .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();
 
         chatResult = ollamaAPI.chat(requestModel);
         System.out.println("Second answer: " + chatResult.getResponse());
     }
 }
 ```
 
 You will get a response similar to:
 
-> First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early evening, given the warm lighting and the low position of the sun in the sky.
+> First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two
+> levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and
+> comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early
+> evening, given the warm lighting and the low position of the sun in the sky.
 >
-> Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. However, the dog appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed confidently.
+> Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. However, the dog
+> appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever
+> or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed
+> confidently.
````
````diff
@@ -5,38 +5,42 @@ sidebar_position: 2
 # Generate - Async
 
 This API lets you ask questions to the LLMs in an asynchronous way.
-These APIs correlate to
-the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
+This is particularly helpful when you want to issue a generate request to the LLM and collect the response in the
+background (such as threads) without blocking your code until the response arrives from the model.
+
+This API corresponds to
+the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
 
 ```java
 public class Main {
 
-    public static void main(String[] args) {
+    public static void main(String[] args) throws Exception {
 
         String host = "http://localhost:11434/";
 
         OllamaAPI ollamaAPI = new OllamaAPI(host);
+        ollamaAPI.setRequestTimeoutSeconds(60);
+        String prompt = "List all cricket world cup teams of 2019.";
+        OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync(OllamaModelType.LLAMA3, prompt, false);
 
-        String prompt = "Who are you?";
+        // Set the poll interval according to your needs.
+        // Smaller the poll interval, more frequently you receive the tokens.
+        int pollIntervalMilliseconds = 1000;
 
-        OllamaAsyncResultCallback callback = ollamaAPI.generateAsync(OllamaModelType.LLAMA2, prompt);
-
-        while (!callback.isComplete() || !callback.getStream().isEmpty()) {
-            // poll for data from the response stream
-            String result = callback.getStream().poll();
-            if (result != null) {
-                System.out.print(result);
+        while (true) {
+            String tokens = streamer.getStream().poll();
+            System.out.print(tokens);
+            if (!streamer.isAlive()) {
+                break;
             }
-            Thread.sleep(100);
+            Thread.sleep(pollIntervalMilliseconds);
         }
+
+        System.out.println("\n------------------------");
+        System.out.println("Complete Response:");
+        System.out.println("------------------------");
+
+        System.out.println(streamer.getResult());
     }
 }
 ```
 
-You will get a response similar to:
-
-> I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational
-> manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide
-> range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that
-> require
-> natural language understanding and generation capabilities.
+You will get a streaming response.
````
````diff
@@ -1,12 +1,12 @@
 ---
-sidebar_position: 3
+sidebar_position: 4
 ---
 
 # Generate - With Image Files
 
 This API lets you ask questions along with the image files to the LLMs.
-These APIs correlate to
-the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
+This API corresponds to
+the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
 
 :::note
````

````diff
@@ -1,12 +1,12 @@
 ---
-sidebar_position: 4
+sidebar_position: 5
 ---
 
 # Generate - With Image URLs
 
 This API lets you ask questions along with the image files to the LLMs.
-These APIs correlate to
-the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
+This API corresponds to
+the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
 
 :::note
````
368
docs/docs/apis-generate/generate-with-tools.md
Normal file
368
docs/docs/apis-generate/generate-with-tools.md
Normal file
@@ -0,0 +1,368 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 3
|
||||||
|
---
|
||||||
|
|
||||||
|
# Generate - With Tools
|
||||||
|
|
||||||
|
This API lets you perform [function calling](https://docs.mistral.ai/capabilities/function_calling/) using LLMs in a
|
||||||
|
synchronous way.
|
||||||
|
This API corresponds to
|
||||||
|
the [generate](https://github.com/ollama/ollama/blob/main/docs/api.md#request-raw-mode) API with `raw` mode.
|
||||||
|
|
||||||
|
:::note
|
||||||
|
|
||||||
|
This is an only an experimental implementation and has a very basic design.
|
||||||
|
|
||||||
|
Currently, built and tested for [Mistral's latest model](https://ollama.com/library/mistral) only. We could redesign
|
||||||
|
this
|
||||||
|
in the future if tooling is supported for more models with a generic interaction standard from Ollama.
|
||||||
|
|
||||||
|
:::
|
||||||
|
|
||||||
|
### Function Calling/Tools
|
||||||
|
|
||||||
|
Assume you want to call a method in your code based on the response generated from the model.
|
||||||
|
For instance, let's say that based on a user's question, you'd want to identify a transaction and get the details of the
|
||||||
|
transaction from your database and respond to the user with the transaction details.
|
||||||
|
|
||||||
|
You could do that with ease with the `function calling` capabilities of the models by registering your `tools`.
|
||||||
|
|
||||||
|
### Create Functions
|
||||||
|
|
||||||
|
This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns
|
||||||
|
fuel price value.
|
||||||
|
|
||||||
|
```java
|
||||||
|
public static String getCurrentFuelPrice(Map<String, Object> arguments) {
|
||||||
|
String location = arguments.get("location").toString();
|
||||||
|
String fuelType = arguments.get("fuelType").toString();
|
||||||
|
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This function takes the argument `city` and performs an operation with the argument and returns the weather for a
|
||||||
|
location.
|
||||||
|
|
||||||
|
```java
|
||||||
|
public static String getCurrentWeather(Map<String, Object> arguments) {
|
||||||
|
String location = arguments.get("city").toString();
|
||||||
|
return "Currently " + location + "'s weather is nice.";
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This function takes the argument `employee-name` and performs an operation with the argument and returns employee
|
||||||
|
details.
|
||||||
|
|
||||||
|
```java
|
||||||
|
class DBQueryFunction implements ToolFunction {
|
||||||
|
@Override
|
||||||
|
public Object apply(Map<String, Object> arguments) {
|
||||||
|
// perform DB operations here
|
||||||
|
return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Define Tool Specifications
|
||||||
|
|
||||||
|
Lets define a sample tool specification called **Fuel Price Tool** for getting the current fuel price.
|
||||||
|
|
||||||
|
- Specify the function `name`, `description`, and `required` properties (`location` and `fuelType`).
|
||||||
|
- Associate the `getCurrentFuelPrice` function you defined earlier with `SampleTools::getCurrentFuelPrice`.
|
||||||
|
|
||||||
|
```java
|
||||||
|
Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
|
||||||
|
.functionName("current-fuel-price")
|
||||||
|
.functionDescription("Get current fuel price")
|
||||||
|
.properties(
|
||||||
|
new Tools.PropsBuilder()
|
||||||
|
.withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
|
||||||
|
.withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
|
||||||
|
.build()
|
||||||
|
)
|
||||||
|
.toolDefinition(SampleTools::getCurrentFuelPrice)
|
||||||
|
.build();
|
||||||
|
```
|
||||||
|
|
||||||
|
Let's also define a sample tool specification called **Weather Tool** for getting the current weather.

- Specify the function `name`, `description`, and `required` property (`city`).
- Associate the `getCurrentWeather` function you defined earlier via `SampleTools::getCurrentWeather`.

```java
Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
        .functionName("current-weather")
        .functionDescription("Get current weather")
        .properties(
                new Tools.PropsBuilder()
                        .withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
                        .build()
        )
        .toolDefinition(SampleTools::getCurrentWeather)
        .build();
```

Let's also define a sample tool specification called **DBQueryFunction** for getting employee details from the database.

- Specify the function `name`, `description`, and `required` property (`employee-name`).
- Associate the `DBQueryFunction` tool function you defined earlier via `new DBQueryFunction()`.

```java
Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
        .functionName("get-employee-details")
        .functionDescription("Get employee details from the database")
        .properties(
                new Tools.PropsBuilder()
                        .withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
                        .withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
                        .withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
                        .build()
        )
        .toolDefinition(new DBQueryFunction())
        .build();
```

### Register the Tools

Register the defined tools (`fuel price`, `weather`, and `employee details`) with the OllamaAPI.

```java
ollamaAPI.registerTool(fuelPriceToolSpecification);
ollamaAPI.registerTool(weatherToolSpecification);
ollamaAPI.registerTool(databaseQueryToolSpecification);
```

### Create prompt with Tools

`Prompt 1`: Create a prompt asking for the petrol price in Bengaluru using the defined fuel price and weather tools.

```java
String prompt1 = new Tools.PromptBuilder()
        .withToolSpecification(fuelPriceToolSpecification)
        .withToolSpecification(weatherToolSpecification)
        .withPrompt("What is the petrol price in Bengaluru?")
        .build();
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, new OptionsBuilder().build());
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
    System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
}
```

Now, fire away your question to the model.

You will get a response similar to:

::::tip[LLM Response]

[Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L

::::

`Prompt 2`: Create a prompt asking for the current weather in Bengaluru using the same tools.

```java
String prompt2 = new Tools.PromptBuilder()
        .withToolSpecification(fuelPriceToolSpecification)
        .withToolSpecification(weatherToolSpecification)
        .withPrompt("What is the current weather in Bengaluru?")
        .build();
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, new OptionsBuilder().build());
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
    System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
}
```

Again, fire away your question to the model.

You will get a response similar to:

::::tip[LLM Response]

[Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice.

::::

`Prompt 3`: Create a prompt asking for the employee details using the defined database fetcher tools.

```java
String prompt3 = new Tools.PromptBuilder()
        .withToolSpecification(fuelPriceToolSpecification)
        .withToolSpecification(weatherToolSpecification)
        .withToolSpecification(databaseQueryToolSpecification)
        .withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
        .build();
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt3, new OptionsBuilder().build());
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
    System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
}
```

Again, fire away your question to the model.

You will get a response similar to:

::::tip[LLM Response]

[Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name:
Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`

::::

### Full Example

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolInvocationException;
import io.github.amithkoujalgi.ollama4j.core.tools.OllamaToolsResult;
import io.github.amithkoujalgi.ollama4j.core.tools.ToolFunction;
import io.github.amithkoujalgi.ollama4j.core.tools.Tools;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import java.util.UUID;

public class FunctionCallingWithMistralExample {
    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);
        ollamaAPI.setRequestTimeoutSeconds(60);

        String model = "mistral";

        Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
                .functionName("current-fuel-price")
                .functionDescription("Get current fuel price")
                .properties(
                        new Tools.PropsBuilder()
                                .withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
                                .withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
                                .build()
                )
                .toolDefinition(SampleTools::getCurrentFuelPrice)
                .build();

        Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
                .functionName("current-weather")
                .functionDescription("Get current weather")
                .properties(
                        new Tools.PropsBuilder()
                                .withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
                                .build()
                )
                .toolDefinition(SampleTools::getCurrentWeather)
                .build();

        Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
                .functionName("get-employee-details")
                .functionDescription("Get employee details from the database")
                .properties(
                        new Tools.PropsBuilder()
                                .withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
                                .withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
                                .withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
                                .build()
                )
                .toolDefinition(new DBQueryFunction())
                .build();

        ollamaAPI.registerTool(fuelPriceToolSpecification);
        ollamaAPI.registerTool(weatherToolSpecification);
        ollamaAPI.registerTool(databaseQueryToolSpecification);

        String prompt1 = new Tools.PromptBuilder()
                .withToolSpecification(fuelPriceToolSpecification)
                .withToolSpecification(weatherToolSpecification)
                .withPrompt("What is the petrol price in Bengaluru?")
                .build();
        ask(ollamaAPI, model, prompt1);

        String prompt2 = new Tools.PromptBuilder()
                .withToolSpecification(fuelPriceToolSpecification)
                .withToolSpecification(weatherToolSpecification)
                .withPrompt("What is the current weather in Bengaluru?")
                .build();
        ask(ollamaAPI, model, prompt2);

        String prompt3 = new Tools.PromptBuilder()
                .withToolSpecification(fuelPriceToolSpecification)
                .withToolSpecification(weatherToolSpecification)
                .withToolSpecification(databaseQueryToolSpecification)
                .withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
                .build();
        ask(ollamaAPI, model, prompt3);
    }

    public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException {
        OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, new OptionsBuilder().build());
        for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
            System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
        }
    }
}

class SampleTools {
    public static String getCurrentFuelPrice(Map<String, Object> arguments) {
        // Get details from fuel price API
        String location = arguments.get("location").toString();
        String fuelType = arguments.get("fuelType").toString();
        return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
    }

    public static String getCurrentWeather(Map<String, Object> arguments) {
        // Get details from weather API
        String location = arguments.get("city").toString();
        return "Currently " + location + "'s weather is nice.";
    }
}

class DBQueryFunction implements ToolFunction {
    @Override
    public Object apply(Map<String, Object> arguments) {
        // perform DB operations here
        return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
    }
}
```

Run this full example and you will get a response similar to:

::::tip[LLM Response]

[Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L

[Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice.

[Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name:
Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`

::::

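Judging by its name and by the `throws` clause on the `ask` helper above, `generateWithTools` can surface a `ToolInvocationException` when a selected tool function fails while running. A small sketch of guarding against that, reusing the helper from the full example (the exact failure conditions are an assumption here):

```java
try {
    ask(ollamaAPI, model, prompt1);
} catch (ToolInvocationException e) {
    // The model picked a tool, but the mapped Java function failed while executing.
    System.err.println("Tool invocation failed: " + e.getMessage());
}
```
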
### Room for improvement

Instead of explicitly registering tools via `ollamaAPI.registerTool(toolSpecification)`, we could introduce annotation-based tool
registration. For example:

```java
@ToolSpec(name = "current-fuel-price", desc = "Get current fuel price")
public String getCurrentFuelPrice(Map<String, Object> arguments) {
    String location = arguments.get("location").toString();
    String fuelType = arguments.get("fuelType").toString();
    return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
}
```

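The `@ToolSpec` annotation used above does not exist in the library yet; purely as an illustration of the idea, a minimal sketch of what such an annotation could look like is:

```java
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

// Hypothetical annotation (not part of ollama4j today) that a future release could
// scan at runtime to build and register Tools.ToolSpecification objects automatically.
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@interface ToolSpec {
    String name();

    String desc();
}
```
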
Instead of passing a map of args (`Map<String, Object> arguments`) to the tool functions, we could support passing
specific args separately, each with its own data type. For example:

```java
public String getCurrentFuelPrice(String location, String fuelType) {
    return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
}
```

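One way such typed dispatch could be implemented under the hood is by reflecting over the tool method's parameters and pulling each argument out of the map by name. The helper below is only a sketch of that idea and is not part of ollama4j; it also assumes the code is compiled with the `-parameters` flag so parameter names survive into the bytecode.

```java
import java.lang.reflect.Method;
import java.lang.reflect.Parameter;
import java.util.Map;

// Hypothetical helper illustrating how a map of tool arguments could be mapped onto a
// strongly-typed method signature. Not part of ollama4j; shown only to sketch the idea.
class TypedToolInvoker {

    // Invokes 'method' on 'target', pulling each argument from the map by parameter name.
    // Requires compilation with `-parameters` so names are available via reflection.
    static Object invoke(Object target, Method method, Map<String, Object> arguments) throws Exception {
        Parameter[] params = method.getParameters();
        Object[] args = new Object[params.length];
        for (int i = 0; i < params.length; i++) {
            args[i] = arguments.get(params[i].getName());
        }
        return method.invoke(target, args);
    }
}
```
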
Another improvement would be updating the async and chat APIs to support tool-based generation.

@@ -5,13 +5,13 @@ sidebar_position: 1
|
|||||||
# Generate - Sync
|
# Generate - Sync
|
||||||
|
|
||||||
This API lets you ask questions to the LLMs in a synchronous way.
|
This API lets you ask questions to the LLMs in a synchronous way.
|
||||||
These APIs correlate to
|
This API corresponds to
|
||||||
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
|
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
|
||||||
|
|
||||||
Use the `OptionBuilder` to build the `Options` object
|
Use the `OptionBuilder` to build the `Options` object
|
||||||
with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
|
with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
|
||||||
Refer
|
Refer
|
||||||
to [this](/docs/apis-extras/options-builder).
|
to [this](/apis-extras/options-builder).
|
||||||
|
|
||||||
## Try asking a question about the model.
|
## Try asking a question about the model.
|
||||||
|
|
||||||
@@ -53,25 +53,26 @@ public class Main {
|
|||||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
// define a stream handler (Consumer<String>)
|
// define a stream handler (Consumer<String>)
|
||||||
OllamaStreamHandler streamHandler = (s) -> {
|
OllamaStreamHandler streamHandler = (s) -> {
|
||||||
System.out.println(s);
|
System.out.println(s);
|
||||||
};
|
};
|
||||||
|
|
||||||
// Should be called using seperate thread to gain non blocking streaming effect.
|
// Should be called using seperate thread to gain non blocking streaming effect.
|
||||||
OllamaResult result = ollamaAPI.generate(config.getModel(),
|
OllamaResult result = ollamaAPI.generate(config.getModel(),
|
||||||
"What is the capital of France? And what's France's connection with Mona Lisa?",
|
"What is the capital of France? And what's France's connection with Mona Lisa?",
|
||||||
new OptionsBuilder().build(), streamHandler);
|
new OptionsBuilder().build(), streamHandler);
|
||||||
|
|
||||||
System.out.println("Full response: " +result.getResponse());
|
System.out.println("Full response: " + result.getResponse());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
You will get a response similar to:
|
You will get a response similar to:
|
||||||
|
|
||||||
> The
|
> The
|
||||||
> The capital
|
> The capital
|
||||||
> The capital of
|
> The capital of
|
||||||
> The capital of France
|
> The capital of France
|
||||||
> The capital of France is
|
> The capital of France is
|
||||||
> The capital of France is Paris
|
> The capital of France is Paris
|
||||||
> The capital of France is Paris.
|
> The capital of France is Paris.
|
||||||
> Full response: The capital of France is Paris.
|
> Full response: The capital of France is Paris.
|
||||||
@@ -140,7 +141,7 @@ public class Main {
|
|||||||
```
|
```
|
||||||
|
|
||||||
_Note: Here I've used
|
_Note: Here I've used
|
||||||
a [sample prompt](https://github.com/amithkoujalgi/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
|
a [sample prompt](https://github.com/ollama4j/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
|
||||||
containing a database schema from within this library for demonstration purposes._
|
containing a database schema from within this library for demonstration purposes._
|
||||||
|
|
||||||
You'd then get a response from the model:
|
You'd then get a response from the model:
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
---
|
---
|
||||||
sidebar_position: 5
|
sidebar_position: 6
|
||||||
---
|
---
|
||||||
|
|
||||||
# Prompt Builder
|
# Prompt Builder
|
||||||
@@ -42,7 +42,7 @@ public class AskPhi {
|
|||||||
.addSeparator()
|
.addSeparator()
|
||||||
.add("How do I read a file in Go and print its contents to stdout?");
|
.add("How do I read a file in Go and print its contents to stdout?");
|
||||||
|
|
||||||
OllamaResult response = ollamaAPI.generate(model, promptBuilder.build());
|
OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build());
|
||||||
System.out.println(response.getResponse());
|
System.out.println(response.getResponse());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -84,7 +84,7 @@ Add the dependency to your project's `pom.xml`.
|
|||||||
</dependency>
|
</dependency>
|
||||||
```
|
```
|
||||||
|
|
||||||
Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j).
|
Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j).
|
||||||
|
|
||||||
You might want to include an implementation of [SL4J](https://www.slf4j.org/) logger in your `pom.xml` file. For
|
You might want to include an implementation of [SL4J](https://www.slf4j.org/) logger in your `pom.xml` file. For
|
||||||
example,
|
example,
|
||||||
|
|||||||
@@ -20,7 +20,7 @@ const config = {
|
|||||||
|
|
||||||
// GitHub pages deployment config.
|
// GitHub pages deployment config.
|
||||||
// If you aren't using GitHub pages, you don't need these.
|
// If you aren't using GitHub pages, you don't need these.
|
||||||
organizationName: 'amithkoujalgi', // Usually your GitHub org/user name.
|
organizationName: 'ollama4j', // Usually your GitHub org/user name.
|
||||||
projectName: 'ollama4j', // Usually your repo name.
|
projectName: 'ollama4j', // Usually your repo name.
|
||||||
|
|
||||||
onBrokenLinks: 'throw',
|
onBrokenLinks: 'throw',
|
||||||
@@ -40,18 +40,20 @@ const config = {
|
|||||||
/** @type {import('@docusaurus/preset-classic').Options} */
|
/** @type {import('@docusaurus/preset-classic').Options} */
|
||||||
({
|
({
|
||||||
docs: {
|
docs: {
|
||||||
|
path: 'docs',
|
||||||
|
routeBasePath: '', // change this to any URL route you'd want. For example: `home` - if you want /home/intro.
|
||||||
sidebarPath: './sidebars.js',
|
sidebarPath: './sidebars.js',
|
||||||
// Please change this to your repo.
|
// Please change this to your repo.
|
||||||
// Remove this to remove the "edit this page" links.
|
// Remove this to remove the "edit this page" links.
|
||||||
editUrl:
|
editUrl:
|
||||||
'https://github.com/amithkoujalgi/ollama4j/blob/main/docs',
|
'https://github.com/ollama4j/ollama4j/blob/main/docs',
|
||||||
},
|
},
|
||||||
blog: {
|
blog: {
|
||||||
showReadingTime: true,
|
showReadingTime: true,
|
||||||
// Please change this to your repo.
|
// Please change this to your repo.
|
||||||
// Remove this to remove the "edit this page" links.
|
// Remove this to remove the "edit this page" links.
|
||||||
editUrl:
|
editUrl:
|
||||||
'https://github.com/amithkoujalgi/ollama4j/blob/main/docs',
|
'https://github.com/ollama4j/ollama4j/blob/main/docs',
|
||||||
},
|
},
|
||||||
theme: {
|
theme: {
|
||||||
customCss: './src/css/custom.css',
|
customCss: './src/css/custom.css',
|
||||||
@@ -78,11 +80,11 @@ const config = {
|
|||||||
position: 'left',
|
position: 'left',
|
||||||
label: 'Docs',
|
label: 'Docs',
|
||||||
},
|
},
|
||||||
{to: 'https://amithkoujalgi.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
|
{to: 'https://ollama4j.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
|
||||||
{to: 'https://amithkoujalgi.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'},
|
{to: 'https://ollama4j.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'},
|
||||||
{to: '/blog', label: 'Blog', position: 'left'},
|
{to: '/blog', label: 'Blog', position: 'left'},
|
||||||
{
|
{
|
||||||
href: 'https://github.com/amithkoujalgi/ollama4j',
|
href: 'https://github.com/ollama4j/ollama4j',
|
||||||
label: 'GitHub',
|
label: 'GitHub',
|
||||||
position: 'right',
|
position: 'right',
|
||||||
},
|
},
|
||||||
@@ -96,7 +98,7 @@ const config = {
|
|||||||
items: [
|
items: [
|
||||||
{
|
{
|
||||||
label: 'Tutorial',
|
label: 'Tutorial',
|
||||||
to: '/docs/intro',
|
to: '/intro',
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
@@ -122,7 +124,7 @@ const config = {
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
label: 'GitHub',
|
label: 'GitHub',
|
||||||
href: 'https://github.com/amithkoujalgi/ollama4j',
|
href: 'https://github.com/ollama4j/ollama4j',
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
|
|||||||
1947
docs/package-lock.json
generated
1947
docs/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -14,9 +14,9 @@
|
|||||||
"write-heading-ids": "docusaurus write-heading-ids"
|
"write-heading-ids": "docusaurus write-heading-ids"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@docusaurus/core": "3.0.1",
|
"@docusaurus/core": "^3.4.0",
|
||||||
"@docusaurus/preset-classic": "3.0.1",
|
"@docusaurus/preset-classic": "^3.4.0",
|
||||||
"@docusaurus/theme-mermaid": "^3.0.1",
|
"@docusaurus/theme-mermaid": "^3.4.0",
|
||||||
"@mdx-js/react": "^3.0.0",
|
"@mdx-js/react": "^3.0.0",
|
||||||
"clsx": "^2.0.0",
|
"clsx": "^2.0.0",
|
||||||
"prism-react-renderer": "^2.3.0",
|
"prism-react-renderer": "^2.3.0",
|
||||||
@@ -24,8 +24,8 @@
|
|||||||
"react-dom": "^18.0.0"
|
"react-dom": "^18.0.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@docusaurus/module-type-aliases": "3.0.1",
|
"@docusaurus/module-type-aliases": "^3.4.0",
|
||||||
"@docusaurus/types": "3.0.1"
|
"@docusaurus/types": "^3.4.0"
|
||||||
},
|
},
|
||||||
"browserslist": {
|
"browserslist": {
|
||||||
"production": [
|
"production": [
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ function HomepageHeader() {
|
|||||||
<div className={styles.buttons}>
|
<div className={styles.buttons}>
|
||||||
<Link
|
<Link
|
||||||
className="button button--secondary button--lg"
|
className="button button--secondary button--lg"
|
||||||
to="/docs/intro">
|
to="/intro">
|
||||||
Getting Started
|
Getting Started
|
||||||
</Link>
|
</Link>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
106
pom.xml
106
pom.xml
@@ -1,14 +1,16 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8"?>
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
<modelVersion>4.0.0</modelVersion>
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
|
||||||
<groupId>io.github.amithkoujalgi</groupId>
|
<groupId>io.github.ollama4j</groupId>
|
||||||
<artifactId>ollama4j</artifactId>
|
<artifactId>ollama4j</artifactId>
|
||||||
<version>1.0.58</version>
|
<version>ollama4j-revision</version>
|
||||||
|
|
||||||
<name>Ollama4j</name>
|
<name>Ollama4j</name>
|
||||||
<description>Java library for interacting with Ollama API.</description>
|
<description>Java library for interacting with Ollama API.</description>
|
||||||
<url>https://github.com/amithkoujalgi/ollama4j</url>
|
<url>https://github.com/ollama4j/ollama4j</url>
|
||||||
|
<packaging>jar</packaging>
|
||||||
|
|
||||||
<properties>
|
<properties>
|
||||||
<maven.compiler.source>11</maven.compiler.source>
|
<maven.compiler.source>11</maven.compiler.source>
|
||||||
@@ -31,15 +33,15 @@
|
|||||||
<licenses>
|
<licenses>
|
||||||
<license>
|
<license>
|
||||||
<name>MIT License</name>
|
<name>MIT License</name>
|
||||||
<url>https://raw.githubusercontent.com/amithkoujalgi/ollama4j/main/LICENSE</url>
|
<url>https://raw.githubusercontent.com/ollama4j/ollama4j/main/LICENSE</url>
|
||||||
</license>
|
</license>
|
||||||
</licenses>
|
</licenses>
|
||||||
|
|
||||||
<scm>
|
<scm>
|
||||||
<connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
|
<connection>scm:git:git@github.com:ollama4j/ollama4j.git</connection>
|
||||||
<developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
|
<developerConnection>scm:git:https://github.com/ollama4j/ollama4j.git</developerConnection>
|
||||||
<url>https://github.com/amithkoujalgi/ollama4j</url>
|
<url>https://github.com/ollama4j/ollama4j</url>
|
||||||
<tag>v1.0.58</tag>
|
<tag>ollama4j-revision</tag>
|
||||||
</scm>
|
</scm>
|
||||||
|
|
||||||
<build>
|
<build>
|
||||||
@@ -70,27 +72,7 @@
|
|||||||
</execution>
|
</execution>
|
||||||
</executions>
|
</executions>
|
||||||
</plugin>
|
</plugin>
|
||||||
<!-- <plugin>-->
|
|
||||||
<!-- <groupId>org.apache.maven.plugins</groupId>-->
|
|
||||||
<!-- <artifactId>maven-gpg-plugin</artifactId>-->
|
|
||||||
<!-- <version>1.5</version>-->
|
|
||||||
<!-- <executions>-->
|
|
||||||
<!-- <execution>-->
|
|
||||||
<!-- <id>sign-artifacts</id>-->
|
|
||||||
<!-- <phase>verify</phase>-->
|
|
||||||
<!-- <goals>-->
|
|
||||||
<!-- <goal>sign</goal>-->
|
|
||||||
<!-- </goals>-->
|
|
||||||
<!-- <configuration>-->
|
|
||||||
<!-- <!– This is necessary for gpg to not try to use the pinentry programs –>-->
|
|
||||||
<!-- <gpgArguments>-->
|
|
||||||
<!-- <arg>--pinentry-mode</arg>-->
|
|
||||||
<!-- <arg>loopback</arg>-->
|
|
||||||
<!-- </gpgArguments>-->
|
|
||||||
<!-- </configuration>-->
|
|
||||||
<!-- </execution>-->
|
|
||||||
<!-- </executions>-->
|
|
||||||
<!-- </plugin>-->
|
|
||||||
<!-- Surefire Plugin for Unit Tests -->
|
<!-- Surefire Plugin for Unit Tests -->
|
||||||
<plugin>
|
<plugin>
|
||||||
<groupId>org.apache.maven.plugins</groupId>
|
<groupId>org.apache.maven.plugins</groupId>
|
||||||
@@ -127,15 +109,23 @@
|
|||||||
</execution>
|
</execution>
|
||||||
</executions>
|
</executions>
|
||||||
</plugin>
|
</plugin>
|
||||||
|
|
||||||
|
|
||||||
<plugin>
|
<plugin>
|
||||||
<groupId>org.apache.maven.plugins</groupId>
|
<groupId>org.apache.maven.plugins</groupId>
|
||||||
<artifactId>maven-release-plugin</artifactId>
|
<artifactId>maven-gpg-plugin</artifactId>
|
||||||
<version>3.0.1</version>
|
<version>1.5</version>
|
||||||
<configuration>
|
<executions>
|
||||||
<!-- <goals>install</goals>-->
|
<execution>
|
||||||
<tagNameFormat>v@{project.version}</tagNameFormat>
|
<id>sign-artifacts</id>
|
||||||
</configuration>
|
<phase>verify</phase>
|
||||||
|
<goals>
|
||||||
|
<goal>sign</goal>
|
||||||
|
</goals>
|
||||||
|
</execution>
|
||||||
|
</executions>
|
||||||
</plugin>
|
</plugin>
|
||||||
|
|
||||||
</plugins>
|
</plugins>
|
||||||
</build>
|
</build>
|
||||||
|
|
||||||
@@ -149,12 +139,17 @@
|
|||||||
<dependency>
|
<dependency>
|
||||||
<groupId>com.fasterxml.jackson.core</groupId>
|
<groupId>com.fasterxml.jackson.core</groupId>
|
||||||
<artifactId>jackson-databind</artifactId>
|
<artifactId>jackson-databind</artifactId>
|
||||||
<version>2.15.3</version>
|
<version>2.17.1</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||||
|
<artifactId>jackson-datatype-jsr310</artifactId>
|
||||||
|
<version>2.17.1</version>
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>ch.qos.logback</groupId>
|
<groupId>ch.qos.logback</groupId>
|
||||||
<artifactId>logback-classic</artifactId>
|
<artifactId>logback-classic</artifactId>
|
||||||
<version>1.4.12</version>
|
<version>1.5.6</version>
|
||||||
<scope>test</scope>
|
<scope>test</scope>
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
@@ -183,17 +178,38 @@
|
|||||||
</dependencies>
|
</dependencies>
|
||||||
|
|
||||||
<distributionManagement>
|
<distributionManagement>
|
||||||
<snapshotRepository>
|
|
||||||
<id>ossrh</id>
|
|
||||||
<url>https://s01.oss.sonatype.org/content/repositories/snapshots</url>
|
|
||||||
</snapshotRepository>
|
|
||||||
<repository>
|
<repository>
|
||||||
<id>ossrh</id>
|
<id>mvn-repo-id</id>
|
||||||
<url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2</url>
|
|
||||||
</repository>
|
</repository>
|
||||||
</distributionManagement>
|
</distributionManagement>
|
||||||
|
|
||||||
<profiles>
|
<profiles>
|
||||||
|
<profile>
|
||||||
|
<id>ossrh</id>
|
||||||
|
<activation>
|
||||||
|
<activeByDefault>true</activeByDefault>
|
||||||
|
</activation>
|
||||||
|
<properties>
|
||||||
|
<gpg.executable>gpg2</gpg.executable>
|
||||||
|
<test.env>unit</test.env>
|
||||||
|
<skipUnitTests>false</skipUnitTests>
|
||||||
|
<skipIntegrationTests>true</skipIntegrationTests>
|
||||||
|
</properties>
|
||||||
|
<build>
|
||||||
|
<plugins>
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.sonatype.central</groupId>
|
||||||
|
<artifactId>central-publishing-maven-plugin</artifactId>
|
||||||
|
<version>0.5.0</version>
|
||||||
|
<extensions>true</extensions>
|
||||||
|
<configuration>
|
||||||
|
<publishingServerId>mvn-repo-id</publishingServerId>
|
||||||
|
<autoPublish>true</autoPublish>
|
||||||
|
</configuration>
|
||||||
|
</plugin>
|
||||||
|
</plugins>
|
||||||
|
</build>
|
||||||
|
</profile>
|
||||||
<profile>
|
<profile>
|
||||||
<id>unit-tests</id>
|
<id>unit-tests</id>
|
||||||
<properties>
|
<properties>
|
||||||
@@ -202,7 +218,7 @@
|
|||||||
<skipIntegrationTests>true</skipIntegrationTests>
|
<skipIntegrationTests>true</skipIntegrationTests>
|
||||||
</properties>
|
</properties>
|
||||||
<activation>
|
<activation>
|
||||||
<activeByDefault>true</activeByDefault>
|
<activeByDefault>false</activeByDefault>
|
||||||
</activation>
|
</activation>
|
||||||
<build>
|
<build>
|
||||||
<plugins>
|
<plugins>
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,18 @@
|
|||||||
|
package io.github.amithkoujalgi.ollama4j.core;
|
||||||
|
|
||||||
|
import java.util.Iterator;
|
||||||
|
import java.util.LinkedList;
|
||||||
|
import java.util.Queue;
|
||||||
|
|
||||||
|
public class OllamaResultStream extends LinkedList<String> implements Queue<String> {
|
||||||
|
@Override
|
||||||
|
public String poll() {
|
||||||
|
StringBuilder tokens = new StringBuilder();
|
||||||
|
Iterator<String> iterator = this.listIterator();
|
||||||
|
while (iterator.hasNext()) {
|
||||||
|
tokens.append(iterator.next());
|
||||||
|
iterator.remove();
|
||||||
|
}
|
||||||
|
return tokens.toString();
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core;
|
|
||||||
|
|
||||||
import java.util.function.Consumer;
|
|
||||||
|
|
||||||
public interface OllamaStreamHandler extends Consumer<String>{
|
|
||||||
void accept(String message);
|
|
||||||
}
|
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
package io.github.amithkoujalgi.ollama4j.core.exceptions;
|
||||||
|
|
||||||
|
public class ToolInvocationException extends Exception {
|
||||||
|
|
||||||
|
public ToolInvocationException(String s, Exception e) {
|
||||||
|
super(s, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
package io.github.amithkoujalgi.ollama4j.core.exceptions;
|
||||||
|
|
||||||
|
public class ToolNotFoundException extends Exception {
|
||||||
|
|
||||||
|
public ToolNotFoundException(String s) {
|
||||||
|
super(s);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,14 @@
|
|||||||
|
package io.github.amithkoujalgi.ollama4j.core.impl;
|
||||||
|
|
||||||
|
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler;
|
||||||
|
|
||||||
|
public class ConsoleOutputStreamHandler implements OllamaStreamHandler {
|
||||||
|
private final StringBuffer response = new StringBuffer();
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void accept(String message) {
|
||||||
|
String substr = message.substring(response.length());
|
||||||
|
response.append(substr);
|
||||||
|
System.out.print(substr);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,5 +1,8 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
package io.github.amithkoujalgi.ollama4j.core.models;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.time.OffsetDateTime;
|
||||||
|
|
||||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
||||||
@@ -11,7 +14,9 @@ public class Model {
|
|||||||
private String name;
|
private String name;
|
||||||
private String model;
|
private String model;
|
||||||
@JsonProperty("modified_at")
|
@JsonProperty("modified_at")
|
||||||
private String modifiedAt;
|
private OffsetDateTime modifiedAt;
|
||||||
|
@JsonProperty("expires_at")
|
||||||
|
private OffsetDateTime expiresAt;
|
||||||
private String digest;
|
private String digest;
|
||||||
private long size;
|
private long size;
|
||||||
@JsonProperty("details")
|
@JsonProperty("details")
|
||||||
|
|||||||
@@ -1,143 +0,0 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
|
||||||
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
|
||||||
import java.io.BufferedReader;
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.io.InputStream;
|
|
||||||
import java.io.InputStreamReader;
|
|
||||||
import java.net.http.HttpClient;
|
|
||||||
import java.net.http.HttpRequest;
|
|
||||||
import java.net.http.HttpResponse;
|
|
||||||
import java.nio.charset.StandardCharsets;
|
|
||||||
import java.time.Duration;
|
|
||||||
import java.util.LinkedList;
|
|
||||||
import java.util.Queue;
|
|
||||||
import lombok.Data;
|
|
||||||
import lombok.EqualsAndHashCode;
|
|
||||||
import lombok.Getter;
|
|
||||||
|
|
||||||
@Data
|
|
||||||
@EqualsAndHashCode(callSuper = true)
|
|
||||||
@SuppressWarnings("unused")
|
|
||||||
public class OllamaAsyncResultCallback extends Thread {
|
|
||||||
private final HttpRequest.Builder requestBuilder;
|
|
||||||
private final OllamaGenerateRequestModel ollamaRequestModel;
|
|
||||||
private final Queue<String> queue = new LinkedList<>();
|
|
||||||
private String result;
|
|
||||||
private boolean isDone;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* -- GETTER -- Returns the status of the request. Indicates if the request was successful or a
|
|
||||||
* failure. If the request was a failure, the `getResponse()` method will return the error
|
|
||||||
* message.
|
|
||||||
*/
|
|
||||||
@Getter private boolean succeeded;
|
|
||||||
|
|
||||||
private long requestTimeoutSeconds;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama
|
|
||||||
* server.
|
|
||||||
*/
|
|
||||||
@Getter private int httpStatusCode;
|
|
||||||
|
|
||||||
/** -- GETTER -- Returns the response time in milliseconds. */
|
|
||||||
@Getter private long responseTime = 0;
|
|
||||||
|
|
||||||
public OllamaAsyncResultCallback(
|
|
||||||
HttpRequest.Builder requestBuilder,
|
|
||||||
OllamaGenerateRequestModel ollamaRequestModel,
|
|
||||||
long requestTimeoutSeconds) {
|
|
||||||
this.requestBuilder = requestBuilder;
|
|
||||||
this.ollamaRequestModel = ollamaRequestModel;
|
|
||||||
this.isDone = false;
|
|
||||||
this.result = "";
|
|
||||||
this.queue.add("");
|
|
||||||
this.requestTimeoutSeconds = requestTimeoutSeconds;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void run() {
|
|
||||||
HttpClient httpClient = HttpClient.newHttpClient();
|
|
||||||
try {
|
|
||||||
long startTime = System.currentTimeMillis();
|
|
||||||
HttpRequest request =
|
|
||||||
requestBuilder
|
|
||||||
.POST(
|
|
||||||
HttpRequest.BodyPublishers.ofString(
|
|
||||||
Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)))
|
|
||||||
.header("Content-Type", "application/json")
|
|
||||||
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
|
|
||||||
.build();
|
|
||||||
HttpResponse<InputStream> response =
|
|
||||||
httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
|
|
||||||
int statusCode = response.statusCode();
|
|
||||||
this.httpStatusCode = statusCode;
|
|
||||||
|
|
||||||
InputStream responseBodyStream = response.body();
|
|
||||||
try (BufferedReader reader =
|
|
||||||
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
|
|
||||||
String line;
|
|
||||||
StringBuilder responseBuffer = new StringBuilder();
|
|
||||||
while ((line = reader.readLine()) != null) {
|
|
||||||
if (statusCode == 404) {
|
|
||||||
OllamaErrorResponseModel ollamaResponseModel =
|
|
||||||
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
|
|
||||||
queue.add(ollamaResponseModel.getError());
|
|
||||||
responseBuffer.append(ollamaResponseModel.getError());
|
|
||||||
} else {
|
|
||||||
OllamaGenerateResponseModel ollamaResponseModel =
|
|
||||||
Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
|
|
||||||
queue.add(ollamaResponseModel.getResponse());
|
|
||||||
if (!ollamaResponseModel.isDone()) {
|
|
||||||
responseBuffer.append(ollamaResponseModel.getResponse());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
this.isDone = true;
|
|
||||||
this.succeeded = true;
|
|
||||||
this.result = responseBuffer.toString();
|
|
||||||
long endTime = System.currentTimeMillis();
|
|
||||||
responseTime = endTime - startTime;
|
|
||||||
}
|
|
||||||
if (statusCode != 200) {
|
|
||||||
throw new OllamaBaseException(this.result);
|
|
||||||
}
|
|
||||||
} catch (IOException | InterruptedException | OllamaBaseException e) {
|
|
||||||
this.isDone = true;
|
|
||||||
this.succeeded = false;
|
|
||||||
this.result = "[FAILED] " + e.getMessage();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the status of the thread. This does not indicate that the request was successful or a
|
|
||||||
* failure, rather it is just a status flag to indicate if the thread is active or ended.
|
|
||||||
*
|
|
||||||
* @return boolean - status
|
|
||||||
*/
|
|
||||||
public boolean isComplete() {
|
|
||||||
return isDone;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the final completion/response when the execution completes. Does not return intermediate results.
|
|
||||||
*
|
|
||||||
* @return String completion/response text
|
|
||||||
*/
|
|
||||||
public String getResponse() {
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Queue<String> getStream() {
|
|
||||||
return queue;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setRequestTimeoutSeconds(long requestTimeoutSeconds) {
|
|
||||||
this.requestTimeoutSeconds = requestTimeoutSeconds;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -0,0 +1,124 @@
|
|||||||
|
package io.github.amithkoujalgi.ollama4j.core.models;
|
||||||
|
|
||||||
|
import io.github.amithkoujalgi.ollama4j.core.OllamaResultStream;
|
||||||
|
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
|
||||||
|
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
|
||||||
|
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
|
||||||
|
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.EqualsAndHashCode;
|
||||||
|
import lombok.Getter;
|
||||||
|
import lombok.Setter;
|
||||||
|
|
||||||
|
import java.io.BufferedReader;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.io.InputStream;
|
||||||
|
import java.io.InputStreamReader;
|
||||||
|
import java.net.http.HttpClient;
|
||||||
|
import java.net.http.HttpRequest;
|
||||||
|
import java.net.http.HttpResponse;
|
||||||
|
import java.nio.charset.StandardCharsets;
|
||||||
|
import java.time.Duration;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@EqualsAndHashCode(callSuper = true)
|
||||||
|
@SuppressWarnings("unused")
|
||||||
|
public class OllamaAsyncResultStreamer extends Thread {
|
||||||
|
private final HttpRequest.Builder requestBuilder;
|
||||||
|
private final OllamaGenerateRequestModel ollamaRequestModel;
|
||||||
|
private final OllamaResultStream stream = new OllamaResultStream();
|
||||||
|
private String completeResponse;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* -- GETTER -- Returns the status of the request. Indicates if the request was successful or a
|
||||||
|
* failure. If the request was a failure, the `getResponse()` method will return the error
|
||||||
|
* message.
|
||||||
|
*/
|
||||||
|
@Getter
|
||||||
|
private boolean succeeded;
|
||||||
|
|
||||||
|
@Setter
|
||||||
|
private long requestTimeoutSeconds;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama
|
||||||
|
* server.
|
||||||
|
*/
|
||||||
|
@Getter
|
||||||
|
private int httpStatusCode;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* -- GETTER -- Returns the response time in milliseconds.
|
||||||
|
*/
|
||||||
|
@Getter
|
||||||
|
private long responseTime = 0;
|
||||||
|
|
||||||
|
public OllamaAsyncResultStreamer(
|
||||||
|
HttpRequest.Builder requestBuilder,
|
||||||
|
OllamaGenerateRequestModel ollamaRequestModel,
|
||||||
|
long requestTimeoutSeconds) {
|
||||||
|
this.requestBuilder = requestBuilder;
|
||||||
|
this.ollamaRequestModel = ollamaRequestModel;
|
||||||
|
this.completeResponse = "";
|
||||||
|
this.stream.add("");
|
||||||
|
this.requestTimeoutSeconds = requestTimeoutSeconds;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void run() {
|
||||||
|
ollamaRequestModel.setStream(true);
|
||||||
|
HttpClient httpClient = HttpClient.newHttpClient();
|
||||||
|
try {
|
||||||
|
long startTime = System.currentTimeMillis();
|
||||||
|
HttpRequest request =
|
||||||
|
requestBuilder
|
||||||
|
.POST(
|
||||||
|
HttpRequest.BodyPublishers.ofString(
|
||||||
|
Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)))
|
||||||
|
.header("Content-Type", "application/json")
|
||||||
|
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
|
||||||
|
.build();
|
||||||
|
HttpResponse<InputStream> response =
|
||||||
|
httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
|
||||||
|
int statusCode = response.statusCode();
|
||||||
|
this.httpStatusCode = statusCode;
|
||||||
|
|
||||||
|
InputStream responseBodyStream = response.body();
|
||||||
|
try (BufferedReader reader =
|
||||||
|
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
|
||||||
|
String line;
|
||||||
|
StringBuilder responseBuffer = new StringBuilder();
|
||||||
|
while ((line = reader.readLine()) != null) {
|
||||||
|
if (statusCode == 404) {
|
||||||
|
OllamaErrorResponseModel ollamaResponseModel =
|
||||||
|
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
|
||||||
|
stream.add(ollamaResponseModel.getError());
|
||||||
|
responseBuffer.append(ollamaResponseModel.getError());
|
||||||
|
} else {
|
||||||
|
OllamaGenerateResponseModel ollamaResponseModel =
|
||||||
|
Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
|
||||||
|
String res = ollamaResponseModel.getResponse();
|
||||||
|
stream.add(res);
|
||||||
|
if (!ollamaResponseModel.isDone()) {
|
||||||
|
responseBuffer.append(res);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.succeeded = true;
|
||||||
|
this.completeResponse = responseBuffer.toString();
|
||||||
|
long endTime = System.currentTimeMillis();
|
||||||
|
responseTime = endTime - startTime;
|
||||||
|
}
|
||||||
|
if (statusCode != 200) {
|
||||||
|
throw new OllamaBaseException(this.completeResponse);
|
||||||
|
}
|
||||||
|
} catch (IOException | InterruptedException | OllamaBaseException e) {
|
||||||
|
this.succeeded = false;
|
||||||
|
this.completeResponse = "[FAILED] " + e.getMessage();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
@@ -1,14 +1,15 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models.chat;
|
package io.github.amithkoujalgi.ollama4j.core.models.chat;
|
||||||
|
|
||||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import lombok.Data;
|
||||||
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import lombok.Data;
|
|
||||||
|
|
||||||
@Data
|
@Data
|
||||||
public class OllamaChatResponseModel {
|
public class OllamaChatResponseModel {
|
||||||
private String model;
|
private String model;
|
||||||
private @JsonProperty("created_at") String createdAt;
|
private @JsonProperty("created_at") String createdAt;
|
||||||
|
private @JsonProperty("done_reason") String doneReason;
|
||||||
private OllamaChatMessage message;
|
private OllamaChatMessage message;
|
||||||
private boolean done;
|
private boolean done;
|
||||||
private String error;
|
private String error;
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models.chat;
|
package io.github.amithkoujalgi.ollama4j.core.models.chat;
|
||||||
|
|
||||||
|
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
|
|
||||||
|
|
||||||
public class OllamaChatStreamObserver {
|
public class OllamaChatStreamObserver {
|
||||||
|
|
||||||
private OllamaStreamHandler streamHandler;
|
private OllamaStreamHandler streamHandler;
|
||||||
@@ -17,12 +17,12 @@ public class OllamaChatStreamObserver {
|
|||||||
this.streamHandler = streamHandler;
|
this.streamHandler = streamHandler;
|
||||||
}
|
}
|
||||||
|
|
||||||
public void notify(OllamaChatResponseModel currentResponsePart){
|
public void notify(OllamaChatResponseModel currentResponsePart) {
|
||||||
responseParts.add(currentResponsePart);
|
responseParts.add(currentResponsePart);
|
||||||
handleCurrentResponsePart(currentResponsePart);
|
handleCurrentResponsePart(currentResponsePart);
|
||||||
}
|
}
|
||||||
|
|
||||||
protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart){
|
protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart) {
|
||||||
message = message + currentResponsePart.getMessage().getContent();
|
message = message + currentResponsePart.getMessage().getContent();
|
||||||
streamHandler.accept(message);
|
streamHandler.accept(message);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,8 +3,6 @@ package io.github.amithkoujalgi.ollama4j.core.models.generate;
|
|||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
|
|
||||||
|
|
||||||
public class OllamaGenerateStreamObserver {
|
public class OllamaGenerateStreamObserver {
|
||||||
|
|
||||||
private OllamaStreamHandler streamHandler;
|
private OllamaStreamHandler streamHandler;
|
||||||
@@ -17,12 +15,12 @@ public class OllamaGenerateStreamObserver {
|
|||||||
this.streamHandler = streamHandler;
|
this.streamHandler = streamHandler;
|
||||||
}
|
}
|
||||||
|
|
||||||
public void notify(OllamaGenerateResponseModel currentResponsePart){
|
public void notify(OllamaGenerateResponseModel currentResponsePart) {
|
||||||
responseParts.add(currentResponsePart);
|
responseParts.add(currentResponsePart);
|
||||||
handleCurrentResponsePart(currentResponsePart);
|
handleCurrentResponsePart(currentResponsePart);
|
||||||
}
|
}
|
||||||
|
|
||||||
protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart){
|
protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart) {
|
||||||
message = message + currentResponsePart.getResponse();
|
message = message + currentResponsePart.getResponse();
|
||||||
streamHandler.accept(message);
|
streamHandler.accept(message);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,7 @@
|
|||||||
|
package io.github.amithkoujalgi.ollama4j.core.models.generate;
|
||||||
|
|
||||||
|
import java.util.function.Consumer;
|
||||||
|
|
||||||
|
public interface OllamaStreamHandler extends Consumer<String> {
|
||||||
|
void accept(String message);
|
||||||
|
}
|
||||||
@@ -1,25 +1,23 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models.request;
|
package io.github.amithkoujalgi.ollama4j.core.models.request;
|
||||||
|
|
||||||
import java.io.IOException;
|
|
||||||
|
|
||||||
import org.slf4j.Logger;
|
|
||||||
import org.slf4j.LoggerFactory;
|
|
||||||
|
|
||||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
|
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
|
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
|
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel;
|
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel;
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver;
|
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver;
|
||||||
|
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler;
|
||||||
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
|
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
|
||||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Specialization class for requests
|
* Specialization class for requests
|
||||||
*/
|
*/
|
||||||
public class OllamaChatEndpointCaller extends OllamaEndpointCaller{
|
public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
|
||||||
|
|
||||||
private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class);
|
private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class);
|
||||||
|
|
||||||
@@ -39,14 +37,14 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{
|
|||||||
try {
|
try {
|
||||||
OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
|
OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
|
||||||
responseBuffer.append(ollamaResponseModel.getMessage().getContent());
|
responseBuffer.append(ollamaResponseModel.getMessage().getContent());
|
||||||
if(streamObserver != null) {
|
if (streamObserver != null) {
|
||||||
streamObserver.notify(ollamaResponseModel);
|
streamObserver.notify(ollamaResponseModel);
|
||||||
}
|
}
|
||||||
return ollamaResponseModel.isDone();
|
return ollamaResponseModel.isDone();
|
||||||
} catch (JsonProcessingException e) {
|
} catch (JsonProcessingException e) {
|
||||||
LOG.error("Error parsing the Ollama chat response!",e);
|
LOG.error("Error parsing the Ollama chat response!", e);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
|
public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
|
||||||
@@ -54,7 +52,4 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{
|
|||||||
streamObserver = new OllamaChatStreamObserver(streamHandler);
|
streamObserver = new OllamaChatStreamObserver(streamHandler);
|
||||||
return super.callSync(body);
|
return super.callSync(body);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,15 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
@@ -12,22 +22,11 @@ import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Base64;

/**
 * Abstract helper class that calls the Ollama API server.
 */
public abstract class OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaAPI.class);

    private String host;
@@ -49,107 +48,105 @@ public abstract class OllamaEndpointCaller {

    /**
     * Calls the API server on the given host and endpoint suffix synchronously, i.e. waiting for the response.
     *
     * @param body POST body payload
     * @return result answer given by the assistant
     * @throws OllamaBaseException  in case a response code other than 200 has been returned
     * @throws IOException          in case the response stream cannot be read
     * @throws InterruptedException in case the server is not reachable or network issues happen
     */
    public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException {

        // Create Request
        long startTime = System.currentTimeMillis();
        HttpClient httpClient = HttpClient.newHttpClient();
        URI uri = URI.create(this.host + getEndpointSuffix());
        HttpRequest.Builder requestBuilder =
                getRequestBuilderDefault(uri)
                        .POST(
                                body.getBodyPublisher());
        HttpRequest request = requestBuilder.build();
        if (this.verbose) LOG.info("Asking model: " + body.toString());
        HttpResponse<InputStream> response =
                httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());

        int statusCode = response.statusCode();
        InputStream responseBodyStream = response.body();
        StringBuilder responseBuffer = new StringBuilder();
        try (BufferedReader reader =
                new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                if (statusCode == 404) {
                    LOG.warn("Status code: 404 (Not Found)");
                    OllamaErrorResponseModel ollamaResponseModel =
                            Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else if (statusCode == 401) {
                    LOG.warn("Status code: 401 (Unauthorized)");
                    OllamaErrorResponseModel ollamaResponseModel =
                            Utils.getObjectMapper()
                                    .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else if (statusCode == 400) {
                    LOG.warn("Status code: 400 (Bad Request)");
                    OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line,
                            OllamaErrorResponseModel.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else {
                    boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);
                    if (finished) {
                        break;
                    }
                }
            }
        }

        if (statusCode != 200) {
            LOG.error("Status code " + statusCode);
            throw new OllamaBaseException(responseBuffer.toString());
        } else {
            long endTime = System.currentTimeMillis();
            OllamaResult ollamaResult =
                    new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode);
            if (verbose) LOG.info("Model response: " + ollamaResult);
            return ollamaResult;
        }
    }

    /**
     * Get default request builder.
     *
     * @param uri URI to get a HttpRequest.Builder
     * @return HttpRequest.Builder
     */
    private HttpRequest.Builder getRequestBuilderDefault(URI uri) {
        HttpRequest.Builder requestBuilder =
                HttpRequest.newBuilder(uri)
                        .header("Content-Type", "application/json")
                        .timeout(Duration.ofSeconds(this.requestTimeoutSeconds));
        if (isBasicAuthCredentialsSet()) {
            requestBuilder.header("Authorization", getBasicAuthHeaderValue());
        }
        return requestBuilder;
    }

    /**
     * Get basic authentication header value.
     *
     * @return basic authentication header value (encoded credentials)
     */
    private String getBasicAuthHeaderValue() {
        String credentialsToEncode = this.basicAuth.getUsername() + ":" + this.basicAuth.getPassword();
        return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes());
    }

    /**
     * Check if Basic Auth credentials are set.
     *
     * @return true when Basic Auth credentials are set
     */
    private boolean isBasicAuthCredentialsSet() {
        return this.basicAuth != null;
    }

}

@@ -1,27 +1,27 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;

import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;

public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class);

    private OllamaGenerateStreamObserver streamObserver;

    public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        super(host, basicAuth, requestTimeoutSeconds, verbose);
    }

    @Override
@@ -31,24 +31,22 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{

    @Override
    protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
        try {
            OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
            responseBuffer.append(ollamaResponseModel.getResponse());
            if (streamObserver != null) {
                streamObserver.notify(ollamaResponseModel);
            }
            return ollamaResponseModel.isDone();
        } catch (JsonProcessingException e) {
            LOG.error("Error parsing the Ollama generate response!", e);
            return true;
        }
    }

    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
            throws OllamaBaseException, IOException, InterruptedException {
        streamObserver = new OllamaGenerateStreamObserver(streamHandler);
        return super.callSync(body);
    }

}

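For orientation, a minimal usage sketch of this caller (not part of the diff; the host, timeout and handler below are illustrative assumptions, and it assumes OllamaStreamHandler is the single-method String callback the integration tests further down pass as a lambda). The handler receives the response text accumulated so far, which is why those tests diff each callback payload against the previous buffer:

    // Sketch: stream a generation and print every callback payload.
    static OllamaResult streamGenerate(OllamaRequestBody body)
            throws OllamaBaseException, IOException, InterruptedException {
        OllamaGenerateEndpointCaller caller =
                new OllamaGenerateEndpointCaller("http://localhost:11434", null, 60, true); // null = no Basic Auth
        OllamaStreamHandler handler = (s) -> System.out.println(s); // accumulated response so far
        return caller.call(body, handler);
    }
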
@@ -0,0 +1,35 @@
package io.github.amithkoujalgi.ollama4j.core.tools;

import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

@Data
@NoArgsConstructor
@AllArgsConstructor
public class OllamaToolsResult {
    private OllamaResult modelResult;
    private Map<ToolFunctionCallSpec, Object> toolResults;

    public List<ToolResult> getToolResults() {
        List<ToolResult> results = new ArrayList<>();
        for (Map.Entry<ToolFunctionCallSpec, Object> r : this.toolResults.entrySet()) {
            results.add(new ToolResult(r.getKey().getName(), r.getKey().getArguments(), r.getValue()));
        }
        return results;
    }

    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class ToolResult {
        private String functionName;
        private Map<String, Object> functionArguments;
        private Object result;
    }
}

@@ -0,0 +1,8 @@
package io.github.amithkoujalgi.ollama4j.core.tools;

import java.util.Map;

@FunctionalInterface
public interface ToolFunction {
    Object apply(Map<String, Object> arguments);
}

@@ -0,0 +1,16 @@
package io.github.amithkoujalgi.ollama4j.core.tools;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.Map;

@Data
@AllArgsConstructor
@NoArgsConstructor
public class ToolFunctionCallSpec {
    private String name;
    private Map<String, Object> arguments;
}

@@ -0,0 +1,16 @@
package io.github.amithkoujalgi.ollama4j.core.tools;

import java.util.HashMap;
import java.util.Map;

public class ToolRegistry {
    private final Map<String, ToolFunction> functionMap = new HashMap<>();

    public ToolFunction getFunction(String name) {
        return functionMap.get(name);
    }

    public void addFunction(String name, ToolFunction function) {
        functionMap.put(name, function);
    }
}

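A minimal usage sketch of the registry together with ToolFunction (not part of the diff; the tool name and lambda are illustrative):

    // Sketch: register a tool by name and invoke it with model-supplied arguments.
    ToolRegistry registry = new ToolRegistry();
    registry.addFunction("get-weather", arguments -> "sunny in " + arguments.get("city"));

    ToolFunction weather = registry.getFunction("get-weather");
    Object answer = weather.apply(Map.of("city", "Paris")); // -> "sunny in Paris"
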
@@ -0,0 +1,113 @@
package io.github.amithkoujalgi.ollama4j.core.tools;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Builder;
import lombok.Data;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class Tools {
    @Data
    @Builder
    public static class ToolSpecification {
        private String functionName;
        private String functionDescription;
        private Map<String, PromptFuncDefinition.Property> properties;
        private ToolFunction toolDefinition;
    }

    @Data
    @JsonIgnoreProperties(ignoreUnknown = true)
    public static class PromptFuncDefinition {
        private String type;
        private PromptFuncSpec function;

        @Data
        public static class PromptFuncSpec {
            private String name;
            private String description;
            private Parameters parameters;
        }

        @Data
        public static class Parameters {
            private String type;
            private Map<String, Property> properties;
            private List<String> required;
        }

        @Data
        @Builder
        public static class Property {
            private String type;
            private String description;
            @JsonProperty("enum")
            @JsonInclude(JsonInclude.Include.NON_NULL)
            private List<String> enumValues;
            @JsonIgnore
            private boolean required;
        }
    }

    public static class PropsBuilder {
        private final Map<String, PromptFuncDefinition.Property> props = new HashMap<>();

        public PropsBuilder withProperty(String key, PromptFuncDefinition.Property property) {
            props.put(key, property);
            return this;
        }

        public Map<String, PromptFuncDefinition.Property> build() {
            return props;
        }
    }

    public static class PromptBuilder {
        private final List<PromptFuncDefinition> tools = new ArrayList<>();

        private String promptText;

        public String build() throws JsonProcessingException {
            return "[AVAILABLE_TOOLS] " + Utils.getObjectMapper().writeValueAsString(tools) + "[/AVAILABLE_TOOLS][INST] " + promptText + " [/INST]";
        }

        public PromptBuilder withPrompt(String prompt) throws JsonProcessingException {
            promptText = prompt;
            return this;
        }

        public PromptBuilder withToolSpecification(ToolSpecification spec) {
            PromptFuncDefinition def = new PromptFuncDefinition();
            def.setType("function");

            PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec();
            functionDetail.setName(spec.getFunctionName());
            functionDetail.setDescription(spec.getFunctionDescription());

            PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters();
            parameters.setType("object");
            parameters.setProperties(spec.getProperties());

            List<String> requiredValues = new ArrayList<>();
            for (Map.Entry<String, PromptFuncDefinition.Property> p : spec.getProperties().entrySet()) {
                if (p.getValue().isRequired()) {
                    requiredValues.add(p.getKey());
                }
            }
            parameters.setRequired(requiredValues);
            functionDetail.setParameters(parameters);
            def.setFunction(functionDetail);

            tools.add(def);
            return this;
        }
    }
}

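A minimal sketch of how the builders above are meant to compose (not part of the diff; the tool name, property and prompt text are illustrative, and the checked JsonProcessingException declared by withPrompt()/build() is simply propagated):

    // Sketch: describe one tool and embed it into an [AVAILABLE_TOOLS]...[INST] prompt string.
    static String weatherPrompt() throws JsonProcessingException {
        Tools.ToolSpecification spec = Tools.ToolSpecification.builder()
                .functionName("get-weather")
                .functionDescription("Returns the current weather for a city")
                .properties(new Tools.PropsBuilder()
                        .withProperty("city", Tools.PromptFuncDefinition.Property.builder()
                                .type("string")
                                .description("Name of the city")
                                .required(true)
                                .build())
                        .build())
                .toolDefinition(arguments -> "sunny in " + arguments.get("city"))
                .build();

        return new Tools.PromptBuilder()
                .withToolSpecification(spec)
                .withPrompt("What is the weather in Paris today?")
                .build();
    }
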
@@ -8,72 +8,81 @@ package io.github.amithkoujalgi.ollama4j.core.types;
 */
@SuppressWarnings("ALL")
public class OllamaModelType {
    public static final String GEMMA = "gemma";
    public static final String GEMMA2 = "gemma2";
    public static final String LLAMA2 = "llama2";
    public static final String LLAMA3 = "llama3";
    public static final String MISTRAL = "mistral";
    public static final String MIXTRAL = "mixtral";
    public static final String LLAVA = "llava";
    public static final String LLAVA_PHI3 = "llava-phi3";
    public static final String NEURAL_CHAT = "neural-chat";
    public static final String CODELLAMA = "codellama";
    public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral";
    public static final String MISTRAL_OPENORCA = "mistral-openorca";
    public static final String LLAMA2_UNCENSORED = "llama2-uncensored";
    public static final String PHI = "phi";
    public static final String PHI3 = "phi3";
    public static final String ORCA_MINI = "orca-mini";
    public static final String DEEPSEEK_CODER = "deepseek-coder";
    public static final String DOLPHIN_MISTRAL = "dolphin-mistral";
    public static final String VICUNA = "vicuna";
    public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored";
    public static final String ZEPHYR = "zephyr";
    public static final String OPENHERMES = "openhermes";
    public static final String QWEN = "qwen";
    public static final String QWEN2 = "qwen2";
    public static final String WIZARDCODER = "wizardcoder";
    public static final String LLAMA2_CHINESE = "llama2-chinese";
    public static final String TINYLLAMA = "tinyllama";
    public static final String PHIND_CODELLAMA = "phind-codellama";
    public static final String OPENCHAT = "openchat";
    public static final String ORCA2 = "orca2";
    public static final String FALCON = "falcon";
    public static final String WIZARD_MATH = "wizard-math";
    public static final String TINYDOLPHIN = "tinydolphin";
    public static final String NOUS_HERMES = "nous-hermes";
    public static final String YI = "yi";
    public static final String DOLPHIN_PHI = "dolphin-phi";
    public static final String STARLING_LM = "starling-lm";
    public static final String STARCODER = "starcoder";
    public static final String CODEUP = "codeup";
    public static final String MEDLLAMA2 = "medllama2";
    public static final String STABLE_CODE = "stable-code";
    public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored";
    public static final String BAKLLAVA = "bakllava";
    public static final String EVERYTHINGLM = "everythinglm";
    public static final String SOLAR = "solar";
    public static final String STABLE_BELUGA = "stable-beluga";
    public static final String SQLCODER = "sqlcoder";
    public static final String YARN_MISTRAL = "yarn-mistral";
    public static final String NOUS_HERMES2_MIXTRAL = "nous-hermes2-mixtral";
    public static final String SAMANTHA_MISTRAL = "samantha-mistral";
    public static final String STABLELM_ZEPHYR = "stablelm-zephyr";
    public static final String MEDITRON = "meditron";
    public static final String WIZARD_VICUNA = "wizard-vicuna";
    public static final String STABLELM2 = "stablelm2";
    public static final String MAGICODER = "magicoder";
    public static final String YARN_LLAMA2 = "yarn-llama2";
    public static final String NOUS_HERMES2 = "nous-hermes2";
    public static final String DEEPSEEK_LLM = "deepseek-llm";
    public static final String LLAMA_PRO = "llama-pro";
    public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2";
    public static final String CODEBOOGA = "codebooga";
    public static final String MISTRALLITE = "mistrallite";
    public static final String NEXUSRAVEN = "nexusraven";
    public static final String GOLIATH = "goliath";
    public static final String NOMIC_EMBED_TEXT = "nomic-embed-text";
    public static final String NOTUX = "notux";
    public static final String ALFRED = "alfred";
    public static final String MEGADOLPHIN = "megadolphin";
    public static final String WIZARDLM = "wizardlm";
    public static final String XWINLM = "xwinlm";
    public static final String NOTUS = "notus";
    public static final String DUCKDB_NSQL = "duckdb-nsql";
    public static final String ALL_MINILM = "all-minilm";
    public static final String CODESTRAL = "codestral";
}

@@ -8,10 +8,18 @@ import java.net.URISyntaxException;
import java.net.URL;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;

public class Utils {

    private static ObjectMapper objectMapper;

    public static ObjectMapper getObjectMapper() {
        if (objectMapper == null) {
            objectMapper = new ObjectMapper();
            objectMapper.registerModule(new JavaTimeModule());
        }
        return objectMapper;
    }

    public static byte[] loadImageBytesFromUrl(String imageUrl)

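Aside on the hunk above: Utils.getObjectMapper() now memoises a single ObjectMapper with the JavaTimeModule registered. A tiny illustrative check (assumption: the first call happens before any concurrent use):

    // Sketch: repeated calls return the same, already-configured mapper instance.
    ObjectMapper first = Utils.getObjectMapper();
    ObjectMapper second = Utils.getObjectMapper();
    assert first == second; // same instance, with JavaTimeModule registered
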
@@ -1,7 +1,5 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.integrationtests;
|
package io.github.amithkoujalgi.ollama4j.integrationtests;
|
||||||
|
|
||||||
import static org.junit.jupiter.api.Assertions.*;
|
|
||||||
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
|
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
|
||||||
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
|
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail;
|
import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail;
|
||||||
@@ -10,9 +8,16 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
|
|||||||
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
|
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
|
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
|
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
|
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
|
||||||
|
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
|
||||||
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
|
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
|
||||||
|
import lombok.Data;
|
||||||
|
import org.junit.jupiter.api.BeforeEach;
|
||||||
|
import org.junit.jupiter.api.Order;
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.InputStream;
|
import java.io.InputStream;
|
||||||
@@ -22,372 +27,369 @@ import java.net.http.HttpConnectTimeoutException;
|
|||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Objects;
|
import java.util.Objects;
|
||||||
import java.util.Properties;
|
import java.util.Properties;
|
||||||
import lombok.Data;
|
|
||||||
import org.junit.jupiter.api.BeforeEach;
|
import static org.junit.jupiter.api.Assertions.*;
|
||||||
import org.junit.jupiter.api.Order;
|
|
||||||
import org.junit.jupiter.api.Test;
|
|
||||||
import org.slf4j.Logger;
|
|
||||||
import org.slf4j.LoggerFactory;
|
|
||||||
|
|
||||||
class TestRealAPIs {
|
class TestRealAPIs {
|
||||||
|
|
||||||
private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class);
|
private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class);
|
||||||
|
|
||||||
OllamaAPI ollamaAPI;
|
OllamaAPI ollamaAPI;
|
||||||
Config config;
|
Config config;
|
||||||
|
|
||||||
private File getImageFileFromClasspath(String fileName) {
|
private File getImageFileFromClasspath(String fileName) {
|
||||||
ClassLoader classLoader = getClass().getClassLoader();
|
ClassLoader classLoader = getClass().getClassLoader();
|
||||||
return new File(Objects.requireNonNull(classLoader.getResource(fileName)).getFile());
|
return new File(Objects.requireNonNull(classLoader.getResource(fileName)).getFile());
|
||||||
}
|
|
||||||
|
|
||||||
@BeforeEach
|
|
||||||
void setUp() {
|
|
||||||
config = new Config();
|
|
||||||
ollamaAPI = new OllamaAPI(config.getOllamaURL());
|
|
||||||
ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds());
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
@Order(1)
|
|
||||||
void testWrongEndpoint() {
|
|
||||||
OllamaAPI ollamaAPI = new OllamaAPI("http://wrong-host:11434");
|
|
||||||
assertThrows(ConnectException.class, ollamaAPI::listModels);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
@Order(1)
|
|
||||||
void testEndpointReachability() {
|
|
||||||
try {
|
|
||||||
assertNotNull(ollamaAPI.listModels());
|
|
||||||
} catch (HttpConnectTimeoutException e) {
|
|
||||||
fail(e.getMessage());
|
|
||||||
} catch (Exception e) {
|
|
||||||
fail(e);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@BeforeEach
|
||||||
@Order(2)
|
void setUp() {
|
||||||
void testListModels() {
|
config = new Config();
|
||||||
testEndpointReachability();
|
ollamaAPI = new OllamaAPI(config.getOllamaURL());
|
||||||
try {
|
ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds());
|
||||||
assertNotNull(ollamaAPI.listModels());
|
|
||||||
ollamaAPI.listModels().forEach(System.out::println);
|
|
||||||
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
|
|
||||||
fail(e);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Order(2)
|
@Order(1)
|
||||||
void testPullModel() {
|
void testWrongEndpoint() {
|
||||||
testEndpointReachability();
|
OllamaAPI ollamaAPI = new OllamaAPI("http://wrong-host:11434");
|
||||||
try {
|
assertThrows(ConnectException.class, ollamaAPI::listModels);
|
||||||
ollamaAPI.pullModel(config.getModel());
|
|
||||||
boolean found =
|
|
||||||
ollamaAPI.listModels().stream()
|
|
||||||
.anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel()));
|
|
||||||
assertTrue(found);
|
|
||||||
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
|
|
||||||
fail(e);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Order(3)
|
@Order(1)
|
||||||
void testListDtails() {
|
void testEndpointReachability() {
|
||||||
testEndpointReachability();
|
try {
|
||||||
try {
|
assertNotNull(ollamaAPI.listModels());
|
||||||
ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel());
|
} catch (HttpConnectTimeoutException e) {
|
||||||
assertNotNull(modelDetails);
|
fail(e.getMessage());
|
||||||
System.out.println(modelDetails);
|
} catch (Exception e) {
|
||||||
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
|
fail(e);
|
||||||
fail(e);
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Order(3)
|
@Order(2)
|
||||||
void testAskModelWithDefaultOptions() {
|
void testListModels() {
|
||||||
testEndpointReachability();
|
testEndpointReachability();
|
||||||
try {
|
try {
|
||||||
OllamaResult result =
|
assertNotNull(ollamaAPI.listModels());
|
||||||
ollamaAPI.generate(
|
ollamaAPI.listModels().forEach(System.out::println);
|
||||||
config.getModel(),
|
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
|
||||||
"What is the capital of France? And what's France's connection with Mona Lisa?",
|
fail(e);
|
||||||
new OptionsBuilder().build());
|
}
|
||||||
assertNotNull(result);
|
|
||||||
assertNotNull(result.getResponse());
|
|
||||||
assertFalse(result.getResponse().isEmpty());
|
|
||||||
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
|
||||||
fail(e);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Order(3)
|
@Order(2)
|
||||||
void testAskModelWithDefaultOptionsStreamed() {
|
void testPullModel() {
|
||||||
testEndpointReachability();
|
testEndpointReachability();
|
||||||
try {
|
try {
|
||||||
|
ollamaAPI.pullModel(config.getModel());
|
||||||
StringBuffer sb = new StringBuffer("");
|
boolean found =
|
||||||
|
ollamaAPI.listModels().stream()
|
||||||
OllamaResult result = ollamaAPI.generate(config.getModel(),
|
.anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel()));
|
||||||
"What is the capital of France? And what's France's connection with Mona Lisa?",
|
assertTrue(found);
|
||||||
new OptionsBuilder().build(), (s) -> {
|
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
|
||||||
LOG.info(s);
|
fail(e);
|
||||||
String substring = s.substring(sb.toString().length(), s.length());
|
}
|
||||||
LOG.info(substring);
|
|
||||||
sb.append(substring);
|
|
||||||
});
|
|
||||||
|
|
||||||
assertNotNull(result);
|
|
||||||
assertNotNull(result.getResponse());
|
|
||||||
assertFalse(result.getResponse().isEmpty());
|
|
||||||
assertEquals(sb.toString().trim(), result.getResponse().trim());
|
|
||||||
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
|
||||||
fail(e);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Order(3)
|
@Order(3)
|
||||||
void testAskModelWithOptions() {
|
void testListDtails() {
|
||||||
testEndpointReachability();
|
testEndpointReachability();
|
||||||
try {
|
try {
|
||||||
OllamaResult result =
|
ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel());
|
||||||
ollamaAPI.generate(
|
assertNotNull(modelDetails);
|
||||||
config.getModel(),
|
System.out.println(modelDetails);
|
||||||
"What is the capital of France? And what's France's connection with Mona Lisa?",
|
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
|
||||||
new OptionsBuilder().setTemperature(0.9f).build());
|
fail(e);
|
||||||
assertNotNull(result);
|
}
|
||||||
assertNotNull(result.getResponse());
|
|
||||||
assertFalse(result.getResponse().isEmpty());
|
|
||||||
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
|
||||||
fail(e);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Order(3)
|
@Order(3)
|
||||||
void testChat() {
|
void testAskModelWithDefaultOptions() {
|
||||||
testEndpointReachability();
|
testEndpointReachability();
|
||||||
try {
|
try {
|
||||||
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
|
OllamaResult result =
|
||||||
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
|
ollamaAPI.generate(
|
||||||
.withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!")
|
config.getModel(),
|
||||||
.withMessage(OllamaChatMessageRole.USER,"And what is the second larges city?")
|
"What is the capital of France? And what's France's connection with Mona Lisa?",
|
||||||
.build();
|
false,
|
||||||
|
new OptionsBuilder().build());
|
||||||
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
|
assertNotNull(result);
|
||||||
assertNotNull(chatResult);
|
assertNotNull(result.getResponse());
|
||||||
assertFalse(chatResult.getResponse().isBlank());
|
assertFalse(result.getResponse().isEmpty());
|
||||||
assertEquals(4,chatResult.getChatHistory().size());
|
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
||||||
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
fail(e);
|
||||||
fail(e);
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Order(3)
|
@Order(3)
|
||||||
void testChatWithSystemPrompt() {
|
void testAskModelWithDefaultOptionsStreamed() {
|
||||||
testEndpointReachability();
|
testEndpointReachability();
|
||||||
try {
|
try {
|
||||||
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
|
StringBuffer sb = new StringBuffer("");
|
||||||
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM,
|
OllamaResult result = ollamaAPI.generate(config.getModel(),
|
||||||
"You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
|
"What is the capital of France? And what's France's connection with Mona Lisa?",
|
||||||
.withMessage(OllamaChatMessageRole.USER,
|
false,
|
||||||
"What is the capital of France? And what's France's connection with Mona Lisa?")
|
new OptionsBuilder().build(), (s) -> {
|
||||||
.build();
|
LOG.info(s);
|
||||||
|
String substring = s.substring(sb.toString().length(), s.length());
|
||||||
|
LOG.info(substring);
|
||||||
|
sb.append(substring);
|
||||||
|
});
|
||||||
|
|
||||||
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
|
assertNotNull(result);
|
||||||
assertNotNull(chatResult);
|
assertNotNull(result.getResponse());
|
||||||
assertFalse(chatResult.getResponse().isBlank());
|
assertFalse(result.getResponse().isEmpty());
|
||||||
assertTrue(chatResult.getResponse().startsWith("NI"));
|
assertEquals(sb.toString().trim(), result.getResponse().trim());
|
||||||
assertEquals(3, chatResult.getChatHistory().size());
|
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
||||||
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
fail(e);
|
||||||
fail(e);
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Order(3)
|
@Order(3)
|
||||||
void testChatWithStream() {
|
void testAskModelWithOptions() {
|
||||||
testEndpointReachability();
|
testEndpointReachability();
|
||||||
try {
|
try {
|
||||||
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
|
OllamaResult result =
|
||||||
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
|
ollamaAPI.generate(
|
||||||
"What is the capital of France? And what's France's connection with Mona Lisa?")
|
config.getModel(),
|
||||||
.build();
|
"What is the capital of France? And what's France's connection with Mona Lisa?",
|
||||||
|
true,
|
||||||
StringBuffer sb = new StringBuffer("");
|
new OptionsBuilder().setTemperature(0.9f).build());
|
||||||
|
assertNotNull(result);
|
||||||
OllamaChatResult chatResult = ollamaAPI.chat(requestModel,(s) -> {
|
assertNotNull(result.getResponse());
|
||||||
LOG.info(s);
|
assertFalse(result.getResponse().isEmpty());
|
||||||
String substring = s.substring(sb.toString().length(), s.length());
|
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
||||||
LOG.info(substring);
|
fail(e);
|
||||||
sb.append(substring);
|
}
|
||||||
});
|
|
||||||
assertNotNull(chatResult);
|
|
||||||
assertEquals(sb.toString().trim(), chatResult.getResponse().trim());
|
|
||||||
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
|
||||||
fail(e);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Order(3)
|
@Order(3)
|
||||||
void testChatWithImageFromFileWithHistoryRecognition() {
|
void testChat() {
|
||||||
testEndpointReachability();
|
testEndpointReachability();
|
||||||
try {
|
try {
|
||||||
OllamaChatRequestBuilder builder =
|
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
|
||||||
OllamaChatRequestBuilder.getInstance(config.getImageModel());
|
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
|
||||||
OllamaChatRequestModel requestModel =
|
.withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!")
|
||||||
builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
|
.withMessage(OllamaChatMessageRole.USER, "And what is the second larges city?")
|
||||||
List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
|
.build();
|
||||||
|
|
||||||
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
|
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
|
||||||
assertNotNull(chatResult);
|
assertNotNull(chatResult);
|
||||||
assertNotNull(chatResult.getResponse());
|
assertFalse(chatResult.getResponse().isBlank());
|
||||||
|
assertEquals(4, chatResult.getChatHistory().size());
|
||||||
builder.reset();
|
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
||||||
|
fail(e);
|
||||||
requestModel =
|
}
|
||||||
builder.withMessages(chatResult.getChatHistory())
|
|
||||||
.withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();
|
|
||||||
|
|
||||||
chatResult = ollamaAPI.chat(requestModel);
|
|
||||||
assertNotNull(chatResult);
|
|
||||||
assertNotNull(chatResult.getResponse());
|
|
||||||
|
|
||||||
|
|
||||||
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
|
||||||
fail(e);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Order(3)
|
@Order(3)
|
||||||
void testChatWithImageFromURL() {
|
void testChatWithSystemPrompt() {
|
||||||
testEndpointReachability();
|
testEndpointReachability();
|
||||||
try {
|
try {
|
||||||
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel());
|
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
|
||||||
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
|
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM,
|
||||||
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")
|
"You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
|
||||||
.build();
|
.withMessage(OllamaChatMessageRole.USER,
|
||||||
|
"What is the capital of France? And what's France's connection with Mona Lisa?")
|
||||||
|
.build();
|
||||||
|
|
||||||
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
|
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
|
||||||
assertNotNull(chatResult);
|
assertNotNull(chatResult);
|
||||||
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
assertFalse(chatResult.getResponse().isBlank());
|
||||||
fail(e);
|
assertTrue(chatResult.getResponse().startsWith("NI"));
|
||||||
|
assertEquals(3, chatResult.getChatHistory().size());
|
||||||
|
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
||||||
|
fail(e);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Order(3)
|
@Order(3)
|
||||||
void testAskModelWithOptionsAndImageFiles() {
|
void testChatWithStream() {
|
||||||
testEndpointReachability();
|
testEndpointReachability();
|
||||||
File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
|
try {
|
||||||
try {
|
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
|
||||||
OllamaResult result =
|
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
|
||||||
ollamaAPI.generateWithImageFiles(
|
"What is the capital of France? And what's France's connection with Mona Lisa?")
|
||||||
config.getImageModel(),
|
.build();
|
||||||
"What is in this image?",
|
|
||||||
List.of(imageFile),
|
StringBuffer sb = new StringBuffer("");
|
||||||
new OptionsBuilder().build());
|
|
||||||
assertNotNull(result);
|
OllamaChatResult chatResult = ollamaAPI.chat(requestModel, (s) -> {
|
||||||
assertNotNull(result.getResponse());
|
LOG.info(s);
|
||||||
assertFalse(result.getResponse().isEmpty());
|
String substring = s.substring(sb.toString().length(), s.length());
|
||||||
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
LOG.info(substring);
|
||||||
fail(e);
|
sb.append(substring);
|
||||||
|
});
|
||||||
|
assertNotNull(chatResult);
|
||||||
|
assertEquals(sb.toString().trim(), chatResult.getResponse().trim());
|
||||||
|
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
||||||
|
fail(e);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Order(3)
|
@Order(3)
|
||||||
void testAskModelWithOptionsAndImageFilesStreamed() {
|
void testChatWithImageFromFileWithHistoryRecognition() {
|
||||||
testEndpointReachability();
|
testEndpointReachability();
|
||||||
File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
|
try {
|
||||||
try {
|
OllamaChatRequestBuilder builder =
|
||||||
StringBuffer sb = new StringBuffer("");
|
OllamaChatRequestBuilder.getInstance(config.getImageModel());
|
||||||
|
OllamaChatRequestModel requestModel =
|
||||||
|
builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
|
||||||
|
List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
|
||||||
|
|
||||||
OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(),
|
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
|
||||||
"What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> {
|
assertNotNull(chatResult);
|
||||||
LOG.info(s);
|
assertNotNull(chatResult.getResponse());
|
||||||
String substring = s.substring(sb.toString().length(), s.length());
|
|
||||||
LOG.info(substring);
|
builder.reset();
|
||||||
sb.append(substring);
|
|
||||||
});
|
requestModel =
|
||||||
assertNotNull(result);
|
builder.withMessages(chatResult.getChatHistory())
|
||||||
assertNotNull(result.getResponse());
|
.withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();
|
||||||
assertFalse(result.getResponse().isEmpty());
|
|
||||||
assertEquals(sb.toString().trim(), result.getResponse().trim());
|
chatResult = ollamaAPI.chat(requestModel);
|
||||||
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
assertNotNull(chatResult);
|
||||||
fail(e);
|
assertNotNull(chatResult.getResponse());
|
||||||
|
|
||||||
|
|
||||||
|
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
||||||
|
fail(e);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Order(3)
|
@Order(3)
|
||||||
void testAskModelWithOptionsAndImageURLs() {
|
void testChatWithImageFromURL() {
|
||||||
testEndpointReachability();
|
testEndpointReachability();
|
||||||
try {
|
try {
|
||||||
OllamaResult result =
|
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel());
|
||||||
ollamaAPI.generateWithImageURLs(
|
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
|
||||||
config.getImageModel(),
|
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")
|
||||||
"What is in this image?",
|
.build();
|
||||||
List.of(
|
|
||||||
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
|
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
|
||||||
new OptionsBuilder().build());
|
assertNotNull(chatResult);
|
||||||
assertNotNull(result);
|
} catch (IOException | OllamaBaseException | InterruptedException e) {
|
||||||
assertNotNull(result.getResponse());
|
fail(e);
|
||||||
assertFalse(result.getResponse().isEmpty());
|
}
|
||||||
} catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
|
|
||||||
fail(e);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Order(3)
|
@Order(3)
|
||||||
public void testEmbedding() {
|
void testAskModelWithOptionsAndImageFiles() {
|
||||||
testEndpointReachability();
|
testEndpointReachability();
|
||||||
try {
|
File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
|
||||||
OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder
|
try {
|
||||||
.getInstance(config.getModel(), "What is the capital of France?").build();
|
OllamaResult result =
|
||||||
|
ollamaAPI.generateWithImageFiles(
|
||||||
List<Double> embeddings = ollamaAPI.generateEmbeddings(request);
|
config.getImageModel(),
|
||||||
|
"What is in this image?",
|
||||||
assertNotNull(embeddings);
|
List.of(imageFile),
|
||||||
            new OptionsBuilder().build());
      assertNotNull(result);
      assertNotNull(result.getResponse());
      assertFalse(result.getResponse().isEmpty());
    } catch (IOException | OllamaBaseException | InterruptedException e) {
      fail(e);
    }
  }

  @Test
  @Order(3)
  void testAskModelWithOptionsAndImageFilesStreamed() {
    testEndpointReachability();
    File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
    try {
      StringBuffer sb = new StringBuffer("");

      OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(),
          "What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> {
            LOG.info(s);
            String substring = s.substring(sb.toString().length(), s.length());
            LOG.info(substring);
            sb.append(substring);
          });
      assertNotNull(result);
      assertNotNull(result.getResponse());
      assertFalse(result.getResponse().isEmpty());
      assertEquals(sb.toString().trim(), result.getResponse().trim());
    } catch (IOException | OllamaBaseException | InterruptedException e) {
      fail(e);
    }
  }

  @Test
  @Order(3)
  void testAskModelWithOptionsAndImageURLs() {
    testEndpointReachability();
    try {
      OllamaResult result =
          ollamaAPI.generateWithImageURLs(
              config.getImageModel(),
              "What is in this image?",
              List.of(
                  "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
              new OptionsBuilder().build());
      assertNotNull(result);
      assertNotNull(result.getResponse());
      assertFalse(result.getResponse().isEmpty());
    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
      fail(e);
    }
  }

  @Test
  @Order(3)
  public void testEmbedding() {
    testEndpointReachability();
    try {
      OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder
          .getInstance(config.getModel(), "What is the capital of France?").build();

      List<Double> embeddings = ollamaAPI.generateEmbeddings(request);

      assertNotNull(embeddings);
      assertFalse(embeddings.isEmpty());
    } catch (IOException | OllamaBaseException | InterruptedException e) {
      fail(e);
    }
  }
}

@Data
class Config {
  private String ollamaURL;
  private String model;
  private String imageModel;
  private int requestTimeoutSeconds;

  public Config() {
    Properties properties = new Properties();
    try (InputStream input =
        getClass().getClassLoader().getResourceAsStream("test-config.properties")) {
      if (input == null) {
        throw new RuntimeException("Sorry, unable to find test-config.properties");
      }
      properties.load(input);
      this.ollamaURL = properties.getProperty("ollama.url");
      this.model = properties.getProperty("ollama.model");
      this.imageModel = properties.getProperty("ollama.model.image");
      this.requestTimeoutSeconds =
          Integer.parseInt(properties.getProperty("ollama.request-timeout-seconds"));
    } catch (IOException e) {
      throw new RuntimeException("Error loading properties", e);
    }
  }
}
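Not part of the diff: a minimal standalone sketch of the streamed generateWithImageFiles overload that the integration test above exercises. It assumes the stream handler is invoked with the cumulative response text on every call (which is what the test's substring bookkeeping implies); the host URL, model name, and image path are placeholder values, not taken from this change set.

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

import java.io.File;
import java.util.List;

public class StreamedImagePromptSketch {
  public static void main(String[] args) throws Exception {
    OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434"); // placeholder host
    StringBuffer sb = new StringBuffer();
    OllamaResult result = ollamaAPI.generateWithImageFiles(
        "llava", // placeholder image-capable model
        "What is in this image?",
        List.of(new File("dog-on-a-boat.jpg")), // placeholder path
        new OptionsBuilder().build(),
        (s) -> {
          // s is assumed to be the whole response so far; print only the newly streamed tail
          String delta = s.substring(sb.length());
          System.out.print(delta);
          sb.append(delta);
        });
    System.out.println();
    System.out.println("Complete response: " + result.getResponse());
  }
}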
@@ -1,163 +1,164 @@
 package io.github.amithkoujalgi.ollama4j.unittests;
 
-import static org.mockito.Mockito.*;
 
 import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
 import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
 import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultCallback;
+import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultStreamer;
 import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
 import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
 import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mockito;
 
 import java.io.IOException;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.Collections;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mockito;
+
+import static org.mockito.Mockito.*;
 
 class TestMockedAPIs {
   @Test
   void testPullModel() {
     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
     String model = OllamaModelType.LLAMA2;
     try {
       doNothing().when(ollamaAPI).pullModel(model);
       ollamaAPI.pullModel(model);
       verify(ollamaAPI, times(1)).pullModel(model);
     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
       throw new RuntimeException(e);
     }
   }
 
   @Test
   void testListModels() {
     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
     try {
       when(ollamaAPI.listModels()).thenReturn(new ArrayList<>());
       ollamaAPI.listModels();
       verify(ollamaAPI, times(1)).listModels();
     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
       throw new RuntimeException(e);
     }
   }
 
   @Test
   void testCreateModel() {
     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
     String model = OllamaModelType.LLAMA2;
     String modelFilePath = "FROM llama2\nSYSTEM You are mario from Super Mario Bros.";
     try {
       doNothing().when(ollamaAPI).createModelWithModelFileContents(model, modelFilePath);
       ollamaAPI.createModelWithModelFileContents(model, modelFilePath);
       verify(ollamaAPI, times(1)).createModelWithModelFileContents(model, modelFilePath);
     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
       throw new RuntimeException(e);
     }
   }
 
   @Test
   void testDeleteModel() {
     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
     String model = OllamaModelType.LLAMA2;
     try {
       doNothing().when(ollamaAPI).deleteModel(model, true);
       ollamaAPI.deleteModel(model, true);
       verify(ollamaAPI, times(1)).deleteModel(model, true);
     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
       throw new RuntimeException(e);
     }
   }
 
   @Test
   void testGetModelDetails() {
     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
     String model = OllamaModelType.LLAMA2;
     try {
       when(ollamaAPI.getModelDetails(model)).thenReturn(new ModelDetail());
       ollamaAPI.getModelDetails(model);
       verify(ollamaAPI, times(1)).getModelDetails(model);
     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
       throw new RuntimeException(e);
     }
   }
 
   @Test
   void testGenerateEmbeddings() {
     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
     String model = OllamaModelType.LLAMA2;
     String prompt = "some prompt text";
     try {
       when(ollamaAPI.generateEmbeddings(model, prompt)).thenReturn(new ArrayList<>());
       ollamaAPI.generateEmbeddings(model, prompt);
       verify(ollamaAPI, times(1)).generateEmbeddings(model, prompt);
     } catch (IOException | OllamaBaseException | InterruptedException e) {
       throw new RuntimeException(e);
     }
   }
 
   @Test
   void testAsk() {
     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
     String model = OllamaModelType.LLAMA2;
     String prompt = "some prompt text";
     OptionsBuilder optionsBuilder = new OptionsBuilder();
     try {
-      when(ollamaAPI.generate(model, prompt, optionsBuilder.build()))
+      when(ollamaAPI.generate(model, prompt, false, optionsBuilder.build()))
           .thenReturn(new OllamaResult("", 0, 200));
-      ollamaAPI.generate(model, prompt, optionsBuilder.build());
-      verify(ollamaAPI, times(1)).generate(model, prompt, optionsBuilder.build());
+      ollamaAPI.generate(model, prompt, false, optionsBuilder.build());
+      verify(ollamaAPI, times(1)).generate(model, prompt, false, optionsBuilder.build());
     } catch (IOException | OllamaBaseException | InterruptedException e) {
       throw new RuntimeException(e);
     }
   }
 
   @Test
   void testAskWithImageFiles() {
     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
     String model = OllamaModelType.LLAMA2;
     String prompt = "some prompt text";
     try {
       when(ollamaAPI.generateWithImageFiles(
               model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
           .thenReturn(new OllamaResult("", 0, 200));
       ollamaAPI.generateWithImageFiles(
           model, prompt, Collections.emptyList(), new OptionsBuilder().build());
       verify(ollamaAPI, times(1))
           .generateWithImageFiles(
               model, prompt, Collections.emptyList(), new OptionsBuilder().build());
     } catch (IOException | OllamaBaseException | InterruptedException e) {
       throw new RuntimeException(e);
     }
   }
 
   @Test
   void testAskWithImageURLs() {
     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
     String model = OllamaModelType.LLAMA2;
     String prompt = "some prompt text";
     try {
       when(ollamaAPI.generateWithImageURLs(
               model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
           .thenReturn(new OllamaResult("", 0, 200));
       ollamaAPI.generateWithImageURLs(
           model, prompt, Collections.emptyList(), new OptionsBuilder().build());
       verify(ollamaAPI, times(1))
           .generateWithImageURLs(
               model, prompt, Collections.emptyList(), new OptionsBuilder().build());
     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
       throw new RuntimeException(e);
     }
   }
 
   @Test
   void testAskAsync() {
     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
     String model = OllamaModelType.LLAMA2;
     String prompt = "some prompt text";
-    when(ollamaAPI.generateAsync(model, prompt))
-        .thenReturn(new OllamaAsyncResultCallback(null, null, 3));
-    ollamaAPI.generateAsync(model, prompt);
-    verify(ollamaAPI, times(1)).generateAsync(model, prompt);
+    when(ollamaAPI.generateAsync(model, prompt, false))
+        .thenReturn(new OllamaAsyncResultStreamer(null, null, 3));
+    ollamaAPI.generateAsync(model, prompt, false);
+    verify(ollamaAPI, times(1)).generateAsync(model, prompt, false);
   }
 }
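Not part of the diff: a minimal sketch of the reworked call sites these mocks model. The extra boolean argument to generate()/generateAsync() is assumed, from the test changes alone, to be the new raw-mode flag; the host URL is a placeholder; OllamaAsyncResultStreamer replaces the old OllamaAsyncResultCallback as the async return type.

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultStreamer;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class GenerateCallSitesSketch {
  public static void main(String[] args) throws Exception {
    OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434"); // placeholder host

    // Synchronous generation; the boolean is assumed to be the raw-mode flag added in this change.
    OllamaResult result = ollamaAPI.generate(
        OllamaModelType.LLAMA2, "Why is the sky blue?", false, new OptionsBuilder().build());
    System.out.println(result.getResponse());

    // Asynchronous generation now hands back a streamer instead of a callback object.
    OllamaAsyncResultStreamer streamer =
        ollamaAPI.generateAsync(OllamaModelType.LLAMA2, "Why is the sky blue?", false);
    System.out.println("Streamer started: " + streamer);
  }
}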
@@ -6,30 +6,30 @@ import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 
-public abstract class AbstractRequestSerializationTest<T> {
+public abstract class AbstractSerializationTest<T> {
 
   protected ObjectMapper mapper = Utils.getObjectMapper();
 
-  protected String serializeRequest(T req) {
+  protected String serialize(T obj) {
     try {
-      return mapper.writeValueAsString(req);
+      return mapper.writeValueAsString(obj);
     } catch (JsonProcessingException e) {
       fail("Could not serialize request!", e);
       return null;
     }
   }
 
-  protected T deserializeRequest(String jsonRequest, Class<T> requestClass) {
+  protected T deserialize(String jsonObject, Class<T> deserializationClass) {
     try {
-      return mapper.readValue(jsonRequest, requestClass);
+      return mapper.readValue(jsonObject, deserializationClass);
     } catch (JsonProcessingException e) {
-      fail("Could not deserialize jsonRequest!", e);
+      fail("Could not deserialize jsonObject!", e);
       return null;
     }
   }
 
-  protected void assertEqualsAfterUnmarshalling(T unmarshalledRequest,
+  protected void assertEqualsAfterUnmarshalling(T unmarshalledObject,
       T req) {
-    assertEquals(req, unmarshalledRequest);
+    assertEquals(req, unmarshalledObject);
   }
 }
@@ -14,7 +14,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilde
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
 import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
 
-public class TestChatRequestSerialization extends AbstractRequestSerializationTest<OllamaChatRequestModel>{
+public class TestChatRequestSerialization extends AbstractSerializationTest<OllamaChatRequestModel> {
 
   private OllamaChatRequestBuilder builder;
 
@@ -26,8 +26,8 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
   @Test
   public void testRequestOnlyMandatoryFields() {
     OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build();
-    String jsonRequest = serializeRequest(req);
-    assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
+    String jsonRequest = serialize(req);
+    assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req);
   }
 
   @Test
@@ -35,28 +35,43 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
     OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt")
         .withMessage(OllamaChatMessageRole.USER, "Some prompt")
         .build();
-    String jsonRequest = serializeRequest(req);
-    assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
+    String jsonRequest = serialize(req);
+    assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req);
   }
 
   @Test
   public void testRequestWithMessageAndImage() {
     OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
         List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
-    String jsonRequest = serializeRequest(req);
-    assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
+    String jsonRequest = serialize(req);
+    assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req);
   }
 
   @Test
   public void testRequestWithOptions() {
     OptionsBuilder b = new OptionsBuilder();
     OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
-        .withOptions(b.setMirostat(1).build()).build();
+        .withOptions(b.setMirostat(1).build())
+        .withOptions(b.setTemperature(1L).build())
+        .withOptions(b.setMirostatEta(1L).build())
+        .withOptions(b.setMirostatTau(1L).build())
+        .withOptions(b.setNumGpu(1).build())
+        .withOptions(b.setSeed(1).build())
+        .withOptions(b.setTopK(1).build())
+        .withOptions(b.setTopP(1).build())
+        .build();
 
-    String jsonRequest = serializeRequest(req);
-    OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaChatRequestModel.class);
+    String jsonRequest = serialize(req);
+    OllamaChatRequestModel deserializeRequest = deserialize(jsonRequest, OllamaChatRequestModel.class);
     assertEqualsAfterUnmarshalling(deserializeRequest, req);
     assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
+    assertEquals(1.0, deserializeRequest.getOptions().get("temperature"));
+    assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_eta"));
+    assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_tau"));
+    assertEquals(1, deserializeRequest.getOptions().get("num_gpu"));
+    assertEquals(1, deserializeRequest.getOptions().get("seed"));
+    assertEquals(1, deserializeRequest.getOptions().get("top_k"));
+    assertEquals(1.0, deserializeRequest.getOptions().get("top_p"));
   }
 
   @Test
@@ -64,11 +79,35 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
     OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
         .withGetJsonResponse().build();
 
-    String jsonRequest = serializeRequest(req);
+    String jsonRequest = serialize(req);
     // no jackson deserialization as format property is not boolean ==> omit as deserialization
     // of request is never used in real code anyways
     JSONObject jsonObject = new JSONObject(jsonRequest);
     String requestFormatProperty = jsonObject.getString("format");
     assertEquals("json", requestFormatProperty);
   }
 
+  @Test
+  public void testWithTemplate() {
+    OllamaChatRequestModel req = builder.withTemplate("System Template")
+        .build();
+    String jsonRequest = serialize(req);
+    assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req);
+  }
+
+  @Test
+  public void testWithStreaming() {
+    OllamaChatRequestModel req = builder.withStreaming().build();
+    String jsonRequest = serialize(req);
+    assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).isStream(), true);
+  }
+
+  @Test
+  public void testWithKeepAlive() {
+    String expectedKeepAlive = "5m";
+    OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive)
+        .build();
+    String jsonRequest = serialize(req);
+    assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive);
+  }
 }
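Not part of the diff: a minimal sketch of building a chat request with the options the new tests cover (per-option OptionsBuilder values, template, streaming, keep-alive). Obtaining the builder via OllamaChatRequestBuilder.getInstance(model) is an assumption here; these hunks only show the builder field, not its construction, and the model name is a placeholder.

import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class ChatRequestBuilderSketch {
  public static void main(String[] args) {
    // getInstance(model) is assumed; the tests do not show how the builder is created.
    OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("llama2");

    OllamaChatRequestModel request = builder
        .withMessage(OllamaChatMessageRole.SYSTEM, "You are a terse assistant.")
        .withMessage(OllamaChatMessageRole.USER, "Some prompt")
        .withOptions(new OptionsBuilder().setMirostat(1).setTopK(1).build())
        .withTemplate("System Template")
        .withKeepAlive("5m")   // keep the model loaded for five minutes after the call
        .withStreaming()       // request a streamed response
        .build();

    System.out.println(request);
  }
}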
@@ -7,7 +7,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsR
 import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
 import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
 
-public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializationTest<OllamaEmbeddingsRequestModel>{
+public class TestEmbeddingsRequestSerialization extends AbstractSerializationTest<OllamaEmbeddingsRequestModel> {
 
   private OllamaEmbeddingsRequestBuilder builder;
 
@@ -19,8 +19,8 @@ public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializa
   @Test
   public void testRequestOnlyMandatoryFields() {
     OllamaEmbeddingsRequestModel req = builder.build();
-    String jsonRequest = serializeRequest(req);
-    assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class), req);
+    String jsonRequest = serialize(req);
+    assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class), req);
   }
 
   @Test
@@ -29,8 +29,8 @@ public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializa
     OllamaEmbeddingsRequestModel req = builder
         .withOptions(b.setMirostat(1).build()).build();
 
-    String jsonRequest = serializeRequest(req);
-    OllamaEmbeddingsRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class);
+    String jsonRequest = serialize(req);
+    OllamaEmbeddingsRequestModel deserializeRequest = deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class);
     assertEqualsAfterUnmarshalling(deserializeRequest, req);
     assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
   }
@@ -11,7 +11,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateReque
 import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
 import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
 
-public class TestGenerateRequestSerialization extends AbstractRequestSerializationTest<OllamaGenerateRequestModel>{
+public class TestGenerateRequestSerialization extends AbstractSerializationTest<OllamaGenerateRequestModel> {
 
   private OllamaGenerateRequestBuilder builder;
 
@@ -24,8 +24,8 @@ public class TestGenerateRequestSerialization extends AbstractRequestSerializati
   public void testRequestOnlyMandatoryFields() {
     OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build();
 
-    String jsonRequest = serializeRequest(req);
-    assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class), req);
+    String jsonRequest = serialize(req);
+    assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequestModel.class), req);
   }
 
   @Test
@@ -34,8 +34,8 @@ public class TestGenerateRequestSerialization extends AbstractRequestSerializati
     OllamaGenerateRequestModel req =
         builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build();
 
-    String jsonRequest = serializeRequest(req);
-    OllamaGenerateRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class);
+    String jsonRequest = serialize(req);
+    OllamaGenerateRequestModel deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequestModel.class);
     assertEqualsAfterUnmarshalling(deserializeRequest, req);
     assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
   }
@@ -45,7 +45,7 @@ public class TestGenerateRequestSerialization extends AbstractRequestSerializati
     OllamaGenerateRequestModel req =
         builder.withPrompt("Some prompt").withGetJsonResponse().build();
 
-    String jsonRequest = serializeRequest(req);
+    String jsonRequest = serialize(req);
     // no jackson deserialization as format property is not boolean ==> omit as deserialization
     // of request is never used in real code anyways
     JSONObject jsonObject = new JSONObject(jsonRequest);
@@ -0,0 +1,42 @@
+package io.github.amithkoujalgi.ollama4j.unittests.jackson;
+
+import io.github.amithkoujalgi.ollama4j.core.models.Model;
+import org.junit.jupiter.api.Test;
+
+public class TestModelRequestSerialization extends AbstractSerializationTest<Model> {
+
+    @Test
+    public void testDeserializationOfModelResponseWithOffsetTime(){
+        String serializedTestStringWithOffsetTime = "{\n"
+                + "\"name\": \"codellama:13b\",\n"
+                + "\"modified_at\": \"2023-11-04T14:56:49.277302595-07:00\",\n"
+                + "\"size\": 7365960935,\n"
+                + "\"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n"
+                + "\"details\": {\n"
+                + "\"format\": \"gguf\",\n"
+                + "\"family\": \"llama\",\n"
+                + "\"families\": null,\n"
+                + "\"parameter_size\": \"13B\",\n"
+                + "\"quantization_level\": \"Q4_0\"\n"
+                + "}}";
+        deserialize(serializedTestStringWithOffsetTime,Model.class);
+    }
+
+    @Test
+    public void testDeserializationOfModelResponseWithZuluTime(){
+        String serializedTestStringWithZuluTimezone = "{\n"
+                + "\"name\": \"codellama:13b\",\n"
+                + "\"modified_at\": \"2023-11-04T14:56:49.277302595Z\",\n"
+                + "\"size\": 7365960935,\n"
+                + "\"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n"
+                + "\"details\": {\n"
+                + "\"format\": \"gguf\",\n"
+                + "\"family\": \"llama\",\n"
+                + "\"families\": null,\n"
+                + "\"parameter_size\": \"13B\",\n"
+                + "\"quantization_level\": \"Q4_0\"\n"
+                + "}}";
+        deserialize(serializedTestStringWithZuluTimezone,Model.class);
+    }
+
+}