forked from Mirror/ollama4j
Compare commits
314 Commits
Author | SHA1 | Date | |
---|---|---|---|
893e5dd763 | |||
c520604f4b | |||
a85c23d64a | |||
d32a8b7d88 | |||
![]() |
992625cf86 | ||
![]() |
bbebd26d07 | ||
![]() |
3aa0fc77cb | ||
![]() |
11a98a72a1 | ||
![]() |
422601c0fc | ||
![]() |
75e6576a13 | ||
![]() |
51dd3f3e1e | ||
![]() |
30250f79d9 | ||
![]() |
d4ee9ed051 | ||
![]() |
4412ac683a | ||
![]() |
b5b1a26941 | ||
![]() |
a84230bbd1 | ||
![]() |
00c9b16556 | ||
![]() |
9a2194334f | ||
![]() |
f9cf11ecdf | ||
![]() |
0af80865c3 | ||
![]() |
a304c01194 | ||
![]() |
887708864e | ||
![]() |
2f0c4fdcc9 | ||
![]() |
73aabd7ca6 | ||
![]() |
17ca2bdee3 | ||
![]() |
e43bd3acb4 | ||
![]() |
0b041f4340 | ||
![]() |
6c6062b757 | ||
![]() |
68fd8b7cc8 | ||
![]() |
bb6f8aa343 | ||
![]() |
12802be0bc | ||
![]() |
bd56ccfef7 | ||
![]() |
51563f276f | ||
![]() |
6e282124bf | ||
![]() |
3ab9e4c283 | ||
![]() |
2db6a22cc7 | ||
![]() |
cc69341620 | ||
![]() |
4589a9032c | ||
![]() |
da273402b5 | ||
![]() |
cfa8aa14d7 | ||
![]() |
bc4e8303aa | ||
![]() |
f2f740a2a0 | ||
![]() |
4cbb783a61 | ||
![]() |
5c9e0b7d8a | ||
![]() |
2f8577a24d | ||
![]() |
02116b7025 | ||
![]() |
f3778f8786 | ||
![]() |
c6141634db | ||
![]() |
d9f98ad901 | ||
![]() |
79d97445b8 | ||
![]() |
1c40697c96 | ||
![]() |
f03026abb3 | ||
![]() |
63a6e81ac2 | ||
![]() |
76cad0f584 | ||
![]() |
bee2908d1e | ||
![]() |
8a4c9fd969 | ||
![]() |
d470f940b0 | ||
![]() |
df402efaba | ||
![]() |
677362abbf | ||
![]() |
81689be194 | ||
![]() |
fd93036d08 | ||
![]() |
c9b05a725b | ||
![]() |
a4e1b4afe9 | ||
![]() |
3d21813abb | ||
![]() |
383d0f56ca | ||
![]() |
af1b213a76 | ||
![]() |
fed89a9643 | ||
![]() |
fd32aa33ff | ||
![]() |
b8a13e89b1 | ||
![]() |
c8f27edd6e | ||
![]() |
5a936d8174 | ||
![]() |
9b5ddbf4c4 | ||
![]() |
7c233d5734 | ||
![]() |
e85aeae6e0 | ||
![]() |
a05052e095 | ||
![]() |
10eb803e26 | ||
![]() |
bd2da8fdda | ||
![]() |
b0bb082bec | ||
![]() |
81f564ef7f | ||
![]() |
006b52f3db | ||
![]() |
16634e60e4 | ||
![]() |
db8b73075b | ||
![]() |
dc9f79959a | ||
![]() |
88f6d00763 | ||
![]() |
fd3a989a49 | ||
![]() |
7580c6a549 | ||
![]() |
9e6503d84b | ||
![]() |
ee21f7fdd8 | ||
![]() |
ecc295f484 | ||
![]() |
c528fef5fc | ||
![]() |
38f1bda105 | ||
![]() |
d8a703503a | ||
![]() |
dd9ba7c937 | ||
![]() |
cf52c9610c | ||
![]() |
e8d709e99a | ||
![]() |
51fbedad69 | ||
![]() |
953605fa73 | ||
![]() |
30bfdd9c6d | ||
![]() |
91ee6cb4c1 | ||
![]() |
8ef6fac28e | ||
![]() |
d9e3860123 | ||
![]() |
515d1f0399 | ||
![]() |
be549430c5 | ||
![]() |
4744315d45 | ||
![]() |
8eea19a539 | ||
![]() |
b5801d84e0 | ||
![]() |
165d04b1bb | ||
![]() |
16d2160b52 | ||
![]() |
e39c47b8e1 | ||
![]() |
bb0785140b | ||
![]() |
e33ad1a1e3 | ||
![]() |
cd60c506cb | ||
![]() |
b55925df28 | ||
![]() |
3a9b8c309d | ||
![]() |
bf07159522 | ||
![]() |
f8ca4d041d | ||
![]() |
9c6a55f7b0 | ||
![]() |
2866d83a2f | ||
![]() |
45e5d07581 | ||
![]() |
3a264cb6bb | ||
![]() |
e1b9d42771 | ||
![]() |
1a086c37c0 | ||
![]() |
54edba144c | ||
![]() |
3ed3187ba9 | ||
![]() |
b7cd81a7f5 | ||
![]() |
e750c2d7f9 | ||
![]() |
62f16131f3 | ||
![]() |
2cbaf12d7c | ||
![]() |
e2d555d404 | ||
![]() |
c296b34174 | ||
![]() |
e8f99f28ec | ||
![]() |
250b1abc79 | ||
![]() |
42b15ad93f | ||
![]() |
6f7a714bae | ||
![]() |
92618e5084 | ||
![]() |
391a9242c3 | ||
![]() |
e1b6dc3b54 | ||
![]() |
04124cf978 | ||
![]() |
e4e717b747 | ||
![]() |
10d2a8f5ff | ||
![]() |
899fa38805 | ||
![]() |
2df878c953 | ||
![]() |
78a5eedc8f | ||
![]() |
364f961ee2 | ||
![]() |
b21aa6add2 | ||
![]() |
ec4abd1c2d | ||
![]() |
9900ae92fb | ||
![]() |
fa20daf6e5 | ||
![]() |
44949c0559 | ||
![]() |
e88711a017 | ||
![]() |
32169ded18 | ||
![]() |
4b2d566fd9 | ||
![]() |
fb4b7a7ce5 | ||
![]() |
18f27775b0 | ||
![]() |
cb462ad05a | ||
![]() |
1eec22ca1a | ||
![]() |
c1f3c51f88 | ||
![]() |
7dd556293f | ||
![]() |
ee50131ce4 | ||
![]() |
2cd47dbfaa | ||
![]() |
e5296c1067 | ||
![]() |
0f00f05e3d | ||
![]() |
976a3b82e5 | ||
![]() |
ba26d620c4 | ||
![]() |
e45246a767 | ||
![]() |
7336668f0c | ||
![]() |
11701fb222 | ||
![]() |
b1ec12c4e9 | ||
![]() |
d0b0a0fc97 | ||
![]() |
20774fca6b | ||
![]() |
9c46b510d8 | ||
![]() |
9d887b60a8 | ||
![]() |
63d4de4e24 | ||
![]() |
9224d2da06 | ||
![]() |
a10692e2f1 | ||
![]() |
b0c152a42e | ||
![]() |
f44767e023 | ||
![]() |
aadef0a57c | ||
![]() |
777ee7ffe0 | ||
![]() |
dcf1d0bdbc | ||
![]() |
13b7111a42 | ||
![]() |
09442d37a3 | ||
![]() |
1e66bdb07f | ||
![]() |
b423090db9 | ||
![]() |
a32d94efbf | ||
![]() |
31f8302849 | ||
![]() |
6487756764 | ||
![]() |
abb76ad867 | ||
![]() |
cf4e7a96e8 | ||
![]() |
0f414f71a3 | ||
![]() |
2b700fdad8 | ||
![]() |
06c5daa253 | ||
![]() |
91aab6cbd1 | ||
![]() |
f38a00ebdc | ||
![]() |
0f73ea75ab | ||
![]() |
8fe869afdb | ||
![]() |
2d274c4f5b | ||
![]() |
713a3239a4 | ||
![]() |
a9e7958d44 | ||
![]() |
f38e84053f | ||
![]() |
7eb16b7ba0 | ||
![]() |
5a3889d8ee | ||
![]() |
2c52f4d0bb | ||
![]() |
32c4231eb5 | ||
![]() |
e9621f054d | ||
![]() |
b41b62220c | ||
![]() |
c89440cbca | ||
![]() |
1aeb555a53 | ||
![]() |
9aff3ec5d9 | ||
![]() |
b4eaf0cfb5 | ||
![]() |
199cb6082d | ||
![]() |
37bfe26a6d | ||
![]() |
3769386539 | ||
![]() |
84a6e57f42 | ||
![]() |
14d2474ee9 | ||
![]() |
ca613ed80a | ||
![]() |
bbcd458849 | ||
![]() |
bc885894f8 | ||
![]() |
bc83df6971 | ||
![]() |
43f43c9f81 | ||
![]() |
65f00defcf | ||
![]() |
d716b81342 | ||
![]() |
272ba445f6 | ||
![]() |
d9816d8869 | ||
![]() |
874736eb16 | ||
![]() |
9c16ccbf81 | ||
![]() |
40a3aa31dc | ||
![]() |
90669b611b | ||
![]() |
f10c7ac725 | ||
![]() |
38dca3cd0d | ||
![]() |
44bb35b168 | ||
![]() |
9832caf503 | ||
![]() |
0c4e8e306e | ||
![]() |
075416eb9c | ||
![]() |
4260fbbc32 | ||
![]() |
0bec697a86 | ||
![]() |
4ca6eef8fd | ||
![]() |
a635dd9be2 | ||
![]() |
14982011d9 | ||
![]() |
65d852fdc9 | ||
![]() |
d483c23c81 | ||
![]() |
273b1e47ca | ||
![]() |
5c5cdba4cd | ||
![]() |
24674ea483 | ||
![]() |
5d3a975e4c | ||
![]() |
ad670c3c62 | ||
![]() |
f9063484f3 | ||
![]() |
5e2a07ad41 | ||
![]() |
00a3e51a93 | ||
![]() |
bc20468f28 | ||
![]() |
c7ac50a805 | ||
![]() |
f8cd7bc013 | ||
![]() |
3469bf314b | ||
![]() |
9636807819 | ||
![]() |
455251d1d4 | ||
![]() |
ec00ffae7f | ||
![]() |
d969c7ad46 | ||
![]() |
02bf769188 | ||
![]() |
1c8a6b4f2a | ||
![]() |
60fe5d6ffb | ||
![]() |
327ae7437f | ||
![]() |
795b9f2b9b | ||
![]() |
54da069e68 | ||
![]() |
bfc5cebac1 | ||
![]() |
d46b1d48d8 | ||
![]() |
96320e7761 | ||
![]() |
e6472f0a81 | ||
![]() |
816bbd9bbf | ||
![]() |
da1123271d | ||
![]() |
12f099260f | ||
![]() |
35728ae208 | ||
![]() |
7dba9cc798 | ||
![]() |
bb1c920e22 | ||
![]() |
770cbd7639 | ||
![]() |
b43c9b8d93 | ||
![]() |
935964c9b0 | ||
![]() |
9aed9a5237 | ||
![]() |
6c082c94c4 | ||
![]() |
6c93b8304a | ||
![]() |
85acf0fe78 | ||
![]() |
fe64c6dd10 | ||
![]() |
b15066a204 | ||
![]() |
e2b29b6a07 | ||
![]() |
7470ebe846 | ||
![]() |
422efa68aa | ||
![]() |
f4d8671922 | ||
![]() |
70b136c9fc | ||
![]() |
7adb5e93c7 | ||
![]() |
a8b7117878 | ||
![]() |
3bd99cd1e8 | ||
![]() |
1d6af26857 | ||
![]() |
14d18d731f | ||
![]() |
c8d7cbbc2c | ||
![]() |
ef4303fbbb | ||
![]() |
2df9a9c69b | ||
![]() |
6bb5d9f644 | ||
![]() |
94b221248a | ||
![]() |
2a887f5015 | ||
![]() |
7e3dddf1bb | ||
![]() |
fe95a7df2a | ||
![]() |
98f6a30c6b | ||
![]() |
00288053bf | ||
![]() |
6a7feb98bd | ||
![]() |
770d511067 | ||
![]() |
b57fc1f818 | ||
![]() |
01c5a8f07f | ||
![]() |
243b8a3747 | ||
![]() |
987fce7f07 | ||
![]() |
657593be09 | ||
![]() |
0afba7e3e3 | ||
![]() |
ac00bb9029 | ||
![]() |
67cb444d82 | ||
![]() |
1914a29163 | ||
![]() |
00bb4e92dc |
32
.gitea/workflows/publish.yaml
Normal file
32
.gitea/workflows/publish.yaml
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
name: Build and Publish
|
||||||
|
on: push
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: standard-22.04
|
||||||
|
steps:
|
||||||
|
- name: Check out
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set Up Java
|
||||||
|
uses: actions/setup-java@v4
|
||||||
|
with:
|
||||||
|
distribution: 'temurin'
|
||||||
|
java-version: '21'
|
||||||
|
#cache: 'maven'
|
||||||
|
#server-id: 'gitea'
|
||||||
|
|
||||||
|
- name: Set up Maven
|
||||||
|
uses: stCarolas/setup-maven@v5
|
||||||
|
with:
|
||||||
|
maven-version: 3.8.2
|
||||||
|
|
||||||
|
- run: cat /root/.m2/toolchains.xml
|
||||||
|
- run: cat /root/.m2/settings.xml
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: mvn -B package --file pom.xml
|
||||||
|
|
||||||
|
- name: Publish
|
||||||
|
run: mvn deploy
|
||||||
|
|
58
.github/workflows/gh-mvn-publish.yml
vendored
Normal file
58
.github/workflows/gh-mvn-publish.yml
vendored
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
name: Release Artifacts to GitHub Maven Packages
|
||||||
|
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types: [ created ]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- name: Set up JDK 17
|
||||||
|
uses: actions/setup-java@v3
|
||||||
|
with:
|
||||||
|
java-version: '17'
|
||||||
|
distribution: 'temurin'
|
||||||
|
server-id: github
|
||||||
|
settings-path: ${{ github.workspace }}
|
||||||
|
|
||||||
|
- name: maven-settings-xml-action
|
||||||
|
uses: whelk-io/maven-settings-xml-action@v22
|
||||||
|
with:
|
||||||
|
servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]'
|
||||||
|
|
||||||
|
- name: Find and Replace
|
||||||
|
uses: jacobtomlinson/gha-find-replace@v3
|
||||||
|
with:
|
||||||
|
find: "ollama4j-revision"
|
||||||
|
replace: ${{ github.ref_name }}
|
||||||
|
regex: false
|
||||||
|
|
||||||
|
- name: Find and Replace
|
||||||
|
uses: jacobtomlinson/gha-find-replace@v3
|
||||||
|
with:
|
||||||
|
find: "mvn-repo-id"
|
||||||
|
replace: github
|
||||||
|
regex: false
|
||||||
|
|
||||||
|
- name: Import GPG key
|
||||||
|
uses: crazy-max/ghaction-import-gpg@v6
|
||||||
|
with:
|
||||||
|
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||||
|
passphrase: ${{ secrets.GPG_PASSPHRASE }}
|
||||||
|
- name: List keys
|
||||||
|
run: gpg -K
|
||||||
|
|
||||||
|
- name: Build with Maven
|
||||||
|
run: mvn --file pom.xml -U clean package -Punit-tests
|
||||||
|
|
||||||
|
- name: Publish to GitHub Packages Apache Maven
|
||||||
|
run: mvn deploy -Punit-tests -s $GITHUB_WORKSPACE/settings.xml -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=github -Drepo.user=${{ secrets.GH_MVN_USER }} -Drepo.pass=${{ secrets.GH_MVN_PASS }} -DaltDeploymentRepository=github::default::https://maven.pkg.github.com/ollama4j/ollama4j
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
119
.github/workflows/maven-publish.yml
vendored
119
.github/workflows/maven-publish.yml
vendored
@ -1,64 +1,95 @@
|
|||||||
# This workflow will build a package using Maven and then publish it to GitHub packages when a release is created
|
# This workflow will build a package using Maven and then publish it to GitHub packages when a release is created
|
||||||
# For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path
|
# For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path
|
||||||
|
|
||||||
name: Test and Publish Package
|
name: Release Artifacts to Maven Central
|
||||||
|
|
||||||
#on:
|
|
||||||
# release:
|
|
||||||
# types: [ "created" ]
|
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
release:
|
||||||
branches: [ "main" ]
|
types: [ created ]
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
|
#on:
|
||||||
|
# pull_request:
|
||||||
|
# types: [ opened, reopened ]
|
||||||
|
# branches: [ "main" ]
|
||||||
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
packages: write
|
packages: write
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
- name: Set up JDK 11
|
|
||||||
|
- name: Set up JDK 17
|
||||||
uses: actions/setup-java@v3
|
uses: actions/setup-java@v3
|
||||||
with:
|
with:
|
||||||
java-version: '11'
|
java-version: '17'
|
||||||
distribution: 'adopt-hotspot'
|
distribution: 'temurin'
|
||||||
server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
|
server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
|
||||||
settings-path: ${{ github.workspace }} # location for the settings.xml file
|
settings-path: ${{ github.workspace }} # location for the settings.xml file
|
||||||
- name: Build with Maven
|
|
||||||
run: mvn --file pom.xml -U clean package -Punit-tests
|
- name: maven-settings-xml-action
|
||||||
- name: Set up Apache Maven Central (Overwrite settings.xml)
|
uses: whelk-io/maven-settings-xml-action@v22
|
||||||
uses: actions/setup-java@v3
|
|
||||||
with: # running setup-java again overwrites the settings.xml
|
|
||||||
java-version: '11'
|
|
||||||
distribution: 'adopt-hotspot'
|
|
||||||
cache: 'maven'
|
|
||||||
server-id: ossrh
|
|
||||||
server-username: MAVEN_USERNAME
|
|
||||||
server-password: MAVEN_PASSWORD
|
|
||||||
gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
|
|
||||||
gpg-passphrase: MAVEN_GPG_PASSPHRASE
|
|
||||||
- name: Set up Maven cache
|
|
||||||
uses: actions/cache@v3
|
|
||||||
with:
|
with:
|
||||||
path: ~/.m2/repository
|
servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]'
|
||||||
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
|
|
||||||
restore-keys: |
|
- name: Import GPG key
|
||||||
${{ runner.os }}-maven-
|
uses: crazy-max/ghaction-import-gpg@v6
|
||||||
- name: Build
|
with:
|
||||||
run: mvn -B -ntp clean install
|
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||||
- name: Publish to GitHub Packages Apache Maven
|
passphrase: ${{ secrets.GPG_PASSPHRASE }}
|
||||||
# if: >
|
- name: List keys
|
||||||
# github.event_name != 'pull_request' &&
|
run: gpg -K
|
||||||
# github.ref_name == 'main' &&
|
|
||||||
# contains(github.event.head_commit.message, 'release')
|
- name: Find and Replace
|
||||||
run: |
|
uses: jacobtomlinson/gha-find-replace@v3
|
||||||
git config --global user.email "koujalgi.amith@gmail.com"
|
with:
|
||||||
git config --global user.name "amithkoujalgi"
|
find: "ollama4j-revision"
|
||||||
mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform
|
replace: ${{ github.ref_name }}
|
||||||
|
regex: false
|
||||||
|
|
||||||
|
- name: Find and Replace
|
||||||
|
uses: jacobtomlinson/gha-find-replace@v3
|
||||||
|
with:
|
||||||
|
find: "mvn-repo-id"
|
||||||
|
replace: central
|
||||||
|
regex: false
|
||||||
|
|
||||||
|
- name: Publish to Maven Central
|
||||||
|
run: mvn deploy -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=central -Drepo.user=${{ secrets.MVN_USER }} -Drepo.pass=${{ secrets.MVN_PASS }}
|
||||||
|
|
||||||
|
- name: Upload Release Asset - JAR
|
||||||
|
uses: actions/upload-release-asset@v1
|
||||||
env:
|
env:
|
||||||
MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
|
with:
|
||||||
MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
|
upload_url: ${{ github.event.release.upload_url }}
|
||||||
|
asset_path: target/ollama4j-${{ github.ref_name }}.jar
|
||||||
|
asset_name: ollama4j-${{ github.ref_name }}.jar
|
||||||
|
asset_content_type: application/x-jar
|
||||||
|
|
||||||
|
- name: Upload Release Asset - Javadoc JAR
|
||||||
|
uses: actions/upload-release-asset@v1
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
with:
|
||||||
|
upload_url: ${{ github.event.release.upload_url }}
|
||||||
|
asset_path: target/ollama4j-${{ github.ref_name }}-javadoc.jar
|
||||||
|
asset_name: ollama4j-${{ github.ref_name }}-javadoc.jar
|
||||||
|
asset_content_type: application/x-jar
|
||||||
|
|
||||||
|
- name: Upload Release Asset - Sources JAR
|
||||||
|
uses: actions/upload-release-asset@v1
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
with:
|
||||||
|
upload_url: ${{ github.event.release.upload_url }}
|
||||||
|
asset_path: target/ollama4j-${{ github.ref_name }}-sources.jar
|
||||||
|
asset_name: ollama4j-${{ github.ref_name }}-sources.jar
|
||||||
|
asset_content_type: application/x-jar
|
18
.github/workflows/publish-docs.yml
vendored
18
.github/workflows/publish-docs.yml
vendored
@ -2,9 +2,8 @@
|
|||||||
name: Deploy Docs to GH Pages
|
name: Deploy Docs to GH Pages
|
||||||
|
|
||||||
on:
|
on:
|
||||||
# Runs on pushes targeting the default branch
|
release:
|
||||||
push:
|
types: [ created ]
|
||||||
branches: [ "main" ]
|
|
||||||
|
|
||||||
# Allows you to run this workflow manually from the Actions tab
|
# Allows you to run this workflow manually from the Actions tab
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
@ -47,9 +46,22 @@ jobs:
|
|||||||
- run: cd docs && npm ci
|
- run: cd docs && npm ci
|
||||||
- run: cd docs && npm run build
|
- run: cd docs && npm run build
|
||||||
|
|
||||||
|
- name: Find and Replace
|
||||||
|
uses: jacobtomlinson/gha-find-replace@v3
|
||||||
|
with:
|
||||||
|
find: "ollama4j-revision"
|
||||||
|
replace: ${{ github.ref_name }}
|
||||||
|
regex: false
|
||||||
|
|
||||||
- name: Build with Maven
|
- name: Build with Maven
|
||||||
run: mvn --file pom.xml -U clean package && cp -r ./target/apidocs/. ./docs/build/apidocs
|
run: mvn --file pom.xml -U clean package && cp -r ./target/apidocs/. ./docs/build/apidocs
|
||||||
|
|
||||||
|
- name: Doxygen Action
|
||||||
|
uses: mattnotmitt/doxygen-action@v1.1.0
|
||||||
|
with:
|
||||||
|
doxyfile-path: "./Doxyfile"
|
||||||
|
working-directory: "."
|
||||||
|
|
||||||
- name: Setup Pages
|
- name: Setup Pages
|
||||||
uses: actions/configure-pages@v3
|
uses: actions/configure-pages@v3
|
||||||
- name: Upload artifact
|
- name: Upload artifact
|
||||||
|
52
.github/workflows/publish-javadoc.yml
vendored
52
.github/workflows/publish-javadoc.yml
vendored
@ -1,52 +0,0 @@
|
|||||||
# Simple workflow for deploying static content to GitHub Pages
|
|
||||||
name: Deploy Javadoc content to Pages
|
|
||||||
|
|
||||||
on:
|
|
||||||
# Runs on pushes targeting the default branch
|
|
||||||
push:
|
|
||||||
branches: [ "none" ]
|
|
||||||
|
|
||||||
# Allows you to run this workflow manually from the Actions tab
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
pages: write
|
|
||||||
id-token: write
|
|
||||||
packages: write
|
|
||||||
# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
|
|
||||||
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
|
|
||||||
concurrency:
|
|
||||||
group: "pages"
|
|
||||||
cancel-in-progress: false
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
# Single deploy job since we're just deploying
|
|
||||||
deploy:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
environment:
|
|
||||||
name: github-pages
|
|
||||||
url: ${{ steps.deployment.outputs.page_url }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: Set up JDK 11
|
|
||||||
uses: actions/setup-java@v3
|
|
||||||
with:
|
|
||||||
java-version: '11'
|
|
||||||
distribution: 'adopt-hotspot'
|
|
||||||
server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
|
|
||||||
settings-path: ${{ github.workspace }} # location for the settings.xml file
|
|
||||||
- name: Build with Maven
|
|
||||||
run: mvn --file pom.xml -U clean package
|
|
||||||
- name: Setup Pages
|
|
||||||
uses: actions/configure-pages@v3
|
|
||||||
- name: Upload artifact
|
|
||||||
uses: actions/upload-pages-artifact@v2
|
|
||||||
with:
|
|
||||||
# Upload entire repository
|
|
||||||
path: './target/apidocs/.'
|
|
||||||
- name: Deploy to GitHub Pages
|
|
||||||
id: deployment
|
|
||||||
uses: actions/deploy-pages@v2
|
|
6
.gitignore
vendored
6
.gitignore
vendored
@ -37,6 +37,8 @@ build/
|
|||||||
### Mac OS ###
|
### Mac OS ###
|
||||||
.DS_Store
|
.DS_Store
|
||||||
/.idea/
|
/.idea/
|
||||||
/src/main/java/io/github/amithkoujalgi/ollama4j/core/localtests/
|
|
||||||
pom.xml.*
|
pom.xml.*
|
||||||
release.properties
|
release.properties
|
||||||
|
!.idea/icon.svg
|
||||||
|
|
||||||
|
src/main/java/io/github/ollama4j/localtests
|
18
.idea/icon.svg
generated
Normal file
18
.idea/icon.svg
generated
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<svg version="1.1" viewBox="0 0 1478 2048" width="1280" height="1280" xmlns="http://www.w3.org/2000/svg">
|
||||||
|
<path transform="translate(0)" d="m0 0h1478v2048h-1478z" fill="#FEFEFE"/>
|
||||||
|
<path transform="translate(411,47)" d="m0 0h24l21 5 17 8 14 10 12 11 10 10 12 16 14 24 11 24 9 24 8 27 6 25 4 21 3 19 3 25 6-2 16-9 29-13 28-10 30-8 26-4 27-2h16l30 2 32 5 19 5 30 10 26 11 20 10 13 8 2-15 6-39 8-36 6-20 9-27 11-24 10-19 12-18 9-11 9-10 12-11 17-11 15-7 19-4h24l18 4 16 7 12 8 10 8 17 17 13 18 12 22 9 20 7 19 9 30 7 33 5 33 3 29 1 15v79l-3 30-4 29-4 20 16 15 17 17 8 7 18 18 9 11 10 12 14 21 9 16 8 16 5 17 7 19 10 34 5 27 3 24 1 14v42l-4 35-6 29-8 27-9 22-12 25-13 22-5 7 2 6 14 29 12 31 8 26 7 29 6 36 2 21 1 19v37l-3 34-4 25-5 24-8 27-8 21-7 16-11 21-15 24 2 5 7 10 8 15 11 29 8 29 6 31 3 22 2 24v57l-4 33-6 27-3 9-3 1h-89l-2-1v-11l2-13 6-21 3-19 1-9v-48l-3-31-4-22-7-27-6-16-8-16-12-21-4-11-3-17v-31l4-13 6-10 11-16 9-15 11-23 10-31 6-26 3-22 1-16v-33l-2-27-4-27-10-39-9-25-8-18-13-25-12-19-4-10-1-5v-13l3-11 4-8 9-10 13-17 8-13 8-14 11-27 7-25 4-21 2-20v-27l-2-22-5-27-6-21-8-22-12-25-8-14-11-16-8-10-11-13-13-13-8-7-17-13-18-11-17-9-15-6-23-7-14-3-17-2h-28l-18 2h-18l-10-3-6-5-16-32-8-14-11-15-8-10-9-10-7-7-14-11-12-9-16-10-19-10-13-6-20-8-17-5-24-5-15-2h-33l-25 4-24 6-22 8-20 9-20 11-19 13-10 8-11 9-13 13-13 17-10 15-10 18-8 18-9 10-6 3h-21l-19-2h-29l-20 3-14 3-27 9-21 10-18 11-16 12-15 13-15 15-11 14-12 17-10 17-8 16-10 25-7 24-5 24-3 25v31l4 30 5 21 9 27 12 25 10 16 7 9 16 15 6 12 3 9v15l-6 16-13 21-14 27-8 20-8 25-7 27-4 23-3 31v35l3 32 5 26 9 30 6 15 10 21 11 17 12 16 8 13 4 13v19l-4 13-12 22-9 15-8 16-7 19-7 26-5 30-2 23v42l3 26 5 22 3 12 1 9v10l-3 1h-81l-11-1-5-21-5-30-2-22v-52l2-25 5-34 5-23 7-25 8-21 11-23 9-12-1-5-14-22-10-19-11-25-10-30-6-24-5-29-3-27-1-17v-35l2-30 4-29 5-26 10-36 9-25 10-23 10-21-1-7-10-14-14-26-7-15-8-20-8-26-6-29-3-25v-66l3-27 7-33 9-29 10-25 8-16 9-17 11-17 11-15 11-13 7-8 56-56-1-6-2-5-4-26-3-32-1-17v-69l3-39 5-35 6-29 8-30 8-23 12-27 12-21 12-16 11-12 7-7 13-10 16-9 11-4z" fill="#010000"/>
|
||||||
|
<path transform="translate(856,1181)" d="m0 0h13l10 4 6 7 4 9 6 29 5 22 8 16 4-13 7-23 5-12 6-9 9-8 7-3 5-1h10l8 4 5 8v11l-6 17-6 15-4 16v22l8 38 1 9v11l-3 16-8 16-9 9-10 8-6 7-4 8-2 7-1 12v51l-2 17-4 13-11 20-5 15-3 17v21l3 17 6 16 11 28 13 38 10 37 7 33 5 33 3 28 1 18v49l-2 24-4 22-6 18-6 10-7 8-10 6-13 4h-17l-7-4-10-9-11-15-11-16-12-17-9-11-9-10-10-9-13-8-14-5-5-1h-26l-16 4-18 8-18 11-16 12-16 13-17 14-20 15-16 9-13 4h-11l-10-3-7-6-4-8-2-9v-39l2-25-6 8-2 1h-8l-13-4-8-7-4-7v-9l6-12 8-10 9-11 9-14 5-12 2-11v-17l-4-20-6-21-2-13v-16l2-12 8-16 9-13 12-16 13-21 8-17 9-27 4-20 4-39 3-39 3-63v-98l-3-35-3-13 5 2 16 11 13 10 11 9 14 12 17 16 33 33 7 8 12 13 9 11 12 14 8 10 10 13 12 16 13 18 18 27 12 19 6 8 6 4 9 1 12-3 10-6 8-11 4-11v-33l-3-17-4-11-5-7-6-3-15-4-16-9-16-8-4-1h-12l-23 5-8-1-7-6-4-10v-10l4-8 9-8 13-6 13-4 10-1-9-11-8-10-10-15-8-16-7-15-9-27-1-5v-13l3-8 8-8 9-4 6-1 8 3 7 9 15 31 8 12 8 9 2 1-6-21-4-20-1-8v-33l3-10 4-5z" fill="#020101"/>
|
||||||
|
<path transform="translate(735,724)" d="m0 0h30l24 2 27 4 20 5 27 9 29 14 18 11 16 12 11 9 15 14 12 14 10 14 9 15 7 14 7 19 5 20 2 14v34l-3 20-6 19-6 15-11 19-9 12-11 13-15 15-11 9-16 11-22 12-26 10-13 4-21 5-19 2h-117l-24-3-27-6-28-10-16-8-14-8-14-10-10-8-10-9-10-10-11-14-10-15-10-21-6-18-4-19-1-9v-31l2-15 5-20 8-21 10-19 8-12 10-13 12-13 13-13 11-9 15-11 15-9 14-8 21-9 16-6 22-6 29-5z" fill="#FEFEFE"/>
|
||||||
|
<path transform="translate(816,1496)" d="m0 0 5 1 13 21 10 18 14 27 15 31 17 40 10 27 12 36 8 28 7 30 5 28 3 28v60l-2 31-3 23-5 17-4 6-5 4-4 1h-14l-6-4-11-14-10-15-12-17-9-11-12-14-8-7-14-10-16-8-12-4-12-2h-20l-16 3-15 5-16 8-18 12-14 11-15 13-14 13-22 18-14 7-4 1h-7l-5-6-3-13v-29l3-32 6-45 11-66 20-100 13-61 2-6 11-7 4-2 7 11 10 10 13 8 18 6 6 1h25l17-4 16-7 13-9 7-6 9-11 8-14 5-15 2-10v-20l-3-11z" fill="#FEFEFE"/>
|
||||||
|
<path transform="translate(735,724)" d="m0 0h30l24 2 27 4 20 5 27 9 29 14 18 11 16 12 11 9 15 14 12 14 10 14 9 15 7 14 7 19 5 20 2 14v34l-3 20-6 19-6 15-11 19-9 12-11 13-15 15-11 9-16 11-22 12-26 10-13 4-21 5-19 2h-117l-24-3-27-6-28-10-16-8-14-8-14-10-10-8-10-9-10-10-11-14-10-15-10-21-6-18-4-19-1-9v-31l2-15 5-20 8-21 10-19 8-12 10-13 12-13 13-13 11-9 15-11 15-9 14-8 21-9 16-6 22-6 29-5zm0 63-20 2-20 4-29 10-17 8-17 10-17 13-15 14-9 11-9 14-9 19-6 20-2 14v11l3 16 6 18 7 14 8 11 11 12 10 9 18 12 16 8 15 6 25 6 15 2 14 1h89l21-3 25-6 26-11 15-9 10-8 10-9 8-8 12-18 6-13 5-16 2-12v-15l-2-14-5-16-5-12-7-13-12-16-12-13-8-7-16-12-14-8-15-8-28-10-21-5-14-2-13-1z" fill="#010101"/>
|
||||||
|
<path transform="translate(1081,140)" d="m0 0h5l5 4 9 11 11 19 11 28 6 21 7 32 4 27 3 42v49l-3 47-1 4-6-1-10-4-22-4-44-6-27-2-9-15-2-5v-40l2-34 5-38 8-38 5-20 11-29 11-23 7-10 11-13z" fill="#FEFEFE"/>
|
||||||
|
<path transform="translate(423,139)" d="m0 0 4 2 10 10 10 14 11 22 9 24 7 25 6 29 5 30 3 31 1 16v45l-6 14-5 6-29 2-31 4-35 6-11 4h-3l-3-28-1-27v-41l2-36 5-35 8-37 6-19 8-21 8-16 8-12 8-9z" fill="#FEFEFE"/>
|
||||||
|
<path transform="translate(745,1472)" d="m0 0h9l16 3 14 7 10 9 6 10 3 9 1 6v15l-4 14-8 16-9 10-9 8-15 8-12 4-10 2h-15l-13-3-16-8-11-10-6-10-5-12-2-11v-8l2-10h2l1-5 4-8 8-10 11-9 17-9 12-5 8-2z" fill="red"/>
|
||||||
|
<path transform="translate(436,735)" d="m0 0h16l15 4 12 7 10 9 7 9 5 11 2 8v21l-4 14-6 12-7 9-14 14-11 7-12 4h-15l-14-3-11-4-11-7-9-10-8-14-2-9v-21l4-14 8-16 6-9 10-10 14-8 9-3z" fill="#010101"/>
|
||||||
|
<path transform="translate(1055,735)" d="m0 0h15l16 4 11 6 10 8 7 9 8 15 5 14 1 6v20l-4 13-7 11-7 8-14 9-16 5-5 1h-16l-13-4-11-7-17-17-8-14-5-14-1-5v-20l4-13 6-10 9-10 11-8 11-5z" fill="#010101"/>
|
||||||
|
<path transform="translate(717,869)" d="m0 0h9l12 4 13 8 5-1 8-6 9-4 12-1 10 3 6 4 6 9 1 2v15l-5 10-8 7-11 8-6 4-1 6 3 17v19l-5 8-9 6-8 2h-10l-11-2-8-6-4-6-1-3v-15l3-19v-7l-16-10-11-11-3-5-1-4v-13l5-10 6-5z" fill="#020101"/>
|
||||||
|
<path transform="translate(717,1479)" d="m0 0 2 1-2 3h2v4 2l6 1 2 1 3 13-1 10-5 10h-2v2h-2v2h-2v2l-5 2-3 2-9 2v-2l-5 1-9-5-5-4v-2h-2l-2-2-6 3 1-7 5-10 8-10 11-9 17-9z" fill="pink"/>
|
||||||
|
<path transform="translate(599,1667)" d="m0 0 4 1v14l-9 48-3 19-2 1-8-20-3-11v-15l5-15 8-14 6-7z" fill="white"/>
|
||||||
|
<path transform="translate(937,1063)" d="m0 0 2 1-11 9-15 10-19 10-26 10-13 4-21 5-19 2h-117l-9-1v-1h82l37-1 18-2 32-7 14-5 16-6 10-4 17-9 11-7z" fill="#553D3C"/>
|
||||||
|
</svg>
|
After Width: | Height: | Size: 6.1 KiB |
128
CODE_OF_CONDUCT.md
Normal file
128
CODE_OF_CONDUCT.md
Normal file
@ -0,0 +1,128 @@
|
|||||||
|
# Contributor Covenant Code of Conduct
|
||||||
|
|
||||||
|
## Our Pledge
|
||||||
|
|
||||||
|
We as members, contributors, and leaders pledge to make participation in our
|
||||||
|
community a harassment-free experience for everyone, regardless of age, body
|
||||||
|
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||||
|
identity and expression, level of experience, education, socio-economic status,
|
||||||
|
nationality, personal appearance, race, religion, or sexual identity
|
||||||
|
and orientation.
|
||||||
|
|
||||||
|
We pledge to act and interact in ways that contribute to an open, welcoming,
|
||||||
|
diverse, inclusive, and healthy community.
|
||||||
|
|
||||||
|
## Our Standards
|
||||||
|
|
||||||
|
Examples of behavior that contributes to a positive environment for our
|
||||||
|
community include:
|
||||||
|
|
||||||
|
* Demonstrating empathy and kindness toward other people
|
||||||
|
* Being respectful of differing opinions, viewpoints, and experiences
|
||||||
|
* Giving and gracefully accepting constructive feedback
|
||||||
|
* Accepting responsibility and apologizing to those affected by our mistakes,
|
||||||
|
and learning from the experience
|
||||||
|
* Focusing on what is best not just for us as individuals, but for the
|
||||||
|
overall community
|
||||||
|
|
||||||
|
Examples of unacceptable behavior include:
|
||||||
|
|
||||||
|
* The use of sexualized language or imagery, and sexual attention or
|
||||||
|
advances of any kind
|
||||||
|
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||||
|
* Public or private harassment
|
||||||
|
* Publishing others' private information, such as a physical or email
|
||||||
|
address, without their explicit permission
|
||||||
|
* Other conduct which could reasonably be considered inappropriate in a
|
||||||
|
professional setting
|
||||||
|
|
||||||
|
## Enforcement Responsibilities
|
||||||
|
|
||||||
|
Community leaders are responsible for clarifying and enforcing our standards of
|
||||||
|
acceptable behavior and will take appropriate and fair corrective action in
|
||||||
|
response to any behavior that they deem inappropriate, threatening, offensive,
|
||||||
|
or harmful.
|
||||||
|
|
||||||
|
Community leaders have the right and responsibility to remove, edit, or reject
|
||||||
|
comments, commits, code, wiki edits, issues, and other contributions that are
|
||||||
|
not aligned to this Code of Conduct, and will communicate reasons for moderation
|
||||||
|
decisions when appropriate.
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
This Code of Conduct applies within all community spaces, and also applies when
|
||||||
|
an individual is officially representing the community in public spaces.
|
||||||
|
Examples of representing our community include using an official e-mail address,
|
||||||
|
posting via an official social media account, or acting as an appointed
|
||||||
|
representative at an online or offline event.
|
||||||
|
|
||||||
|
## Enforcement
|
||||||
|
|
||||||
|
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||||
|
reported to the community leaders responsible for enforcement at
|
||||||
|
koujalgi.amith@gmail.com.
|
||||||
|
All complaints will be reviewed and investigated promptly and fairly.
|
||||||
|
|
||||||
|
All community leaders are obligated to respect the privacy and security of the
|
||||||
|
reporter of any incident.
|
||||||
|
|
||||||
|
## Enforcement Guidelines
|
||||||
|
|
||||||
|
Community leaders will follow these Community Impact Guidelines in determining
|
||||||
|
the consequences for any action they deem in violation of this Code of Conduct:
|
||||||
|
|
||||||
|
### 1. Correction
|
||||||
|
|
||||||
|
**Community Impact**: Use of inappropriate language or other behavior deemed
|
||||||
|
unprofessional or unwelcome in the community.
|
||||||
|
|
||||||
|
**Consequence**: A private, written warning from community leaders, providing
|
||||||
|
clarity around the nature of the violation and an explanation of why the
|
||||||
|
behavior was inappropriate. A public apology may be requested.
|
||||||
|
|
||||||
|
### 2. Warning
|
||||||
|
|
||||||
|
**Community Impact**: A violation through a single incident or series
|
||||||
|
of actions.
|
||||||
|
|
||||||
|
**Consequence**: A warning with consequences for continued behavior. No
|
||||||
|
interaction with the people involved, including unsolicited interaction with
|
||||||
|
those enforcing the Code of Conduct, for a specified period of time. This
|
||||||
|
includes avoiding interactions in community spaces as well as external channels
|
||||||
|
like social media. Violating these terms may lead to a temporary or
|
||||||
|
permanent ban.
|
||||||
|
|
||||||
|
### 3. Temporary Ban
|
||||||
|
|
||||||
|
**Community Impact**: A serious violation of community standards, including
|
||||||
|
sustained inappropriate behavior.
|
||||||
|
|
||||||
|
**Consequence**: A temporary ban from any sort of interaction or public
|
||||||
|
communication with the community for a specified period of time. No public or
|
||||||
|
private interaction with the people involved, including unsolicited interaction
|
||||||
|
with those enforcing the Code of Conduct, is allowed during this period.
|
||||||
|
Violating these terms may lead to a permanent ban.
|
||||||
|
|
||||||
|
### 4. Permanent Ban
|
||||||
|
|
||||||
|
**Community Impact**: Demonstrating a pattern of violation of community
|
||||||
|
standards, including sustained inappropriate behavior, harassment of an
|
||||||
|
individual, or aggression toward or disparagement of classes of individuals.
|
||||||
|
|
||||||
|
**Consequence**: A permanent ban from any sort of public interaction within
|
||||||
|
the community.
|
||||||
|
|
||||||
|
## Attribution
|
||||||
|
|
||||||
|
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||||
|
version 2.0, available at
|
||||||
|
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
|
||||||
|
|
||||||
|
Community Impact Guidelines were inspired by [Mozilla's code of conduct
|
||||||
|
enforcement ladder](https://github.com/mozilla/diversity).
|
||||||
|
|
||||||
|
[homepage]: https://www.contributor-covenant.org
|
||||||
|
|
||||||
|
For answers to common questions about this code of conduct, see the FAQ at
|
||||||
|
https://www.contributor-covenant.org/faq. Translations are available at
|
||||||
|
https://www.contributor-covenant.org/translations.
|
413
Doxyfile
Normal file
413
Doxyfile
Normal file
@ -0,0 +1,413 @@
|
|||||||
|
# Doxyfile 1.10.0
|
||||||
|
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Project related configuration options
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
DOXYFILE_ENCODING = UTF-8
|
||||||
|
PROJECT_NAME = "Ollama4j"
|
||||||
|
PROJECT_NUMBER =
|
||||||
|
PROJECT_BRIEF = "A Java library (wrapper/binding) for Ollama server."
|
||||||
|
PROJECT_LOGO = ./logo-small.png
|
||||||
|
PROJECT_ICON = ./logo-small.png
|
||||||
|
OUTPUT_DIRECTORY = ./docs/build/doxygen
|
||||||
|
CREATE_SUBDIRS = NO
|
||||||
|
CREATE_SUBDIRS_LEVEL = 8
|
||||||
|
ALLOW_UNICODE_NAMES = NO
|
||||||
|
OUTPUT_LANGUAGE = English
|
||||||
|
BRIEF_MEMBER_DESC = YES
|
||||||
|
REPEAT_BRIEF = YES
|
||||||
|
ABBREVIATE_BRIEF = "The $name class" \
|
||||||
|
"The $name widget" \
|
||||||
|
"The $name file" \
|
||||||
|
is \
|
||||||
|
provides \
|
||||||
|
specifies \
|
||||||
|
contains \
|
||||||
|
represents \
|
||||||
|
a \
|
||||||
|
an \
|
||||||
|
the
|
||||||
|
ALWAYS_DETAILED_SEC = NO
|
||||||
|
INLINE_INHERITED_MEMB = NO
|
||||||
|
FULL_PATH_NAMES = YES
|
||||||
|
STRIP_FROM_PATH =
|
||||||
|
STRIP_FROM_INC_PATH =
|
||||||
|
SHORT_NAMES = NO
|
||||||
|
JAVADOC_AUTOBRIEF = NO
|
||||||
|
JAVADOC_BANNER = NO
|
||||||
|
QT_AUTOBRIEF = NO
|
||||||
|
MULTILINE_CPP_IS_BRIEF = NO
|
||||||
|
PYTHON_DOCSTRING = YES
|
||||||
|
INHERIT_DOCS = YES
|
||||||
|
SEPARATE_MEMBER_PAGES = NO
|
||||||
|
TAB_SIZE = 4
|
||||||
|
ALIASES =
|
||||||
|
OPTIMIZE_OUTPUT_FOR_C = NO
|
||||||
|
OPTIMIZE_OUTPUT_JAVA = YES
|
||||||
|
OPTIMIZE_FOR_FORTRAN = NO
|
||||||
|
OPTIMIZE_OUTPUT_VHDL = NO
|
||||||
|
OPTIMIZE_OUTPUT_SLICE = NO
|
||||||
|
EXTENSION_MAPPING =
|
||||||
|
MARKDOWN_SUPPORT = YES
|
||||||
|
TOC_INCLUDE_HEADINGS = 5
|
||||||
|
MARKDOWN_ID_STYLE = DOXYGEN
|
||||||
|
AUTOLINK_SUPPORT = YES
|
||||||
|
BUILTIN_STL_SUPPORT = NO
|
||||||
|
CPP_CLI_SUPPORT = NO
|
||||||
|
SIP_SUPPORT = NO
|
||||||
|
IDL_PROPERTY_SUPPORT = YES
|
||||||
|
DISTRIBUTE_GROUP_DOC = NO
|
||||||
|
GROUP_NESTED_COMPOUNDS = NO
|
||||||
|
SUBGROUPING = YES
|
||||||
|
INLINE_GROUPED_CLASSES = NO
|
||||||
|
INLINE_SIMPLE_STRUCTS = NO
|
||||||
|
TYPEDEF_HIDES_STRUCT = NO
|
||||||
|
LOOKUP_CACHE_SIZE = 0
|
||||||
|
NUM_PROC_THREADS = 1
|
||||||
|
TIMESTAMP = NO
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Build related configuration options
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
EXTRACT_ALL = YES
|
||||||
|
EXTRACT_PRIVATE = NO
|
||||||
|
EXTRACT_PRIV_VIRTUAL = NO
|
||||||
|
EXTRACT_PACKAGE = NO
|
||||||
|
EXTRACT_STATIC = NO
|
||||||
|
EXTRACT_LOCAL_CLASSES = YES
|
||||||
|
EXTRACT_LOCAL_METHODS = NO
|
||||||
|
EXTRACT_ANON_NSPACES = NO
|
||||||
|
RESOLVE_UNNAMED_PARAMS = YES
|
||||||
|
HIDE_UNDOC_MEMBERS = NO
|
||||||
|
HIDE_UNDOC_CLASSES = NO
|
||||||
|
HIDE_FRIEND_COMPOUNDS = NO
|
||||||
|
HIDE_IN_BODY_DOCS = NO
|
||||||
|
INTERNAL_DOCS = NO
|
||||||
|
CASE_SENSE_NAMES = SYSTEM
|
||||||
|
HIDE_SCOPE_NAMES = NO
|
||||||
|
HIDE_COMPOUND_REFERENCE= NO
|
||||||
|
SHOW_HEADERFILE = YES
|
||||||
|
SHOW_INCLUDE_FILES = YES
|
||||||
|
SHOW_GROUPED_MEMB_INC = NO
|
||||||
|
FORCE_LOCAL_INCLUDES = NO
|
||||||
|
INLINE_INFO = YES
|
||||||
|
SORT_MEMBER_DOCS = YES
|
||||||
|
SORT_BRIEF_DOCS = NO
|
||||||
|
SORT_MEMBERS_CTORS_1ST = NO
|
||||||
|
SORT_GROUP_NAMES = NO
|
||||||
|
SORT_BY_SCOPE_NAME = NO
|
||||||
|
STRICT_PROTO_MATCHING = NO
|
||||||
|
GENERATE_TODOLIST = YES
|
||||||
|
GENERATE_TESTLIST = YES
|
||||||
|
GENERATE_BUGLIST = YES
|
||||||
|
GENERATE_DEPRECATEDLIST= YES
|
||||||
|
ENABLED_SECTIONS =
|
||||||
|
MAX_INITIALIZER_LINES = 30
|
||||||
|
SHOW_USED_FILES = YES
|
||||||
|
SHOW_FILES = YES
|
||||||
|
SHOW_NAMESPACES = YES
|
||||||
|
FILE_VERSION_FILTER =
|
||||||
|
LAYOUT_FILE =
|
||||||
|
CITE_BIB_FILES =
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options related to warning and progress messages
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
QUIET = NO
|
||||||
|
WARNINGS = YES
|
||||||
|
WARN_IF_UNDOCUMENTED = YES
|
||||||
|
WARN_IF_DOC_ERROR = YES
|
||||||
|
WARN_IF_INCOMPLETE_DOC = YES
|
||||||
|
WARN_NO_PARAMDOC = NO
|
||||||
|
WARN_IF_UNDOC_ENUM_VAL = NO
|
||||||
|
WARN_AS_ERROR = NO
|
||||||
|
WARN_FORMAT = "$file:$line: $text"
|
||||||
|
WARN_LINE_FORMAT = "at line $line of file $file"
|
||||||
|
WARN_LOGFILE =
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options related to the input files
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
INPUT = ./src/main
|
||||||
|
INPUT_ENCODING = UTF-8
|
||||||
|
INPUT_FILE_ENCODING =
|
||||||
|
FILE_PATTERNS = *.c \
|
||||||
|
*.cc \
|
||||||
|
*.cxx \
|
||||||
|
*.cxxm \
|
||||||
|
*.cpp \
|
||||||
|
*.cppm \
|
||||||
|
*.ccm \
|
||||||
|
*.c++ \
|
||||||
|
*.c++m \
|
||||||
|
*.java \
|
||||||
|
*.ii \
|
||||||
|
*.ixx \
|
||||||
|
*.ipp \
|
||||||
|
*.i++ \
|
||||||
|
*.inl \
|
||||||
|
*.idl \
|
||||||
|
*.ddl \
|
||||||
|
*.odl \
|
||||||
|
*.h \
|
||||||
|
*.hh \
|
||||||
|
*.hxx \
|
||||||
|
*.hpp \
|
||||||
|
*.h++ \
|
||||||
|
*.ixx \
|
||||||
|
*.l \
|
||||||
|
*.cs \
|
||||||
|
*.d \
|
||||||
|
*.php \
|
||||||
|
*.php4 \
|
||||||
|
*.php5 \
|
||||||
|
*.phtml \
|
||||||
|
*.inc \
|
||||||
|
*.m \
|
||||||
|
*.markdown \
|
||||||
|
*.md \
|
||||||
|
*.mm \
|
||||||
|
*.dox \
|
||||||
|
*.py \
|
||||||
|
*.pyw \
|
||||||
|
*.f90 \
|
||||||
|
*.f95 \
|
||||||
|
*.f03 \
|
||||||
|
*.f08 \
|
||||||
|
*.f18 \
|
||||||
|
*.f \
|
||||||
|
*.for \
|
||||||
|
*.vhd \
|
||||||
|
*.vhdl \
|
||||||
|
*.ucf \
|
||||||
|
*.qsf \
|
||||||
|
*.ice
|
||||||
|
RECURSIVE = YES
|
||||||
|
EXCLUDE =
|
||||||
|
EXCLUDE_SYMLINKS = NO
|
||||||
|
EXCLUDE_PATTERNS =
|
||||||
|
EXCLUDE_SYMBOLS =
|
||||||
|
EXAMPLE_PATH =
|
||||||
|
EXAMPLE_PATTERNS = *
|
||||||
|
EXAMPLE_RECURSIVE = NO
|
||||||
|
IMAGE_PATH =
|
||||||
|
INPUT_FILTER =
|
||||||
|
FILTER_PATTERNS =
|
||||||
|
FILTER_SOURCE_FILES = NO
|
||||||
|
FILTER_SOURCE_PATTERNS =
|
||||||
|
USE_MDFILE_AS_MAINPAGE =
|
||||||
|
FORTRAN_COMMENT_AFTER = 72
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options related to source browsing
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
SOURCE_BROWSER = YES
|
||||||
|
INLINE_SOURCES = NO
|
||||||
|
STRIP_CODE_COMMENTS = YES
|
||||||
|
REFERENCED_BY_RELATION = NO
|
||||||
|
REFERENCES_RELATION = NO
|
||||||
|
REFERENCES_LINK_SOURCE = YES
|
||||||
|
SOURCE_TOOLTIPS = YES
|
||||||
|
USE_HTAGS = NO
|
||||||
|
VERBATIM_HEADERS = YES
|
||||||
|
CLANG_ASSISTED_PARSING = NO
|
||||||
|
CLANG_ADD_INC_PATHS = YES
|
||||||
|
CLANG_OPTIONS =
|
||||||
|
CLANG_DATABASE_PATH =
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options related to the alphabetical class index
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
ALPHABETICAL_INDEX = YES
|
||||||
|
IGNORE_PREFIX =
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options related to the HTML output
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
GENERATE_HTML = YES
|
||||||
|
HTML_OUTPUT = html
|
||||||
|
HTML_FILE_EXTENSION = .html
|
||||||
|
HTML_HEADER =
|
||||||
|
HTML_FOOTER =
|
||||||
|
HTML_STYLESHEET =
|
||||||
|
HTML_EXTRA_STYLESHEET =
|
||||||
|
HTML_EXTRA_FILES =
|
||||||
|
HTML_COLORSTYLE = LIGHT
|
||||||
|
HTML_COLORSTYLE_HUE = 220
|
||||||
|
HTML_COLORSTYLE_SAT = 100
|
||||||
|
HTML_COLORSTYLE_GAMMA = 80
|
||||||
|
HTML_DYNAMIC_MENUS = YES
|
||||||
|
HTML_DYNAMIC_SECTIONS = NO
|
||||||
|
HTML_CODE_FOLDING = YES
|
||||||
|
HTML_COPY_CLIPBOARD = YES
|
||||||
|
HTML_PROJECT_COOKIE =
|
||||||
|
HTML_INDEX_NUM_ENTRIES = 100
|
||||||
|
GENERATE_DOCSET = NO
|
||||||
|
DOCSET_FEEDNAME = "Doxygen generated docs"
|
||||||
|
DOCSET_FEEDURL =
|
||||||
|
DOCSET_BUNDLE_ID = org.doxygen.Project
|
||||||
|
DOCSET_PUBLISHER_ID = org.doxygen.Publisher
|
||||||
|
DOCSET_PUBLISHER_NAME = Publisher
|
||||||
|
GENERATE_HTMLHELP = NO
|
||||||
|
CHM_FILE =
|
||||||
|
HHC_LOCATION =
|
||||||
|
GENERATE_CHI = NO
|
||||||
|
CHM_INDEX_ENCODING =
|
||||||
|
BINARY_TOC = NO
|
||||||
|
TOC_EXPAND = NO
|
||||||
|
SITEMAP_URL =
|
||||||
|
GENERATE_QHP = NO
|
||||||
|
QCH_FILE =
|
||||||
|
QHP_NAMESPACE = org.doxygen.Project
|
||||||
|
QHP_VIRTUAL_FOLDER = doc
|
||||||
|
QHP_CUST_FILTER_NAME =
|
||||||
|
QHP_CUST_FILTER_ATTRS =
|
||||||
|
QHP_SECT_FILTER_ATTRS =
|
||||||
|
QHG_LOCATION =
|
||||||
|
GENERATE_ECLIPSEHELP = NO
|
||||||
|
ECLIPSE_DOC_ID = org.doxygen.Project
|
||||||
|
DISABLE_INDEX = NO
|
||||||
|
GENERATE_TREEVIEW = YES
|
||||||
|
FULL_SIDEBAR = NO
|
||||||
|
ENUM_VALUES_PER_LINE = 4
|
||||||
|
TREEVIEW_WIDTH = 250
|
||||||
|
EXT_LINKS_IN_WINDOW = NO
|
||||||
|
OBFUSCATE_EMAILS = YES
|
||||||
|
HTML_FORMULA_FORMAT = png
|
||||||
|
FORMULA_FONTSIZE = 10
|
||||||
|
FORMULA_MACROFILE =
|
||||||
|
USE_MATHJAX = NO
|
||||||
|
MATHJAX_VERSION = MathJax_2
|
||||||
|
MATHJAX_FORMAT = HTML-CSS
|
||||||
|
MATHJAX_RELPATH =
|
||||||
|
MATHJAX_EXTENSIONS =
|
||||||
|
MATHJAX_CODEFILE =
|
||||||
|
SEARCHENGINE = YES
|
||||||
|
SERVER_BASED_SEARCH = NO
|
||||||
|
EXTERNAL_SEARCH = NO
|
||||||
|
SEARCHENGINE_URL =
|
||||||
|
SEARCHDATA_FILE = searchdata.xml
|
||||||
|
EXTERNAL_SEARCH_ID =
|
||||||
|
EXTRA_SEARCH_MAPPINGS =
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options related to the LaTeX output
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
GENERATE_LATEX = YES
|
||||||
|
LATEX_OUTPUT = latex
|
||||||
|
LATEX_CMD_NAME =
|
||||||
|
MAKEINDEX_CMD_NAME = makeindex
|
||||||
|
LATEX_MAKEINDEX_CMD = makeindex
|
||||||
|
COMPACT_LATEX = NO
|
||||||
|
PAPER_TYPE = a4
|
||||||
|
EXTRA_PACKAGES =
|
||||||
|
LATEX_HEADER =
|
||||||
|
LATEX_FOOTER =
|
||||||
|
LATEX_EXTRA_STYLESHEET =
|
||||||
|
LATEX_EXTRA_FILES =
|
||||||
|
PDF_HYPERLINKS = YES
|
||||||
|
USE_PDFLATEX = YES
|
||||||
|
LATEX_BATCHMODE = NO
|
||||||
|
LATEX_HIDE_INDICES = NO
|
||||||
|
LATEX_BIB_STYLE = plain
|
||||||
|
LATEX_EMOJI_DIRECTORY =
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options related to the RTF output
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
GENERATE_RTF = NO
|
||||||
|
RTF_OUTPUT = rtf
|
||||||
|
COMPACT_RTF = NO
|
||||||
|
RTF_HYPERLINKS = NO
|
||||||
|
RTF_STYLESHEET_FILE =
|
||||||
|
RTF_EXTENSIONS_FILE =
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options related to the man page output
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
GENERATE_MAN = NO
|
||||||
|
MAN_OUTPUT = man
|
||||||
|
MAN_EXTENSION = .3
|
||||||
|
MAN_SUBDIR =
|
||||||
|
MAN_LINKS = NO
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options related to the XML output
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
GENERATE_XML = NO
|
||||||
|
XML_OUTPUT = xml
|
||||||
|
XML_PROGRAMLISTING = YES
|
||||||
|
XML_NS_MEMB_FILE_SCOPE = NO
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options related to the DOCBOOK output
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
GENERATE_DOCBOOK = NO
|
||||||
|
DOCBOOK_OUTPUT = docbook
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options for the AutoGen Definitions output
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
GENERATE_AUTOGEN_DEF = NO
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options related to Sqlite3 output
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
GENERATE_SQLITE3 = NO
|
||||||
|
SQLITE3_OUTPUT = sqlite3
|
||||||
|
SQLITE3_RECREATE_DB = YES
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options related to the Perl module output
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
GENERATE_PERLMOD = NO
|
||||||
|
PERLMOD_LATEX = NO
|
||||||
|
PERLMOD_PRETTY = YES
|
||||||
|
PERLMOD_MAKEVAR_PREFIX =
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options related to the preprocessor
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
ENABLE_PREPROCESSING = YES
|
||||||
|
MACRO_EXPANSION = NO
|
||||||
|
EXPAND_ONLY_PREDEF = NO
|
||||||
|
SEARCH_INCLUDES = YES
|
||||||
|
INCLUDE_PATH =
|
||||||
|
INCLUDE_FILE_PATTERNS =
|
||||||
|
PREDEFINED =
|
||||||
|
EXPAND_AS_DEFINED =
|
||||||
|
SKIP_FUNCTION_MACROS = YES
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options related to external references
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
TAGFILES =
|
||||||
|
GENERATE_TAGFILE =
|
||||||
|
ALLEXTERNALS = NO
|
||||||
|
EXTERNAL_GROUPS = YES
|
||||||
|
EXTERNAL_PAGES = YES
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
# Configuration options related to diagram generator tools
|
||||||
|
#---------------------------------------------------------------------------
|
||||||
|
HIDE_UNDOC_RELATIONS = YES
|
||||||
|
HAVE_DOT = NO
|
||||||
|
DOT_NUM_THREADS = 0
|
||||||
|
DOT_COMMON_ATTR = "fontname=Helvetica,fontsize=10"
|
||||||
|
DOT_EDGE_ATTR = "labelfontname=Helvetica,labelfontsize=10"
|
||||||
|
DOT_NODE_ATTR = "shape=box,height=0.2,width=0.4"
|
||||||
|
DOT_FONTPATH =
|
||||||
|
CLASS_GRAPH = YES
|
||||||
|
COLLABORATION_GRAPH = YES
|
||||||
|
GROUP_GRAPHS = YES
|
||||||
|
UML_LOOK = NO
|
||||||
|
UML_LIMIT_NUM_FIELDS = 10
|
||||||
|
DOT_UML_DETAILS = NO
|
||||||
|
DOT_WRAP_THRESHOLD = 17
|
||||||
|
TEMPLATE_RELATIONS = NO
|
||||||
|
INCLUDE_GRAPH = YES
|
||||||
|
INCLUDED_BY_GRAPH = YES
|
||||||
|
CALL_GRAPH = NO
|
||||||
|
CALLER_GRAPH = NO
|
||||||
|
GRAPHICAL_HIERARCHY = YES
|
||||||
|
DIRECTORY_GRAPH = YES
|
||||||
|
DIR_GRAPH_MAX_DEPTH = 1
|
||||||
|
DOT_IMAGE_FORMAT = png
|
||||||
|
INTERACTIVE_SVG = NO
|
||||||
|
DOT_PATH =
|
||||||
|
DOTFILE_DIRS =
|
||||||
|
DIA_PATH =
|
||||||
|
DIAFILE_DIRS =
|
||||||
|
PLANTUML_JAR_PATH =
|
||||||
|
PLANTUML_CFG_FILE =
|
||||||
|
PLANTUML_INCLUDE_PATH =
|
||||||
|
DOT_GRAPH_MAX_NODES = 50
|
||||||
|
MAX_DOT_GRAPH_DEPTH = 0
|
||||||
|
DOT_MULTI_TARGETS = NO
|
||||||
|
GENERATE_LEGEND = YES
|
||||||
|
DOT_CLEANUP = YES
|
||||||
|
MSCGEN_TOOL =
|
||||||
|
MSCFILE_DIRS =
|
21
Makefile
21
Makefile
@ -1,13 +1,28 @@
|
|||||||
build:
|
build:
|
||||||
mvn -B clean install
|
mvn -B clean install
|
||||||
|
|
||||||
ut:
|
unit-tests:
|
||||||
mvn clean test -Punit-tests
|
mvn clean test -Punit-tests
|
||||||
|
|
||||||
it:
|
integration-tests:
|
||||||
mvn clean verify -Pintegration-tests
|
mvn clean verify -Pintegration-tests
|
||||||
|
|
||||||
|
doxygen:
|
||||||
|
doxygen Doxyfile
|
||||||
|
|
||||||
list-releases:
|
list-releases:
|
||||||
curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=asc&page=0&size=12&filter=namespace%3Aio.github.amithkoujalgi%2Cname%3Aollama4j' \
|
curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=asc&page=0&size=12&filter=namespace%3Aio.github.amithkoujalgi%2Cname%3Aollama4j' \
|
||||||
--compressed \
|
--compressed \
|
||||||
--silent | jq '.components[].version'
|
--silent | jq '.components[].version'
|
||||||
|
|
||||||
|
build-docs:
|
||||||
|
npm i --prefix docs && npm run build --prefix docs
|
||||||
|
|
||||||
|
start-docs:
|
||||||
|
npm i --prefix docs && npm run start --prefix docs
|
||||||
|
|
||||||
|
start-cpu:
|
||||||
|
docker run -it -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama
|
||||||
|
|
||||||
|
start-gpu:
|
||||||
|
docker run -it --gpus=all -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama
|
324
README.md
324
README.md
@ -1,9 +1,58 @@
|
|||||||
### Ollama4j
|
### Ollama4j
|
||||||
|
|
||||||
<img src='https://raw.githubusercontent.com/amithkoujalgi/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon">
|
<p align="center">
|
||||||
|
<img src='https://raw.githubusercontent.com/ollama4j/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon">
|
||||||
|
</p>
|
||||||
|
|
||||||
A Java library (wrapper/binding)
|
|
||||||
for [Ollama](https://github.com/jmorganca/ollama/blob/main/docs/api.md) APIs.
|
A Java library (wrapper/binding) for [Ollama](https://ollama.ai/) server.
|
||||||
|
|
||||||
|
Find more details on the [website](https://ollama4j.github.io/ollama4j/).
|
||||||
|
|
||||||
|
<div align="center">
|
||||||
|
|
||||||
|

|
||||||
|

|
||||||
|

|
||||||
|

|
||||||
|

|
||||||
|
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|

|
||||||
|
[](https://codecov.io/gh/ollama4j/ollama4j)
|
||||||
|

|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|
## Table of Contents
|
||||||
|
|
||||||
|
- [How does it work?](#how-does-it-work)
|
||||||
|
- [Requirements](#requirements)
|
||||||
|
- [Installation](#installation)
|
||||||
|
- [API Spec](https://ollama4j.github.io/ollama4j/category/apis---model-management)
|
||||||
|
- [Javadoc](https://ollama4j.github.io/ollama4j/apidocs/)
|
||||||
|
- [Development](#development)
|
||||||
|
- [Contributions](#get-involved)
|
||||||
|
- [References](#references)
|
||||||
|
|
||||||
|
#### How does it work?
|
||||||
|
|
||||||
```mermaid
|
```mermaid
|
||||||
flowchart LR
|
flowchart LR
|
||||||
@ -17,64 +66,183 @@ for [Ollama](https://github.com/jmorganca/ollama/blob/main/docs/api.md) APIs.
|
|||||||
end
|
end
|
||||||
```
|
```
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||
|
|
||||||
## Table of Contents
|
|
||||||
|
|
||||||
- [Requirements](#requirements)
|
|
||||||
- [Installation](#installation)
|
|
||||||
- [API Spec](#api-spec)
|
|
||||||
- [Demo APIs](#try-out-the-apis-with-ollama-server)
|
|
||||||
- [Development](#development)
|
|
||||||
- [Contributions](#get-involved)
|
|
||||||
|
|
||||||
#### Requirements
|
#### Requirements
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
[![][ollama-shield]][ollama] Or [![][ollama-docker-shield]][ollama-docker]
|
|
||||||
|
|
||||||
[ollama]: https://ollama.ai/
|
<a href="https://ollama.com/" target="_blank">
|
||||||
|
<img src="https://img.shields.io/badge/v0.3.0-green.svg?style=for-the-badge&labelColor=gray&label=Ollama&color=blue" alt=""/>
|
||||||
|
</a>
|
||||||
|
|
||||||
[ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=for-the-badge&labelColor=gray
|
<table>
|
||||||
|
<tr>
|
||||||
|
<td>
|
||||||
|
|
||||||
[ollama-docker]: https://hub.docker.com/r/ollama/ollama
|
<a href="https://ollama.ai/" target="_blank">Local Installation</a>
|
||||||
|
|
||||||
[ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=for-the-badge&labelColor=gray
|
</td>
|
||||||
|
|
||||||
#### Installation
|
<td>
|
||||||
|
|
||||||
|
<a href="https://hub.docker.com/r/ollama/ollama" target="_blank">Docker Installation</a>
|
||||||
|
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>
|
||||||
|
|
||||||
|
<a href="https://ollama.com/download/Ollama-darwin.zip" target="_blank">Download for macOS</a>
|
||||||
|
|
||||||
|
<a href="https://ollama.com/download/OllamaSetup.exe" target="_blank">Download for Windows</a>
|
||||||
|
|
||||||
|
Install on Linux
|
||||||
|
|
||||||
|
```shell
|
||||||
|
curl -fsSL https://ollama.com/install.sh | sh
|
||||||
|
```
|
||||||
|
|
||||||
|
</td>
|
||||||
|
<td>
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
CPU only
|
||||||
|
|
||||||
|
```shell
|
||||||
|
docker run -d -p 11434:11434 \
|
||||||
|
-v ollama:/root/.ollama \
|
||||||
|
--name ollama \
|
||||||
|
ollama/ollama
|
||||||
|
```
|
||||||
|
|
||||||
|
NVIDIA GPU
|
||||||
|
|
||||||
|
```shell
|
||||||
|
docker run -d -p 11434:11434 \
|
||||||
|
--gpus=all \
|
||||||
|
-v ollama:/root/.ollama \
|
||||||
|
--name ollama \
|
||||||
|
ollama/ollama
|
||||||
|
```
|
||||||
|
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
> [!NOTE]
|
||||||
|
> We are now publishing the artifacts to both Maven Central and GitHub package repositories.
|
||||||
|
>
|
||||||
|
> Track the releases [here](https://github.com/ollama4j/ollama4j/releases) and update the dependency version
|
||||||
|
> according to your requirements.
|
||||||
|
|
||||||
|
### For Maven
|
||||||
|
|
||||||
|
#### Using [Maven Central](https://central.sonatype.com/)
|
||||||
|
|
||||||
|
[![][ollama4j-mvn-releases-shield]][ollama4j-mvn-releases-link]
|
||||||
|
|
||||||
|
[ollama4j-mvn-releases-link]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j/overview
|
||||||
|
|
||||||
|
[ollama4j-mvn-releases-shield]: https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20Maven%20Central
|
||||||
|
|
||||||
In your Maven project, add this dependency:
|
In your Maven project, add this dependency:
|
||||||
|
|
||||||
```xml
|
```xml
|
||||||
|
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>io.github.amithkoujalgi</groupId>
|
<groupId>io.github.ollama4j</groupId>
|
||||||
<artifactId>ollama4j</artifactId>
|
<artifactId>ollama4j</artifactId>
|
||||||
<version>1.0.29</version>
|
<version>1.0.79</version>
|
||||||
</dependency>
|
</dependency>
|
||||||
```
|
```
|
||||||
|
|
||||||
Latest release: 
|
#### Using GitHub's Maven Package Repository
|
||||||
|
|
||||||
[![][lib-shield]][lib]
|
[![][ollama4j-releases-shield]][ollama4j-releases-link]
|
||||||
|
|
||||||
[lib]: https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j
|
[ollama4j-releases-link]: https://github.com/ollama4j/ollama4j/releases
|
||||||
|
|
||||||
[lib-shield]: https://img.shields.io/badge/ollama4j-get_latest_version-blue.svg?style=for-the-badge&labelColor=gray
|
[ollama4j-releases-shield]: https://img.shields.io/github/v/release/ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20GitHub%20Packages
|
||||||
|
|
||||||
|
1. Add `GitHub Maven Packages` repository to your project's `pom.xml` or your `settings.xml`:
|
||||||
|
|
||||||
|
```xml
|
||||||
|
|
||||||
|
<repositories>
|
||||||
|
<repository>
|
||||||
|
<id>github</id>
|
||||||
|
<name>GitHub Apache Maven Packages</name>
|
||||||
|
<url>https://maven.pkg.github.com/ollama4j/ollama4j</url>
|
||||||
|
<releases>
|
||||||
|
<enabled>true</enabled>
|
||||||
|
</releases>
|
||||||
|
<snapshots>
|
||||||
|
<enabled>true</enabled>
|
||||||
|
</snapshots>
|
||||||
|
</repository>
|
||||||
|
</repositories>
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Add `GitHub` server to settings.xml. (Usually available at ~/.m2/settings.xml)
|
||||||
|
|
||||||
|
```xml
|
||||||
|
|
||||||
|
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
|
||||||
|
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
|
||||||
|
http://maven.apache.org/xsd/settings-1.0.0.xsd">
|
||||||
|
<servers>
|
||||||
|
<server>
|
||||||
|
<id>github</id>
|
||||||
|
<username>YOUR-USERNAME</username>
|
||||||
|
<password>YOUR-TOKEN</password>
|
||||||
|
</server>
|
||||||
|
</servers>
|
||||||
|
</settings>
|
||||||
|
```
|
||||||
|
|
||||||
|
3. In your Maven project, add this dependency:
|
||||||
|
|
||||||
|
```xml
|
||||||
|
|
||||||
|
<dependency>
|
||||||
|
<groupId>io.github.ollama4j</groupId>
|
||||||
|
<artifactId>ollama4j</artifactId>
|
||||||
|
<version>1.0.79</version>
|
||||||
|
</dependency>
|
||||||
|
```
|
||||||
|
|
||||||
|
### For Gradle
|
||||||
|
|
||||||
|
1. Add the dependency
|
||||||
|
|
||||||
|
```groovy
|
||||||
|
dependencies {
|
||||||
|
implementation 'io.github.ollama4j:ollama4j:1.0.79'
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
[//]: # (Latest release:)
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|
[//]: # ([![][lib-shield]][lib])
|
||||||
|
|
||||||
|
[lib]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j
|
||||||
|
|
||||||
|
[lib-shield]: https://img.shields.io/badge/ollama4j-get_latest_version-blue.svg?style=just-the-message&labelColor=gray
|
||||||
|
|
||||||
#### API Spec
|
#### API Spec
|
||||||
|
|
||||||
Find the full `Javadoc` (API specifications) [here](https://amithkoujalgi.github.io/ollama4j/).
|
> [!TIP]
|
||||||
|
> Find the full API specifications on the [website](https://ollama4j.github.io/ollama4j/).
|
||||||
|
|
||||||
#### Development
|
#### Development
|
||||||
|
|
||||||
@ -87,52 +255,82 @@ make build
|
|||||||
Run unit tests:
|
Run unit tests:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
make ut
|
make unit-tests
|
||||||
```
|
```
|
||||||
|
|
||||||
Run integration tests:
|
Run integration tests:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
make it
|
make integration-tests
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Releases
|
#### Releases
|
||||||
|
|
||||||
Releases (newer artifact versions) are done automatically on pushing the code to the `main` branch through GitHub
|
Newer artifacts are published via GitHub Actions CI workflow when a new release is created from `main` branch.
|
||||||
Actions CI workflow.
|
|
||||||
|
#### Who's using Ollama4j?
|
||||||
|
|
||||||
|
- `Datafaker`: a library to generate fake data
|
||||||
|
- https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api
|
||||||
|
- `Vaadin Web UI`: UI-Tester for Interactions with Ollama via ollama4j
|
||||||
|
- https://github.com/TEAMPB/ollama4j-vaadin-ui
|
||||||
|
- `ollama-translator`: Minecraft 1.20.6 spigot plugin allows to easily break language barriers by using ollama on the
|
||||||
|
server to translate all messages into a specific target language.
|
||||||
|
- https://github.com/liebki/ollama-translator
|
||||||
|
- `Ollama4j Web UI`: A web UI for Ollama written in Java using Spring Boot and Vaadin framework and
|
||||||
|
Ollama4j. https://github.com/ollama4j/ollama4j-web-ui
|
||||||
|
|
||||||
#### Traction
|
#### Traction
|
||||||
|
|
||||||
[](https://star-history.com/#amithkoujalgi/ollama4j&Date)
|
[](https://star-history.com/#ollama4j/ollama4j&Date)
|
||||||
|
|
||||||
### Areas of improvement
|
|
||||||
|
|
||||||
- [x] Use Java-naming conventions for attributes in the request/response models instead of the
|
|
||||||
snake-case conventions. (
|
|
||||||
possibly with Jackson-mapper's `@JsonProperty`)
|
|
||||||
- [x] Fix deprecated HTTP client code
|
|
||||||
- [x] Setup logging
|
|
||||||
- [x] Use lombok
|
|
||||||
- [x] Update request body creation with Java objects
|
|
||||||
- [ ] Async APIs for images
|
|
||||||
- [ ] Add additional params for `ask` APIs such as:
|
|
||||||
- `options`: additional model parameters for the Modelfile such as `temperature`
|
|
||||||
- `system`: system prompt (overrides what is defined in the Modelfile)
|
|
||||||
- `template`: the full prompt or prompt template (overrides what is defined in the Modelfile)
|
|
||||||
- `context`: the context parameter returned from a previous request, which can be used to keep a
|
|
||||||
short
|
|
||||||
conversational memory
|
|
||||||
- `stream`: Add support for streaming responses from the model
|
|
||||||
- [ ] Add test cases
|
|
||||||
- [ ] Handle exceptions better (maybe throw more appropriate exceptions)
|
|
||||||
|
|
||||||
### Get Involved
|
### Get Involved
|
||||||
|
|
||||||
|
<div align="center">
|
||||||
|
|
||||||
|
<a href=""></a>
|
||||||
|
<a href=""></a>
|
||||||
|
<a href=""></a>
|
||||||
|
<a href=""></a>
|
||||||
|
<a href=""></a>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|
[//]: # ()
|
||||||
|
|
||||||
|
|
||||||
Contributions are most welcome! Whether it's reporting a bug, proposing an enhancement, or helping
|
Contributions are most welcome! Whether it's reporting a bug, proposing an enhancement, or helping
|
||||||
with code - any sort
|
with code - any sort
|
||||||
of contribution is much appreciated.
|
of contribution is much appreciated.
|
||||||
|
|
||||||
|
### References
|
||||||
|
|
||||||
|
- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)
|
||||||
|
|
||||||
### Credits
|
### Credits
|
||||||
|
|
||||||
The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/)
|
The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/)
|
||||||
project.
|
project.
|
||||||
|
|
||||||
|
**Thanks to the amazing contributors**
|
||||||
|
|
||||||
|
<p align="center">
|
||||||
|
<a href="https://github.com/ollama4j/ollama4j/graphs/contributors">
|
||||||
|
<img src="https://contrib.rocks/image?repo=ollama4j/ollama4j" alt=""/>
|
||||||
|
</a>
|
||||||
|
</p>
|
||||||
|
|
||||||
|
### Appreciate my work?
|
||||||
|
|
||||||
|
<p align="center">
|
||||||
|
<a href="https://www.buymeacoffee.com/amithkoujalgi" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a>
|
||||||
|
</p>
|
||||||
|
@ -11,7 +11,7 @@ Hey there, my fellow Java Developers! 🚀
|
|||||||
I am glad to announce the release of Ollama4j, a library that unites Ollama (an LLM manager and runner) and your Java
|
I am glad to announce the release of Ollama4j, a library that unites Ollama (an LLM manager and runner) and your Java
|
||||||
applications! 🌐🚀
|
applications! 🌐🚀
|
||||||
|
|
||||||
👉 GitHub Repository: Ollama4j on GitHub (https://github.com/amithkoujalgi/ollama4j)
|
👉 GitHub Repository: Ollama4j on GitHub (https://github.com/ollama4j/ollama4j)
|
||||||
|
|
||||||
🌟 Key Features:
|
🌟 Key Features:
|
||||||
|
|
||||||
@ -58,9 +58,9 @@ elevate your projects.
|
|||||||
|
|
||||||
I look forward to seeing the incredible applications/projects you'll build with Ollama4j! 🌟
|
I look forward to seeing the incredible applications/projects you'll build with Ollama4j! 🌟
|
||||||
|
|
||||||
Find the full API spec here: https://amithkoujalgi.github.io/ollama4j/
|
Find the full API spec here: https://ollama4j.github.io/ollama4j/
|
||||||
|
|
||||||
Find the Javadoc here: https://amithkoujalgi.github.io/ollama4j/apidocs/
|
Find the Javadoc here: https://ollama4j.github.io/ollama4j/apidocs/
|
||||||
|
|
||||||
Ollama4j Docs is powered by [Docusaurus](https://docusaurus.io).
|
Ollama4j Docs is powered by [Docusaurus](https://docusaurus.io).
|
||||||
|
|
||||||
|
@ -1,42 +0,0 @@
|
|||||||
---
|
|
||||||
sidebar_position: 2
|
|
||||||
---
|
|
||||||
|
|
||||||
# Ask - Async
|
|
||||||
|
|
||||||
This API lets you ask questions to the LLMs in an asynchronous way.
|
|
||||||
These APIs correlate to
|
|
||||||
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
|
|
||||||
|
|
||||||
```java
|
|
||||||
public class Main {
|
|
||||||
|
|
||||||
public static void main(String[] args) {
|
|
||||||
|
|
||||||
String host = "http://localhost:11434/";
|
|
||||||
|
|
||||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
|
||||||
|
|
||||||
String prompt = "Who are you?";
|
|
||||||
|
|
||||||
OllamaAsyncResultCallback callback = ollamaAPI.askAsync(OllamaModelType.LLAMA2, prompt);
|
|
||||||
|
|
||||||
while (!callback.isComplete() || !callback.getStream().isEmpty()) {
|
|
||||||
// poll for data from the response stream
|
|
||||||
String result = callback.getStream().poll();
|
|
||||||
            if (result != null) {
|
|
||||||
                System.out.print(result);
|
|
||||||
}
|
|
||||||
Thread.sleep(100);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
You will get a response similar to:
|
|
||||||
|
|
||||||
> I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational
|
|
||||||
> manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide
|
|
||||||
> range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that
|
|
||||||
> require
|
|
||||||
> natural language understanding and generation capabilities.
|
|
@ -1,106 +0,0 @@
|
|||||||
---
|
|
||||||
sidebar_position: 1
|
|
||||||
---
|
|
||||||
|
|
||||||
# Ask - Sync
|
|
||||||
|
|
||||||
This API lets you ask questions to the LLMs in a synchronous way.
|
|
||||||
These APIs correlate to
|
|
||||||
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
|
|
||||||
|
|
||||||
## Try asking a question about the model.
|
|
||||||
|
|
||||||
```java
|
|
||||||
public class Main {
|
|
||||||
|
|
||||||
public static void main(String[] args) {
|
|
||||||
|
|
||||||
String host = "http://localhost:11434/";
|
|
||||||
|
|
||||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
|
||||||
|
|
||||||
OllamaResult result = ollamaAPI.ask(OllamaModelType.LLAMA2, "Who are you?");
|
|
||||||
|
|
||||||
System.out.println(result.getResponse());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
You will get a response similar to:
|
|
||||||
|
|
||||||
> I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational
|
|
||||||
> manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide
|
|
||||||
> range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that
|
|
||||||
> require
|
|
||||||
> natural language understanding and generation capabilities.
|
|
||||||
|
|
||||||
## Try asking a question from general topics.
|
|
||||||
|
|
||||||
```java
|
|
||||||
public class Main {
|
|
||||||
|
|
||||||
public static void main(String[] args) {
|
|
||||||
|
|
||||||
String host = "http://localhost:11434/";
|
|
||||||
|
|
||||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
|
||||||
|
|
||||||
String prompt = "List all cricket world cup teams of 2019.";
|
|
||||||
|
|
||||||
OllamaResult result = ollamaAPI.ask(OllamaModelType.LLAMA2, prompt);
|
|
||||||
|
|
||||||
System.out.println(result.getResponse());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
You'd then get a response from the model:
|
|
||||||
|
|
||||||
> The 2019 ICC Cricket World Cup was held in England and Wales from May 30 to July 14, 2019. The
|
|
||||||
> following teams
|
|
||||||
> participated in the tournament:
|
|
||||||
>
|
|
||||||
> 1. Afghanistan
|
|
||||||
> 2. Australia
|
|
||||||
> 3. Bangladesh
|
|
||||||
> 4. England
|
|
||||||
> 5. India
|
|
||||||
> 6. New Zealand
|
|
||||||
> 7. Pakistan
|
|
||||||
> 8. South Africa
|
|
||||||
> 9. Sri Lanka
|
|
||||||
> 10. West Indies
|
|
||||||
>
|
|
||||||
> These teams competed in a round-robin format, with the top four teams advancing to the
|
|
||||||
> semi-finals. The tournament was
|
|
||||||
> won by the England cricket team, who defeated New Zealand in the final.
|
|
||||||
|
|
||||||
## Try asking for a Database query for your data schema.
|
|
||||||
|
|
||||||
```java
|
|
||||||
public class Main {
|
|
||||||
|
|
||||||
public static void main(String[] args) {
|
|
||||||
String host = "http://localhost:11434/";
|
|
||||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
|
||||||
|
|
||||||
String prompt = SamplePrompts.getSampleDatabasePromptWithQuestion(
|
|
||||||
"List all customer names who have bought one or more products");
|
|
||||||
OllamaResult result = ollamaAPI.ask(OllamaModelType.SQLCODER, prompt);
|
|
||||||
System.out.println(result.getResponse());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
_Note: Here I've used
|
|
||||||
a [sample prompt](https://github.com/amithkoujalgi/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
|
|
||||||
containing a database schema from within this library for demonstration purposes._
|
|
||||||
|
|
||||||
You'd then get a response from the model:
|
|
||||||
|
|
||||||
```sql
|
|
||||||
SELECT customers.name
|
|
||||||
FROM sales
|
|
||||||
JOIN customers ON sales.customer_id = customers.customer_id
|
|
||||||
GROUP BY customers.name;
|
|
||||||
```
|
|
@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"label": "APIs - Extras",
|
"label": "APIs - Extras",
|
||||||
"position": 10,
|
"position": 4,
|
||||||
"link": {
|
"link": {
|
||||||
"type": "generated-index",
|
"type": "generated-index",
|
||||||
  "description": "Details of APIs to handle a bunch of extra stuff."
|
  "description": "Details of APIs to handle a bunch of extra stuff."
|
||||||
|
26
docs/docs/apis-extras/basic-auth.md
Normal file
26
docs/docs/apis-extras/basic-auth.md
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 2
|
||||||
|
---
|
||||||
|
|
||||||
|
# Set Basic Authentication
|
||||||
|
|
||||||
|
This API lets you set the basic authentication for the Ollama client. This would help in scenarios where
|
||||||
|
Ollama server would be set up behind a gateway/reverse proxy with basic auth.
|
||||||
|
|
||||||
|
After configuring basic authentication, all subsequent requests will include the Basic Auth header.
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
|
||||||
|
ollamaAPI.setBasicAuth("username", "password");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
79
docs/docs/apis-extras/options-builder.md
Normal file
79
docs/docs/apis-extras/options-builder.md
Normal file
@ -0,0 +1,79 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 1
|
||||||
|
---
|
||||||
|
|
||||||
|
# Options Builder
|
||||||
|
|
||||||
|
This lets you build options for the `ask()` API.
|
||||||
|
|
||||||
|
Following are the parameters supported by Ollama:
|
||||||
|
|
||||||
|
| Parameter | Description | Value Type | Example Usage |
|
||||||
|
|----------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------|----------------------|
|
||||||
|
| mirostat | Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0) | int | mirostat 0 |
|
||||||
|
| mirostat_eta | Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. (Default: 0.1) | float | mirostat_eta 0.1 |
|
||||||
|
| mirostat_tau | Controls the balance between coherence and diversity of the output. A lower value will result in more focused and coherent text. (Default: 5.0) | float | mirostat_tau 5.0 |
|
||||||
|
| num_ctx | Sets the size of the context window used to generate the next token. (Default: 2048) | int | num_ctx 4096 |
|
||||||
|
| num_gqa | The number of GQA groups in the transformer layer. Required for some models, for example it is 8 for llama2:70b | int | num_gqa 1 |
|
||||||
|
| num_gpu | The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable. | int | num_gpu 50 |
|
||||||
|
| num_thread | Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). | int | num_thread 8 |
|
||||||
|
| repeat_last_n | Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx) | int | repeat_last_n 64 |
|
||||||
|
| repeat_penalty | Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1) | float | repeat_penalty 1.1 |
|
||||||
|
| temperature | The temperature of the model. Increasing the temperature will make the model answer more creatively. (Default: 0.8) | float | temperature 0.7 |
|
||||||
|
| seed | Sets the random number seed to use for generation. Setting this to a specific number will make the model generate the same text for the same prompt. (Default: 0) | int | seed 42 |
|
||||||
|
| stop | Sets the stop sequences to use. When this pattern is encountered the LLM will stop generating text and return. Multiple stop patterns may be set by specifying multiple separate `stop` parameters in a modelfile. | string | stop "AI assistant:" |
|
||||||
|
| tfs_z | Tail free sampling is used to reduce the impact of less probable tokens from the output. A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. (default: 1) | float | tfs_z 1 |
|
||||||
|
| num_predict | Maximum number of tokens to predict when generating text. (Default: 128, -1 = infinite generation, -2 = fill context) | int | num_predict 42 |
|
||||||
|
| top_k | Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40) | int | top_k 40 |
|
||||||
|
| top_p | Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9) | float | top_p 0.9 |
|
||||||
|
|
||||||
|
Link to [source](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
|
||||||
|
|
||||||
|
Also, see how to set those Ollama parameters using
|
||||||
|
the `OptionsBuilder`
|
||||||
|
from [javadoc](https://ollama4j.github.io/ollama4j/apidocs/io/github/ollama4j/ollama4j/core/utils/OptionsBuilder.html).
|
||||||
|
|
||||||
|
## Build an empty `Options` object
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.utils.Options;
|
||||||
|
import io.github.ollama4j.utils.OptionsBuilder;
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
|
||||||
|
Options options = new OptionsBuilder().build();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Build the `Options` object with values
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.utils.Options;
|
||||||
|
import io.github.ollama4j.utils.OptionsBuilder;
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
|
||||||
|
Options options =
|
||||||
|
new OptionsBuilder()
|
||||||
|
.setMirostat(10)
|
||||||
|
.setMirostatEta(0.5f)
|
||||||
|
.setNumGpu(2)
|
||||||
|
.setTemperature(1.5f)
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
@ -7,6 +7,8 @@ sidebar_position: 3
|
|||||||
This API lets you check the reachability of Ollama server.
|
This API lets you check the reachability of Ollama server.
|
||||||
|
|
||||||
```java
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
|
||||||
public class Main {
|
public class Main {
|
||||||
|
|
||||||
public static void main(String[] args) {
|
public static void main(String[] args) {
|
||||||
|
30
docs/docs/apis-extras/ps.md
Normal file
30
docs/docs/apis-extras/ps.md
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 4
|
||||||
|
---
|
||||||
|
|
||||||
|
# PS
|
||||||
|
|
||||||
|
This API provides a list of running models and details about each model currently loaded into memory.
|
||||||
|
|
||||||
|
This API corresponds to the [PS](https://github.com/ollama/ollama/blob/main/docs/api.md#list-running-models) API.
|
||||||
|
|
||||||
|
```java
|
||||||
|
package io.github.ollama4j.localtests;
|
||||||
|
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.exceptions.OllamaBaseException;
|
||||||
|
import io.github.ollama4j.models.ps.ModelsProcessResponse;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
public static void main(String[] args) {
|
||||||
|
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
|
||||||
|
|
||||||
|
ModelsProcessResponse response = ollamaAPI.ps();
|
||||||
|
|
||||||
|
System.out.println(response);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
@ -7,6 +7,8 @@ sidebar_position: 2
|
|||||||
This API lets you set the request timeout for the Ollama client.
|
This API lets you set the request timeout for the Ollama client.
|
||||||
|
|
||||||
```java
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
|
||||||
public class Main {
|
public class Main {
|
||||||
|
|
||||||
public static void main(String[] args) {
|
public static void main(String[] args) {
|
||||||
|
@ -9,6 +9,8 @@ This API lets you set the verbosity of the Ollama client.
|
|||||||
## Try asking a question about the model.
|
## Try asking a question about the model.
|
||||||
|
|
||||||
```java
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
|
||||||
public class Main {
|
public class Main {
|
||||||
|
|
||||||
public static void main(String[] args) {
|
public static void main(String[] args) {
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"label": "APIs - Ask",
|
"label": "APIs - Generate",
|
||||||
"position": 10,
|
"position": 3,
|
||||||
"link": {
|
"link": {
|
||||||
"type": "generated-index",
|
"type": "generated-index",
|
||||||
"description": "Details of APIs to interact with LLMs."
|
"description": "Details of APIs to interact with LLMs."
|
272
docs/docs/apis-generate/chat.md
Normal file
272
docs/docs/apis-generate/chat.md
Normal file
@ -0,0 +1,272 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 7
|
||||||
|
---
|
||||||
|
|
||||||
|
# Chat
|
||||||
|
|
||||||
|
This API lets you create a conversation with LLMs. Using this API enables you to ask questions to the model including
|
||||||
|
information using the history of already asked questions and the respective answers.
|
||||||
|
|
||||||
|
## Create a new conversation and use chat history to augment follow up questions
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatRequest;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatResult;
|
||||||
|
import io.github.ollama4j.types.OllamaModelType;
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
|
||||||
|
|
||||||
|
// create first user question
|
||||||
|
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// start conversation with model
|
||||||
|
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
|
||||||
|
|
||||||
|
System.out.println("First answer: " + chatResult.getResponse());
|
||||||
|
|
||||||
|
// create next userQuestion
|
||||||
|
requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?").build();
|
||||||
|
|
||||||
|
// "continue" conversation with model
|
||||||
|
chatResult = ollamaAPI.chat(requestModel);
|
||||||
|
|
||||||
|
System.out.println("Second answer: " + chatResult.getResponse());
|
||||||
|
|
||||||
|
System.out.println("Chat History: " + chatResult.getChatHistory());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
You will get a response similar to:
|
||||||
|
|
||||||
|
> First answer: Should be Paris!
|
||||||
|
>
|
||||||
|
> Second answer: Marseille.
|
||||||
|
>
|
||||||
|
> Chat History:
|
||||||
|
|
||||||
|
```json
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"role": "user",
|
||||||
|
"content": "What is the capital of France?",
|
||||||
|
"images": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"role": "assistant",
|
||||||
|
"content": "Should be Paris!",
|
||||||
|
"images": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"role": "user",
|
||||||
|
"content": "And what is the second largest city?",
|
||||||
|
"images": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"role": "assistant",
|
||||||
|
"content": "Marseille.",
|
||||||
|
"images": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
## Conversational loop
|
||||||
|
|
||||||
|
```java
|
||||||
|
public class Main {
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI();
|
||||||
|
ollamaAPI.setRequestTimeoutSeconds(60);
|
||||||
|
|
||||||
|
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("<your-model>");
|
||||||
|
|
||||||
|
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "<your-first-message>").build();
|
||||||
|
OllamaChatResult initialChatResult = ollamaAPI.chat(requestModel);
|
||||||
|
System.out.println(initialChatResult.getResponse());
|
||||||
|
|
||||||
|
List<OllamaChatMessage> history = initialChatResult.getChatHistory();
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
            OllamaChatResult chatResult = ollamaAPI.chat(builder.withMessages(history).withMessage(OllamaChatMessageRole.USER, "<your-new-message>").build());
|
||||||
|
System.out.println(chatResult.getResponse());
|
||||||
|
history = chatResult.getChatHistory();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Create a conversation where the answer is streamed
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatRequest;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatResult;
|
||||||
|
import io.github.ollama4j.models.generate.OllamaStreamHandler;
|
||||||
|
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
|
||||||
|
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER,
|
||||||
|
"What is the capital of France? And what's France's connection with Mona Lisa?")
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// define a handler (Consumer<String>)
|
||||||
|
OllamaStreamHandler streamHandler = (s) -> {
|
||||||
|
System.out.println(s);
|
||||||
|
};
|
||||||
|
|
||||||
|
OllamaChatResult chatResult = ollamaAPI.chat(requestModel, streamHandler);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
You will get a response similar to:
|
||||||
|
|
||||||
|
> The
|
||||||
|
> The capital
|
||||||
|
> The capital of
|
||||||
|
> The capital of France
|
||||||
|
> The capital of France is
|
||||||
|
> The capital of France is Paris
|
||||||
|
> The capital of France is Paris.
|
||||||
|
|
||||||
|
## Use a simple Console Output Stream Handler
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.impl.ConsoleOutputStreamHandler;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatRequest;
|
||||||
|
import io.github.ollama4j.models.generate.OllamaStreamHandler;
|
||||||
|
import io.github.ollama4j.types.OllamaModelType;
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
public static void main(String[] args) throws Exception {
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
|
||||||
|
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
|
||||||
|
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
|
||||||
|
.build();
|
||||||
|
OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
|
||||||
|
ollamaAPI.chat(requestModel, streamHandler);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Create a new conversation with individual system prompt
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatRequest;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatResult;
|
||||||
|
import io.github.ollama4j.types.OllamaModelType;
|
||||||
|
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
|
||||||
|
|
||||||
|
// create request with system-prompt (overriding the model defaults) and user question
|
||||||
|
OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
|
||||||
|
.withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// start conversation with model
|
||||||
|
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
|
||||||
|
|
||||||
|
System.out.println(chatResult.getResponse());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
You will get a response similar to:
|
||||||
|
|
||||||
|
> NI.
|
||||||
|
|
||||||
|
## Create a conversation about an image (requires model with image recognition skills)
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatRequest;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatResult;
|
||||||
|
import io.github.ollama4j.types.OllamaModelType;
|
||||||
|
|
||||||
|
import java.io.File;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA);
|
||||||
|
|
||||||
|
// Load Image from File and attach to user message (alternatively images could also be added via URL)
|
||||||
|
OllamaChatRequest requestModel =
|
||||||
|
builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
|
||||||
|
List.of(
|
||||||
|
new File("/path/to/image"))).build();
|
||||||
|
|
||||||
|
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
|
||||||
|
System.out.println("First answer: " + chatResult.getResponse());
|
||||||
|
|
||||||
|
builder.reset();
|
||||||
|
|
||||||
|
// Use history to ask further questions about the image or assistant answer
|
||||||
|
requestModel =
|
||||||
|
builder.withMessages(chatResult.getChatHistory())
|
||||||
|
.withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();
|
||||||
|
|
||||||
|
chatResult = ollamaAPI.chat(requestModel);
|
||||||
|
System.out.println("Second answer: " + chatResult.getResponse());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
You will get a response similar to:
|
||||||
|
|
||||||
|
> First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two
|
||||||
|
> levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and
|
||||||
|
> comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early
|
||||||
|
> evening, given the warm lighting and the low position of the sun in the sky.
|
||||||
|
>
|
||||||
|
> Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. However, the dog
|
||||||
|
> appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever
|
||||||
|
> or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed
|
||||||
|
> confidently.
|
48
docs/docs/apis-generate/generate-async.md
Normal file
48
docs/docs/apis-generate/generate-async.md
Normal file
@ -0,0 +1,48 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 2
|
||||||
|
---
|
||||||
|
|
||||||
|
# Generate - Async
|
||||||
|
|
||||||
|
This API lets you ask questions to the LLMs in an asynchronous way.
|
||||||
|
This is particularly helpful when you want to issue a generate request to the LLM and collect the response in the
|
||||||
|
background (such as threads) without blocking your code until the response arrives from the model.
|
||||||
|
|
||||||
|
This API corresponds to
|
||||||
|
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
|
||||||
|
import io.github.ollama4j.types.OllamaModelType;
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
|
||||||
|
public static void main(String[] args) throws Exception {
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
ollamaAPI.setRequestTimeoutSeconds(60);
|
||||||
|
String prompt = "List all cricket world cup teams of 2019.";
|
||||||
|
OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync(OllamaModelType.LLAMA3, prompt, false);
|
||||||
|
|
||||||
|
// Set the poll interval according to your needs.
|
||||||
|
// Smaller the poll interval, more frequently you receive the tokens.
|
||||||
|
int pollIntervalMilliseconds = 1000;
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
String tokens = streamer.getStream().poll();
|
||||||
|
System.out.print(tokens);
|
||||||
|
if (!streamer.isAlive()) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
Thread.sleep(pollIntervalMilliseconds);
|
||||||
|
}
|
||||||
|
|
||||||
|
System.out.println("\n------------------------");
|
||||||
|
System.out.println("Complete Response:");
|
||||||
|
System.out.println("------------------------");
|
||||||
|
|
||||||
|
System.out.println(streamer.getCompleteResponse());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
@ -1,5 +1,5 @@
|
|||||||
---
|
---
|
||||||
sidebar_position: 5
|
sidebar_position: 6
|
||||||
---
|
---
|
||||||
|
|
||||||
# Generate Embeddings
|
# Generate Embeddings
|
||||||
@ -12,6 +12,10 @@ Parameters:
|
|||||||
- `prompt`: text to generate embeddings for
|
- `prompt`: text to generate embeddings for
|
||||||
|
|
||||||
```java
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.types.OllamaModelType;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
public class Main {
|
public class Main {
|
||||||
|
|
||||||
public static void main(String[] args) {
|
public static void main(String[] args) {
|
||||||
@ -30,17 +34,17 @@ public class Main {
|
|||||||
|
|
||||||
You will get a response similar to:
|
You will get a response similar to:
|
||||||
|
|
||||||
```json
|
```javascript
|
||||||
[
|
[
|
||||||
0.5670403838157654,
|
0.5670403838157654,
|
||||||
0.009260174818336964,
|
0.009260174818336964,
|
||||||
0.23178744316101074,
|
0.23178744316101074,
|
||||||
-0.2916173040866852,
|
-0.2916173040866852,
|
||||||
-0.8924556970596313,
|
-0.8924556970596313,
|
||||||
0.8785552978515625,
|
0.8785552978515625,
|
||||||
-0.34576427936553955,
|
-0.34576427936553955,
|
||||||
0.5742510557174683,
|
0.5742510557174683,
|
||||||
-0.04222835972905159,
|
-0.04222835972905159,
|
||||||
-0.137906014919281
|
-0.137906014919281
|
||||||
]
|
]
|
||||||
```
|
```
|
@ -1,27 +1,35 @@
|
|||||||
---
|
---
|
||||||
sidebar_position: 3
|
sidebar_position: 4
|
||||||
---
|
---
|
||||||
|
|
||||||
# Ask - With Image Files
|
# Generate - With Image Files
|
||||||
|
|
||||||
This API lets you ask questions along with the image files to the LLMs.
|
This API lets you ask questions along with the image files to the LLMs.
|
||||||
These APIs correlate to
|
This API corresponds to
|
||||||
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
|
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
|
||||||
|
|
||||||
:::caution
|
:::note
|
||||||
|
|
||||||
Executing this on Ollama server running in CPU-mode will take longer to generate response. Hence, GPU-mode is
|
Executing this on Ollama server running in CPU-mode will take longer to generate response. Hence, GPU-mode is
|
||||||
recommended.
|
recommended.
|
||||||
|
|
||||||
:::
|
:::
|
||||||
|
|
||||||
## Ask (Sync)
|
## Synchronous mode
|
||||||
|
|
||||||
If you have this image downloaded and you pass the path to the downloaded image to the following code:
|
If you have this image downloaded and you pass the path to the downloaded image to the following code:
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
```java
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.models.response.OllamaResult;
|
||||||
|
import io.github.ollama4j.types.OllamaModelType;
|
||||||
|
import io.github.ollama4j.utils.OptionsBuilder;
|
||||||
|
|
||||||
|
import java.io.File;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
public class Main {
|
public class Main {
|
||||||
|
|
||||||
public static void main(String[] args) {
|
public static void main(String[] args) {
|
||||||
@ -29,10 +37,12 @@ public class Main {
|
|||||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
ollamaAPI.setRequestTimeoutSeconds(10);
|
ollamaAPI.setRequestTimeoutSeconds(10);
|
||||||
|
|
||||||
OllamaResult result = ollamaAPI.askWithImageFiles(OllamaModelType.LLAVA,
|
OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA,
|
||||||
"What's in this image?",
|
"What's in this image?",
|
||||||
List.of(
|
List.of(
|
||||||
new File("/path/to/image")));
|
new File("/path/to/image")),
|
||||||
|
new OptionsBuilder().build()
|
||||||
|
);
|
||||||
System.out.println(result.getResponse());
|
System.out.println(result.getResponse());
|
||||||
}
|
}
|
||||||
}
|
}
|
@ -1,14 +1,14 @@
|
|||||||
---
|
---
|
||||||
sidebar_position: 4
|
sidebar_position: 5
|
||||||
---
|
---
|
||||||
|
|
||||||
# Ask - With Image URLs
|
# Generate - With Image URLs
|
||||||
|
|
||||||
This API lets you ask questions along with the image files to the LLMs.
|
This API lets you ask questions along with the image files to the LLMs.
|
||||||
These APIs correlate to
|
This API corresponds to
|
||||||
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
|
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
|
||||||
|
|
||||||
:::caution
|
:::note
|
||||||
|
|
||||||
Executing this on Ollama server running in CPU-mode will take longer to generate response. Hence, GPU-mode is
|
Executing this on Ollama server running in CPU-mode will take longer to generate response. Hence, GPU-mode is
|
||||||
recommended.
|
recommended.
|
||||||
@ -22,6 +22,13 @@ Passing the link of this image the following code:
|
|||||||

|

|
||||||
|
|
||||||
```java
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.models.response.OllamaResult;
|
||||||
|
import io.github.ollama4j.types.OllamaModelType;
|
||||||
|
import io.github.ollama4j.utils.OptionsBuilder;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
public class Main {
|
public class Main {
|
||||||
|
|
||||||
public static void main(String[] args) {
|
public static void main(String[] args) {
|
||||||
@ -29,10 +36,12 @@ public class Main {
|
|||||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
ollamaAPI.setRequestTimeoutSeconds(10);
|
ollamaAPI.setRequestTimeoutSeconds(10);
|
||||||
|
|
||||||
OllamaResult result = ollamaAPI.askWithImageURLs(OllamaModelType.LLAVA,
|
OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA,
|
||||||
"What's in this image?",
|
"What's in this image?",
|
||||||
List.of(
|
List.of(
|
||||||
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"));
|
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
|
||||||
|
new OptionsBuilder().build()
|
||||||
|
);
|
||||||
System.out.println(result.getResponse());
|
System.out.println(result.getResponse());
|
||||||
}
|
}
|
||||||
}
|
}
|
372
docs/docs/apis-generate/generate-with-tools.md
Normal file
372
docs/docs/apis-generate/generate-with-tools.md
Normal file
@ -0,0 +1,372 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 3
|
||||||
|
---
|
||||||
|
|
||||||
|
# Generate - With Tools
|
||||||
|
|
||||||
|
This API lets you perform [function calling](https://docs.mistral.ai/capabilities/function_calling/) using LLMs in a
|
||||||
|
synchronous way.
|
||||||
|
This API corresponds to
|
||||||
|
the [generate](https://github.com/ollama/ollama/blob/main/docs/api.md#request-raw-mode) API with `raw` mode.
|
||||||
|
|
||||||
|
:::note
|
||||||
|
|
||||||
|
This is an only an experimental implementation and has a very basic design.
|
||||||
|
|
||||||
|
Currently, built and tested for [Mistral's latest model](https://ollama.com/library/mistral) only. We could redesign
|
||||||
|
this
|
||||||
|
in the future if tooling is supported for more models with a generic interaction standard from Ollama.
|
||||||
|
|
||||||
|
:::
|
||||||
|
|
||||||
|
### Function Calling/Tools
|
||||||
|
|
||||||
|
Assume you want to call a method in your code based on the response generated from the model.
|
||||||
|
For instance, let's say that based on a user's question, you'd want to identify a transaction and get the details of the
|
||||||
|
transaction from your database and respond to the user with the transaction details.
|
||||||
|
|
||||||
|
You could do that with ease with the `function calling` capabilities of the models by registering your `tools`.
|
||||||
|
|
||||||
|
### Create Functions
|
||||||
|
|
||||||
|
We can create static functions as our tools.
|
||||||
|
|
||||||
|
This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns
|
||||||
|
fuel price value.
|
||||||
|
|
||||||
|
```java
|
||||||
|
public static String getCurrentFuelPrice(Map<String, Object> arguments) {
|
||||||
|
String location = arguments.get("location").toString();
|
||||||
|
String fuelType = arguments.get("fuelType").toString();
|
||||||
|
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This function takes the argument `city` and performs an operation with the argument and returns the weather for a
|
||||||
|
location.
|
||||||
|
|
||||||
|
```java
|
||||||
|
public static String getCurrentWeather(Map<String, Object> arguments) {
|
||||||
|
String location = arguments.get("city").toString();
|
||||||
|
return "Currently " + location + "'s weather is nice.";
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Another way to create our tools is by creating classes by extending `ToolFunction`.
|
||||||
|
|
||||||
|
This function takes the argument `employee-name` and performs an operation with the argument and returns employee
|
||||||
|
details.
|
||||||
|
|
||||||
|
```java
|
||||||
|
class DBQueryFunction implements ToolFunction {
|
||||||
|
@Override
|
||||||
|
public Object apply(Map<String, Object> arguments) {
|
||||||
|
// perform DB operations here
|
||||||
|
return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Define Tool Specifications
|
||||||
|
|
||||||
|
Lets define a sample tool specification called **Fuel Price Tool** for getting the current fuel price.
|
||||||
|
|
||||||
|
- Specify the function `name`, `description`, and `required` properties (`location` and `fuelType`).
|
||||||
|
- Associate the `getCurrentFuelPrice` function you defined earlier with `SampleTools::getCurrentFuelPrice`.
|
||||||
|
|
||||||
|
```java
|
||||||
|
Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
|
||||||
|
.functionName("current-fuel-price")
|
||||||
|
.functionDescription("Get current fuel price")
|
||||||
|
.properties(
|
||||||
|
new Tools.PropsBuilder()
|
||||||
|
.withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
|
||||||
|
.withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
|
||||||
|
.build()
|
||||||
|
)
|
||||||
|
.toolDefinition(SampleTools::getCurrentFuelPrice)
|
||||||
|
.build();
|
||||||
|
```
|
||||||
|
|
||||||
|
Lets also define a sample tool specification called **Weather Tool** for getting the current weather.
|
||||||
|
|
||||||
|
- Specify the function `name`, `description`, and `required` property (`city`).
|
||||||
|
- Associate the `getCurrentWeather` function you defined earlier with `SampleTools::getCurrentWeather`.
|
||||||
|
|
||||||
|
```java
|
||||||
|
Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
|
||||||
|
.functionName("current-weather")
|
||||||
|
.functionDescription("Get current weather")
|
||||||
|
.properties(
|
||||||
|
new Tools.PropsBuilder()
|
||||||
|
.withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
|
||||||
|
.build()
|
||||||
|
)
|
||||||
|
.toolDefinition(SampleTools::getCurrentWeather)
|
||||||
|
.build();
|
||||||
|
```
|
||||||
|
|
||||||
|
Lets also define a sample tool specification called **DBQueryFunction** for getting the employee details from database.
|
||||||
|
|
||||||
|
- Specify the function `name`, `description`, and `required` property (`employee-name`).
|
||||||
|
- Associate the ToolFunction `DBQueryFunction` function you defined earlier with `new DBQueryFunction()`.
|
||||||
|
|
||||||
|
```java
|
||||||
|
Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
|
||||||
|
.functionName("get-employee-details")
|
||||||
|
.functionDescription("Get employee details from the database")
|
||||||
|
.properties(
|
||||||
|
new Tools.PropsBuilder()
|
||||||
|
.withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
|
||||||
|
.withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
|
||||||
|
.withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
|
||||||
|
.build()
|
||||||
|
)
|
||||||
|
.toolDefinition(new DBQueryFunction())
|
||||||
|
.build();
|
||||||
|
```
|
||||||
|
|
||||||
|
### Register the Tools
|
||||||
|
|
||||||
|
Register the defined tools (`fuel price` and `weather`) with the OllamaAPI.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
ollamaAPI.registerTool(fuelPriceToolSpecification);
|
||||||
|
ollamaAPI.registerTool(weatherToolSpecification);
|
||||||
|
ollamaAPI.registerTool(databaseQueryToolSpecification);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Create prompt with Tools
|
||||||
|
|
||||||
|
`Prompt 1`: Create a prompt asking for the petrol price in Bengaluru using the defined fuel price and weather tools.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
String prompt1 = new Tools.PromptBuilder()
|
||||||
|
.withToolSpecification(fuelPriceToolSpecification)
|
||||||
|
.withToolSpecification(weatherToolSpecification)
|
||||||
|
.withPrompt("What is the petrol price in Bengaluru?")
|
||||||
|
.build();
|
||||||
|
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, new OptionsBuilder().build());
|
||||||
|
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
|
||||||
|
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Now, fire away your question to the model.
|
||||||
|
|
||||||
|
You will get a response similar to:
|
||||||
|
|
||||||
|
::::tip[LLM Response]
|
||||||
|
|
||||||
|
[Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L
|
||||||
|
|
||||||
|
::::
|
||||||
|
|
||||||
|
`Prompt 2`: Create a prompt asking for the current weather in Bengaluru using the same tools.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
String prompt2 = new Tools.PromptBuilder()
|
||||||
|
.withToolSpecification(fuelPriceToolSpecification)
|
||||||
|
.withToolSpecification(weatherToolSpecification)
|
||||||
|
.withPrompt("What is the current weather in Bengaluru?")
|
||||||
|
.build();
|
||||||
|
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, new OptionsBuilder().build());
|
||||||
|
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
|
||||||
|
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Again, fire away your question to the model.
|
||||||
|
|
||||||
|
You will get a response similar to:
|
||||||
|
|
||||||
|
::::tip[LLM Response]
|
||||||
|
|
||||||
|
[Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice.
|
||||||
|
|
||||||
|
::::
|
||||||
|
|
||||||
|
`Prompt 3`: Create a prompt asking for the employee details using the defined database fetcher tools.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
String prompt3 = new Tools.PromptBuilder()
|
||||||
|
.withToolSpecification(fuelPriceToolSpecification)
|
||||||
|
.withToolSpecification(weatherToolSpecification)
|
||||||
|
.withToolSpecification(databaseQueryToolSpecification)
|
||||||
|
.withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
|
||||||
|
.build();
|
||||||
|
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt3, new OptionsBuilder().build());
|
||||||
|
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
|
||||||
|
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Again, fire away your question to the model.
|
||||||
|
|
||||||
|
You will get a response similar to:
|
||||||
|
|
||||||
|
::::tip[LLM Response]
|
||||||
|
|
||||||
|
[Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name:
|
||||||
|
Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
|
||||||
|
|
||||||
|
::::
|
||||||
|
|
||||||
|
### Full Example
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.exceptions.OllamaBaseException;
|
||||||
|
import io.github.ollama4j.exceptions.ToolInvocationException;
|
||||||
|
import io.github.ollama4j.tools.OllamaToolsResult;
|
||||||
|
import io.github.ollama4j.tools.ToolFunction;
|
||||||
|
import io.github.ollama4j.tools.Tools;
|
||||||
|
import io.github.ollama4j.utils.OptionsBuilder;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
public class FunctionCallingWithMistralExample {
|
||||||
|
public static void main(String[] args) throws Exception {
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
ollamaAPI.setRequestTimeoutSeconds(60);
|
||||||
|
|
||||||
|
String model = "mistral";
|
||||||
|
|
||||||
|
Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
|
||||||
|
.functionName("current-fuel-price")
|
||||||
|
.functionDescription("Get current fuel price")
|
||||||
|
.properties(
|
||||||
|
new Tools.PropsBuilder()
|
||||||
|
.withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
|
||||||
|
.withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
|
||||||
|
.build()
|
||||||
|
)
|
||||||
|
.toolDefinition(SampleTools::getCurrentFuelPrice)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
|
||||||
|
.functionName("current-weather")
|
||||||
|
.functionDescription("Get current weather")
|
||||||
|
.properties(
|
||||||
|
new Tools.PropsBuilder()
|
||||||
|
.withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
|
||||||
|
.build()
|
||||||
|
)
|
||||||
|
.toolDefinition(SampleTools::getCurrentWeather)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
|
||||||
|
.functionName("get-employee-details")
|
||||||
|
.functionDescription("Get employee details from the database")
|
||||||
|
.properties(
|
||||||
|
new Tools.PropsBuilder()
|
||||||
|
.withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
|
||||||
|
.withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
|
||||||
|
.withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
|
||||||
|
.build()
|
||||||
|
)
|
||||||
|
.toolDefinition(new DBQueryFunction())
|
||||||
|
.build();
|
||||||
|
|
||||||
|
ollamaAPI.registerTool(fuelPriceToolSpecification);
|
||||||
|
ollamaAPI.registerTool(weatherToolSpecification);
|
||||||
|
ollamaAPI.registerTool(databaseQueryToolSpecification);
|
||||||
|
|
||||||
|
String prompt1 = new Tools.PromptBuilder()
|
||||||
|
.withToolSpecification(fuelPriceToolSpecification)
|
||||||
|
.withToolSpecification(weatherToolSpecification)
|
||||||
|
.withPrompt("What is the petrol price in Bengaluru?")
|
||||||
|
.build();
|
||||||
|
ask(ollamaAPI, model, prompt1);
|
||||||
|
|
||||||
|
String prompt2 = new Tools.PromptBuilder()
|
||||||
|
.withToolSpecification(fuelPriceToolSpecification)
|
||||||
|
.withToolSpecification(weatherToolSpecification)
|
||||||
|
.withPrompt("What is the current weather in Bengaluru?")
|
||||||
|
.build();
|
||||||
|
ask(ollamaAPI, model, prompt2);
|
||||||
|
|
||||||
|
String prompt3 = new Tools.PromptBuilder()
|
||||||
|
.withToolSpecification(fuelPriceToolSpecification)
|
||||||
|
.withToolSpecification(weatherToolSpecification)
|
||||||
|
.withToolSpecification(databaseQueryToolSpecification)
|
||||||
|
.withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
|
||||||
|
.build();
|
||||||
|
ask(ollamaAPI, model, prompt3);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException {
|
||||||
|
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, new OptionsBuilder().build());
|
||||||
|
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
|
||||||
|
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class SampleTools {
|
||||||
|
public static String getCurrentFuelPrice(Map<String, Object> arguments) {
|
||||||
|
// Get details from fuel price API
|
||||||
|
String location = arguments.get("location").toString();
|
||||||
|
String fuelType = arguments.get("fuelType").toString();
|
||||||
|
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
|
||||||
|
}
|
||||||
|
|
||||||
|
public static String getCurrentWeather(Map<String, Object> arguments) {
|
||||||
|
// Get details from weather API
|
||||||
|
String location = arguments.get("city").toString();
|
||||||
|
return "Currently " + location + "'s weather is nice.";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class DBQueryFunction implements ToolFunction {
|
||||||
|
@Override
|
||||||
|
public Object apply(Map<String, Object> arguments) {
|
||||||
|
// perform DB operations here
|
||||||
|
return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Run this full example and you will get a response similar to:
|
||||||
|
|
||||||
|
::::tip[LLM Response]
|
||||||
|
|
||||||
|
[Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L
|
||||||
|
|
||||||
|
[Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice.
|
||||||
|
|
||||||
|
[Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name:
|
||||||
|
Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
|
||||||
|
|
||||||
|
::::
|
||||||
|
|
||||||
|
### Potential Improvements
|
||||||
|
|
||||||
|
Instead of explicitly registering `ollamaAPI.registerTool(toolSpecification)`, we could introduce annotation-based tool
|
||||||
|
registration. For example:
|
||||||
|
|
||||||
|
```java
|
||||||
|
|
||||||
|
@ToolSpec(name = "current-fuel-price", desc = "Get current fuel price")
|
||||||
|
public String getCurrentFuelPrice(Map<String, Object> arguments) {
|
||||||
|
String location = arguments.get("location").toString();
|
||||||
|
String fuelType = arguments.get("fuelType").toString();
|
||||||
|
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Instead of passing a map of args `Map<String, Object> arguments` to the tool functions, we could support passing
|
||||||
|
specific args separately with their data types. For example:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
public String getCurrentFuelPrice(String location, String fuelType) {
|
||||||
|
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Updating async/chat APIs with support for tool-based generation.
|
175
docs/docs/apis-generate/generate.md
Normal file
175
docs/docs/apis-generate/generate.md
Normal file
@ -0,0 +1,175 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 1
|
||||||
|
---
|
||||||
|
|
||||||
|
# Generate - Sync
|
||||||
|
|
||||||
|
This API lets you ask questions to the LLMs in a synchronous way.
|
||||||
|
This API corresponds to
|
||||||
|
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
|
||||||
|
|
||||||
|
Use the `OptionBuilder` to build the `Options` object
|
||||||
|
with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
|
||||||
|
Refer
|
||||||
|
to [this](/apis-extras/options-builder).
|
||||||
|
|
||||||
|
## Try asking a question about the model.
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.models.response.OllamaResult;
|
||||||
|
import io.github.ollama4j.types.OllamaModelType;
|
||||||
|
import io.github.ollama4j.utils.OptionsBuilder;
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
|
||||||
|
OllamaResult result =
|
||||||
|
ollamaAPI.generate(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());
|
||||||
|
|
||||||
|
System.out.println(result.getResponse());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
You will get a response similar to:
|
||||||
|
|
||||||
|
> I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational
|
||||||
|
> manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide
|
||||||
|
> range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that
|
||||||
|
> require
|
||||||
|
> natural language understanding and generation capabilities.
|
||||||
|
|
||||||
|
## Try asking a question, receiving the answer streamed
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.models.response.OllamaResult;
|
||||||
|
import io.github.ollama4j.models.generate.OllamaStreamHandler;
|
||||||
|
import io.github.ollama4j.utils.OptionsBuilder;
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
// define a stream handler (Consumer<String>)
|
||||||
|
OllamaStreamHandler streamHandler = (s) -> {
|
||||||
|
System.out.println(s);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Should be called using seperate thread to gain non blocking streaming effect.
|
||||||
|
OllamaResult result = ollamaAPI.generate(config.getModel(),
|
||||||
|
"What is the capital of France? And what's France's connection with Mona Lisa?",
|
||||||
|
new OptionsBuilder().build(), streamHandler);
|
||||||
|
|
||||||
|
System.out.println("Full response: " + result.getResponse());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
You will get a response similar to:
|
||||||
|
|
||||||
|
> The
|
||||||
|
> The capital
|
||||||
|
> The capital of
|
||||||
|
> The capital of France
|
||||||
|
> The capital of France is
|
||||||
|
> The capital of France is Paris
|
||||||
|
> The capital of France is Paris.
|
||||||
|
> Full response: The capital of France is Paris.
|
||||||
|
|
||||||
|
## Try asking a question from general topics.
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.models.response.OllamaResult;
|
||||||
|
import io.github.ollama4j.types.OllamaModelType;
|
||||||
|
import io.github.ollama4j.utils.OptionsBuilder;
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
|
||||||
|
String prompt = "List all cricket world cup teams of 2019.";
|
||||||
|
|
||||||
|
OllamaResult result =
|
||||||
|
ollamaAPI.generate(OllamaModelType.LLAMA2, prompt, new OptionsBuilder().build());
|
||||||
|
|
||||||
|
System.out.println(result.getResponse());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
You'd then get a response from the model:
|
||||||
|
|
||||||
|
> The 2019 ICC Cricket World Cup was held in England and Wales from May 30 to July 14, 2019. The
|
||||||
|
> following teams
|
||||||
|
> participated in the tournament:
|
||||||
|
>
|
||||||
|
> 1. Afghanistan
|
||||||
|
> 2. Australia
|
||||||
|
> 3. Bangladesh
|
||||||
|
> 4. England
|
||||||
|
> 5. India
|
||||||
|
> 6. New Zealand
|
||||||
|
> 7. Pakistan
|
||||||
|
> 8. South Africa
|
||||||
|
> 9. Sri Lanka
|
||||||
|
> 10. West Indies
|
||||||
|
>
|
||||||
|
> These teams competed in a round-robin format, with the top four teams advancing to the
|
||||||
|
> semi-finals. The tournament was
|
||||||
|
> won by the England cricket team, who defeated New Zealand in the final.
|
||||||
|
|
||||||
|
## Try asking for a Database query for your data schema.
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.models.response.OllamaResult;
|
||||||
|
import io.github.ollama4j.types.OllamaModelType;
|
||||||
|
import io.github.ollama4j.utils.OptionsBuilder;
|
||||||
|
import io.github.ollama4j.utils.SamplePrompts;
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
|
||||||
|
String prompt =
|
||||||
|
SamplePrompts.getSampleDatabasePromptWithQuestion(
|
||||||
|
"List all customer names who have bought one or more products");
|
||||||
|
OllamaResult result =
|
||||||
|
ollamaAPI.generate(OllamaModelType.SQLCODER, prompt, new OptionsBuilder().build());
|
||||||
|
System.out.println(result.getResponse());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
_Note: Here I've used
|
||||||
|
a [sample prompt](https://github.com/ollama4j/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
|
||||||
|
containing a database schema from within this library for demonstration purposes._
|
||||||
|
|
||||||
|
You'd then get a response from the model:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
SELECT customers.name
|
||||||
|
FROM sales
|
||||||
|
JOIN customers ON sales.customer_id = customers.customer_id
|
||||||
|
GROUP BY customers.name;
|
||||||
|
```
|
74
docs/docs/apis-generate/prompt-builder.md
Normal file
74
docs/docs/apis-generate/prompt-builder.md
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 6
|
||||||
|
---
|
||||||
|
|
||||||
|
# Prompt Builder
|
||||||
|
|
||||||
|
This is designed for prompt engineering. It allows you to easily build the prompt text for zero-shot, one-shot, few-shot
|
||||||
|
inferences.
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.models.response.OllamaResult;
|
||||||
|
import io.github.ollama4j.types.OllamaModelType;
|
||||||
|
import io.github.ollama4j.utils.OptionsBuilder;
|
||||||
|
import io.github.ollama4j.utils.PromptBuilder;
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
public static void main(String[] args) throws Exception {
|
||||||
|
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
ollamaAPI.setRequestTimeoutSeconds(10);
|
||||||
|
|
||||||
|
String model = OllamaModelType.PHI;
|
||||||
|
|
||||||
|
PromptBuilder promptBuilder =
|
||||||
|
new PromptBuilder()
|
||||||
|
.addLine("You are an expert coder and understand different programming languages.")
|
||||||
|
.addLine("Given a question, answer ONLY with code.")
|
||||||
|
.addLine("Produce clean, formatted and indented code in markdown format.")
|
||||||
|
.addLine(
|
||||||
|
"DO NOT include ANY extra text apart from code. Follow this instruction very strictly!")
|
||||||
|
.addLine("If there's any additional information you want to add, use comments within code.")
|
||||||
|
.addLine("Answer only in the programming language that has been asked for.")
|
||||||
|
.addSeparator()
|
||||||
|
.addLine("Example: Sum 2 numbers in Python")
|
||||||
|
.addLine("Answer:")
|
||||||
|
.addLine("```python")
|
||||||
|
.addLine("def sum(num1: int, num2: int) -> int:")
|
||||||
|
.addLine(" return num1 + num2")
|
||||||
|
.addLine("```")
|
||||||
|
.addSeparator()
|
||||||
|
.add("How do I read a file in Go and print its contents to stdout?");
|
||||||
|
|
||||||
|
boolean raw = false;
|
||||||
|
OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), raw, new OptionsBuilder().build());
|
||||||
|
System.out.println(response.getResponse());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
You will get a response similar to:
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
)
|
||||||
|
|
||||||
|
func readFile(fileName string) {
|
||||||
|
file, err := ioutil.ReadFile(fileName)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintln(os.Stderr, "Error reading file:", err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
f, _ := ioutil.ReadFile("file.txt")
|
||||||
|
if f != nil {
|
||||||
|
fmt.Println(f.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"label": "APIs - Model Management",
|
"label": "APIs - Model Management",
|
||||||
"position": 4,
|
"position": 2,
|
||||||
"link": {
|
"link": {
|
||||||
"type": "generated-index",
|
"type": "generated-index",
|
||||||
"description": "Details of APIs to manage LLMs."
|
"description": "Details of APIs to manage LLMs."
|
||||||
|
@ -6,6 +6,26 @@ sidebar_position: 4
|
|||||||
|
|
||||||
This API lets you create a custom model on the Ollama server.
|
This API lets you create a custom model on the Ollama server.
|
||||||
|
|
||||||
|
### Create a model from an existing Modelfile in the Ollama server
|
||||||
|
|
||||||
|
```java title="CreateModel.java"
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
|
||||||
|
public class CreateModel {
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
|
||||||
|
String host = "http://localhost:11434/";
|
||||||
|
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
|
||||||
|
ollamaAPI.createModelWithFilePath("mario", "/path/to/mario/modelfile/on/ollama-server");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Create a model by passing the contents of Modelfile
|
||||||
|
|
||||||
```java title="CreateModel.java"
|
```java title="CreateModel.java"
|
||||||
public class CreateModel {
|
public class CreateModel {
|
||||||
|
|
||||||
@ -15,9 +35,128 @@ public class CreateModel {
|
|||||||
|
|
||||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
|
||||||
ollamaAPI.createModel("mycustommodel", "/path/to/modelfile/on/ollama-server");
|
ollamaAPI.createModelWithModelFileContents("mario", "FROM llama2\nSYSTEM You are mario from Super Mario Bros.");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
Once created, you can see it when you use [list models](./list-models) API.
|
Once created, you can see it when you use [list models](./list-models) API.
|
||||||
|
|
||||||
|
### Example of a `Modelfile`
|
||||||
|
|
||||||
|
```
|
||||||
|
FROM llama2
|
||||||
|
# sets the temperature to 1 [higher is more creative, lower is more coherent]
|
||||||
|
PARAMETER temperature 1
|
||||||
|
# sets the context window size to 4096, this controls how many tokens the LLM can use as context to generate the next token
|
||||||
|
PARAMETER num_ctx 4096
|
||||||
|
|
||||||
|
# sets a custom system message to specify the behavior of the chat assistant
|
||||||
|
SYSTEM You are Mario from super mario bros, acting as an assistant.
|
||||||
|
```
|
||||||
|
|
||||||
|
### Format of the `Modelfile`
|
||||||
|
|
||||||
|
```modelfile
|
||||||
|
# comment
|
||||||
|
INSTRUCTION arguments
|
||||||
|
```
|
||||||
|
|
||||||
|
| Instruction | Description |
|
||||||
|
|-------------------------------------|----------------------------------------------------------------|
|
||||||
|
| [`FROM`](#from-required) (required) | Defines the base model to use. |
|
||||||
|
| [`PARAMETER`](#parameter) | Sets the parameters for how Ollama will run the model. |
|
||||||
|
| [`TEMPLATE`](#template) | The full prompt template to be sent to the model. |
|
||||||
|
| [`SYSTEM`](#system) | Specifies the system message that will be set in the template. |
|
||||||
|
| [`ADAPTER`](#adapter) | Defines the (Q)LoRA adapters to apply to the model. |
|
||||||
|
| [`LICENSE`](#license) | Specifies the legal license. |
|
||||||
|
|
||||||
|
#### PARAMETER
|
||||||
|
|
||||||
|
The `PARAMETER` instruction defines a parameter that can be set when the model is run.
|
||||||
|
|
||||||
|
| Parameter | Description | Value Type | Example Usage |
|
||||||
|
|----------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------|----------------------|
|
||||||
|
| mirostat | Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0) | int | mirostat 0 |
|
||||||
|
| mirostat_eta | Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. (Default: 0.1) | float | mirostat_eta 0.1 |
|
||||||
|
| mirostat_tau | Controls the balance between coherence and diversity of the output. A lower value will result in more focused and coherent text. (Default: 5.0) | float | mirostat_tau 5.0 |
|
||||||
|
| num_ctx | Sets the size of the context window used to generate the next token. (Default: 2048) | int | num_ctx 4096 |
|
||||||
|
| num_gqa | The number of GQA groups in the transformer layer. Required for some models, for example it is 8 for llama2:70b | int | num_gqa 1 |
|
||||||
|
| num_gpu | The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable. | int | num_gpu 50 |
|
||||||
|
| num_thread | Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). | int | num_thread 8 |
|
||||||
|
| repeat_last_n | Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx) | int | repeat_last_n 64 |
|
||||||
|
| repeat_penalty | Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1) | float | repeat_penalty 1.1 |
|
||||||
|
| temperature | The temperature of the model. Increasing the temperature will make the model answer more creatively. (Default: 0.8) | float | temperature 0.7 |
|
||||||
|
| seed | Sets the random number seed to use for generation. Setting this to a specific number will make the model generate the same text for the same prompt. (Default: 0) | int | seed 42 |
|
||||||
|
| stop | Sets the stop sequences to use. When this pattern is encountered the LLM will stop generating text and return. Multiple stop patterns may be set by specifying multiple separate `stop` parameters in a modelfile. | string | stop "AI assistant:" |
|
||||||
|
| tfs_z | Tail free sampling is used to reduce the impact of less probable tokens from the output. A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. (default: 1) | float | tfs_z 1 |
|
||||||
|
| num_predict | Maximum number of tokens to predict when generating text. (Default: 128, -1 = infinite generation, -2 = fill context) | int | num_predict 42 |
|
||||||
|
| top_k | Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40) | int | top_k 40 |
|
||||||
|
| top_p | Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9) | float | top_p 0.9 |
|
||||||
|
|
||||||
|
#### TEMPLATE
|
||||||
|
|
||||||
|
`TEMPLATE` of the full prompt template to be passed into the model. It may include (optionally) a system message and a
|
||||||
|
user's prompt. This is used to create a full custom prompt, and syntax may be model specific. You can usually find the
|
||||||
|
template for a given model in the readme for that model.
|
||||||
|
|
||||||
|
#### Template Variables
|
||||||
|
|
||||||
|
| Variable | Description |
|
||||||
|
|-----------------|---------------------------------------------------------------------------------------------------------------|
|
||||||
|
| `{{ .System }}` | The system message used to specify custom behavior, this must also be set in the Modelfile as an instruction. |
|
||||||
|
| `{{ .Prompt }}` | The incoming prompt, this is not specified in the model file and will be set based on input. |
|
||||||
|
| `{{ .First }}` | A boolean value used to render specific template information for the first generation of a session. |
|
||||||
|
|
||||||
|
```modelfile
|
||||||
|
TEMPLATE """
|
||||||
|
{{- if .First }}
|
||||||
|
### System:
|
||||||
|
{{ .System }}
|
||||||
|
{{- end }}
|
||||||
|
|
||||||
|
### User:
|
||||||
|
{{ .Prompt }}
|
||||||
|
|
||||||
|
### Response:
|
||||||
|
"""
|
||||||
|
|
||||||
|
SYSTEM """<system message>"""
|
||||||
|
```
|
||||||
|
|
||||||
|
### SYSTEM
|
||||||
|
|
||||||
|
The `SYSTEM` instruction specifies the system message to be used in the template, if applicable.
|
||||||
|
|
||||||
|
```modelfile
|
||||||
|
SYSTEM """<system message>"""
|
||||||
|
```
|
||||||
|
|
||||||
|
### ADAPTER
|
||||||
|
|
||||||
|
The `ADAPTER` instruction specifies the LoRA adapter to apply to the base model. The value of this instruction should be
|
||||||
|
an absolute path or a path relative to the Modelfile and the file must be in a GGML file format. The adapter should be
|
||||||
|
tuned from the base model otherwise the behaviour is undefined.
|
||||||
|
|
||||||
|
```modelfile
|
||||||
|
ADAPTER ./ollama-lora.bin
|
||||||
|
```
|
||||||
|
|
||||||
|
### LICENSE
|
||||||
|
|
||||||
|
The `LICENSE` instruction allows you to specify the legal license under which the model used with this Modelfile is
|
||||||
|
shared or distributed.
|
||||||
|
|
||||||
|
```modelfile
|
||||||
|
LICENSE """
|
||||||
|
<license text>
|
||||||
|
"""
|
||||||
|
```
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- the **`Modelfile` is not case sensitive**. In the examples, uppercase instructions are used to make it easier to
|
||||||
|
distinguish it from arguments.
|
||||||
|
- Instructions can be in any order. In the examples, the `FROM` instruction is first to keep it easily readable.
|
||||||
|
|
||||||
|
Read more about Modelfile: https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md
|
@ -7,6 +7,8 @@ sidebar_position: 5
|
|||||||
This API lets you create a delete a model from the Ollama server.
|
This API lets you create a delete a model from the Ollama server.
|
||||||
|
|
||||||
```java title="DeleteModel.java"
|
```java title="DeleteModel.java"
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
|
||||||
public class Main {
|
public class Main {
|
||||||
|
|
||||||
public static void main(String[] args) {
|
public static void main(String[] args) {
|
||||||
|
@ -7,6 +7,10 @@ sidebar_position: 3
|
|||||||
This API lets you get the details of a model on the Ollama server.
|
This API lets you get the details of a model on the Ollama server.
|
||||||
|
|
||||||
```java title="GetModelDetails.java"
|
```java title="GetModelDetails.java"
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.models.response.ModelDetail;
|
||||||
|
import io.github.ollama4j.types.OllamaModelType;
|
||||||
|
|
||||||
public class Main {
|
public class Main {
|
||||||
|
|
||||||
public static void main(String[] args) {
|
public static void main(String[] args) {
|
||||||
|
@ -7,6 +7,11 @@ sidebar_position: 1
|
|||||||
This API lets you list available models on the Ollama server.
|
This API lets you list available models on the Ollama server.
|
||||||
|
|
||||||
```java title="ListModels.java"
|
```java title="ListModels.java"
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.models.response.Model;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
public class ListModels {
|
public class ListModels {
|
||||||
|
|
||||||
public static void main(String[] args) {
|
public static void main(String[] args) {
|
||||||
|
@ -7,10 +7,13 @@ sidebar_position: 2
|
|||||||
This API lets you pull a model on the Ollama server.
|
This API lets you pull a model on the Ollama server.
|
||||||
|
|
||||||
```java title="PullModel.java"
|
```java title="PullModel.java"
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.types.OllamaModelType;
|
||||||
|
|
||||||
public class Main {
|
public class Main {
|
||||||
|
|
||||||
public static void main(String[] args) {
|
public static void main(String[] args) {
|
||||||
|
|
||||||
String host = "http://localhost:11434/";
|
String host = "http://localhost:11434/";
|
||||||
|
|
||||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||||
|
@ -2,10 +2,38 @@
|
|||||||
sidebar_position: 1
|
sidebar_position: 1
|
||||||
---
|
---
|
||||||
|
|
||||||
# Intro
|
# Introduction
|
||||||
|
|
||||||
Let's get started with **Ollama4j**.
|
Let's get started with **Ollama4j**.
|
||||||
|
|
||||||
|
## 🦙 What is Ollama?
|
||||||
|
|
||||||
|
[Ollama](https://ollama.ai/) is an advanced AI tool that allows users to easily set up and run large language models
|
||||||
|
locally (in CPU and GPU
|
||||||
|
modes). With Ollama, users can leverage powerful language models such as Llama 2 and even customize and create their own
|
||||||
|
models.
|
||||||
|
|
||||||
|
## 👨💻 Why Ollama4j?
|
||||||
|
|
||||||
|
Ollama4j was built for the simple purpose of integrating Ollama with Java applications.
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
flowchart LR
|
||||||
|
o4j[Ollama4j]
|
||||||
|
o[Ollama Server]
|
||||||
|
o4j -->|Communicates with| o;
|
||||||
|
m[Models]
|
||||||
|
p[Your Java Project]
|
||||||
|
subgraph Your Java Environment
|
||||||
|
direction TB
|
||||||
|
p -->|Uses| o4j
|
||||||
|
end
|
||||||
|
subgraph Ollama Setup
|
||||||
|
direction TB
|
||||||
|
o -->|Manages| m
|
||||||
|
end
|
||||||
|
```
|
||||||
|
|
||||||
## Getting Started
|
## Getting Started
|
||||||
|
|
||||||
### What you'll need
|
### What you'll need
|
||||||
@ -50,13 +78,13 @@ Add the dependency to your project's `pom.xml`.
|
|||||||
```xml
|
```xml
|
||||||
|
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>io.github.amithkoujalgi</groupId>
|
<groupId>io.github.ollama4j</groupId>
|
||||||
<artifactId>ollama4j</artifactId>
|
<artifactId>ollama4j</artifactId>
|
||||||
<version>1.0.27</version>
|
<version>1.0.78</version>
|
||||||
</dependency>
|
</dependency>
|
||||||
```
|
```
|
||||||
|
|
||||||
Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j).
|
Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j).
|
||||||
|
|
||||||
You might want to include an implementation of [SL4J](https://www.slf4j.org/) logger in your `pom.xml` file. For
|
You might want to include an implementation of [SL4J](https://www.slf4j.org/) logger in your `pom.xml` file. For
|
||||||
example,
|
example,
|
||||||
@ -88,6 +116,26 @@ or use other suitable implementations.
|
|||||||
Create a new Java class in your project and add this code.
|
Create a new Java class in your project and add this code.
|
||||||
|
|
||||||
```java
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
|
||||||
|
public class OllamaAPITest {
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
OllamaAPI ollamaAPI = new OllamaAPI();
|
||||||
|
|
||||||
|
boolean isOllamaServerReachable = ollamaAPI.ping();
|
||||||
|
|
||||||
|
System.out.println("Is Ollama server running: " + isOllamaServerReachable);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
This uses the default Ollama host as `http://localhost:11434`.
|
||||||
|
|
||||||
|
Specify a different Ollama host that you want to connect to.
|
||||||
|
|
||||||
|
```java
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
|
||||||
public class OllamaAPITest {
|
public class OllamaAPITest {
|
||||||
|
|
||||||
public static void main(String[] args) {
|
public static void main(String[] args) {
|
||||||
@ -99,7 +147,7 @@ public class OllamaAPITest {
|
|||||||
|
|
||||||
boolean isOllamaServerReachable = ollamaAPI.ping();
|
boolean isOllamaServerReachable = ollamaAPI.ping();
|
||||||
|
|
||||||
System.out.println("Is Ollama server alive: " + isOllamaServerReachable);
|
System.out.println("Is Ollama server running: " + isOllamaServerReachable);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
@ -20,7 +20,7 @@ const config = {
|
|||||||
|
|
||||||
// GitHub pages deployment config.
|
// GitHub pages deployment config.
|
||||||
// If you aren't using GitHub pages, you don't need these.
|
// If you aren't using GitHub pages, you don't need these.
|
||||||
organizationName: 'amithkoujalgi', // Usually your GitHub org/user name.
|
organizationName: 'ollama4j', // Usually your GitHub org/user name.
|
||||||
projectName: 'ollama4j', // Usually your repo name.
|
projectName: 'ollama4j', // Usually your repo name.
|
||||||
|
|
||||||
onBrokenLinks: 'throw',
|
onBrokenLinks: 'throw',
|
||||||
@ -40,22 +40,28 @@ const config = {
|
|||||||
/** @type {import('@docusaurus/preset-classic').Options} */
|
/** @type {import('@docusaurus/preset-classic').Options} */
|
||||||
({
|
({
|
||||||
docs: {
|
docs: {
|
||||||
|
path: 'docs',
|
||||||
|
routeBasePath: '', // change this to any URL route you'd want. For example: `home` - if you want /home/intro.
|
||||||
sidebarPath: './sidebars.js',
|
sidebarPath: './sidebars.js',
|
||||||
// Please change this to your repo.
|
// Please change this to your repo.
|
||||||
// Remove this to remove the "edit this page" links.
|
// Remove this to remove the "edit this page" links.
|
||||||
editUrl:
|
editUrl:
|
||||||
'https://github.com/amithkoujalgi/ollama4j/blob/main/docs',
|
'https://github.com/ollama4j/ollama4j/blob/main/docs',
|
||||||
},
|
},
|
||||||
blog: {
|
blog: {
|
||||||
showReadingTime: true,
|
showReadingTime: true,
|
||||||
// Please change this to your repo.
|
// Please change this to your repo.
|
||||||
// Remove this to remove the "edit this page" links.
|
// Remove this to remove the "edit this page" links.
|
||||||
editUrl:
|
editUrl:
|
||||||
'https://github.com/amithkoujalgi/ollama4j/blob/main/docs',
|
'https://github.com/ollama4j/ollama4j/blob/main/docs',
|
||||||
},
|
},
|
||||||
theme: {
|
theme: {
|
||||||
customCss: './src/css/custom.css',
|
customCss: './src/css/custom.css',
|
||||||
},
|
},
|
||||||
|
gtag: {
|
||||||
|
trackingID: 'G-G7FLH6FNDC',
|
||||||
|
anonymizeIP: false,
|
||||||
|
},
|
||||||
}),
|
}),
|
||||||
],
|
],
|
||||||
],
|
],
|
||||||
@ -76,12 +82,13 @@ const config = {
|
|||||||
type: 'docSidebar',
|
type: 'docSidebar',
|
||||||
sidebarId: 'tutorialSidebar',
|
sidebarId: 'tutorialSidebar',
|
||||||
position: 'left',
|
position: 'left',
|
||||||
label: 'Usage',
|
label: 'Docs',
|
||||||
},
|
},
|
||||||
{to: 'https://amithkoujalgi.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
|
{to: 'https://ollama4j.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
|
||||||
|
{to: 'https://ollama4j.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'},
|
||||||
{to: '/blog', label: 'Blog', position: 'left'},
|
{to: '/blog', label: 'Blog', position: 'left'},
|
||||||
{
|
{
|
||||||
href: 'https://github.com/amithkoujalgi/ollama4j',
|
href: 'https://github.com/ollama4j/ollama4j',
|
||||||
label: 'GitHub',
|
label: 'GitHub',
|
||||||
position: 'right',
|
position: 'right',
|
||||||
},
|
},
|
||||||
@ -95,7 +102,7 @@ const config = {
|
|||||||
items: [
|
items: [
|
||||||
{
|
{
|
||||||
label: 'Tutorial',
|
label: 'Tutorial',
|
||||||
to: '/docs/intro',
|
to: '/intro',
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
@ -121,7 +128,7 @@ const config = {
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
label: 'GitHub',
|
label: 'GitHub',
|
||||||
href: 'https://github.com/amithkoujalgi/ollama4j',
|
href: 'https://github.com/ollama4j/ollama4j',
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
@ -131,8 +138,13 @@ const config = {
|
|||||||
prism: {
|
prism: {
|
||||||
theme: prismThemes.github,
|
theme: prismThemes.github,
|
||||||
darkTheme: prismThemes.dracula,
|
darkTheme: prismThemes.dracula,
|
||||||
|
additionalLanguages: ['java'],
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
|
markdown: {
|
||||||
|
mermaid: true,
|
||||||
|
},
|
||||||
|
themes: ['@docusaurus/theme-mermaid']
|
||||||
};
|
};
|
||||||
|
|
||||||
export default config;
|
export default config;
|
||||||
|
3068
docs/package-lock.json
generated
3068
docs/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@ -14,8 +14,10 @@
|
|||||||
"write-heading-ids": "docusaurus write-heading-ids"
|
"write-heading-ids": "docusaurus write-heading-ids"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@docusaurus/core": "3.0.1",
|
"@docusaurus/core": "^3.4.0",
|
||||||
"@docusaurus/preset-classic": "3.0.1",
|
"@docusaurus/plugin-google-gtag": "^3.4.0",
|
||||||
|
"@docusaurus/preset-classic": "^3.4.0",
|
||||||
|
"@docusaurus/theme-mermaid": "^3.4.0",
|
||||||
"@mdx-js/react": "^3.0.0",
|
"@mdx-js/react": "^3.0.0",
|
||||||
"clsx": "^2.0.0",
|
"clsx": "^2.0.0",
|
||||||
"prism-react-renderer": "^2.3.0",
|
"prism-react-renderer": "^2.3.0",
|
||||||
@ -23,8 +25,8 @@
|
|||||||
"react-dom": "^18.0.0"
|
"react-dom": "^18.0.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@docusaurus/module-type-aliases": "3.0.1",
|
"@docusaurus/module-type-aliases": "^3.4.0",
|
||||||
"@docusaurus/types": "3.0.1"
|
"@docusaurus/types": "^3.4.0"
|
||||||
},
|
},
|
||||||
"browserslist": {
|
"browserslist": {
|
||||||
"production": [
|
"production": [
|
||||||
|
@ -6,25 +6,35 @@
|
|||||||
|
|
||||||
/* You can override the default Infima variables here. */
|
/* You can override the default Infima variables here. */
|
||||||
:root {
|
:root {
|
||||||
--ifm-color-primary: #2e8555;
|
--ifm-color-primary: #2e8555;
|
||||||
--ifm-color-primary-dark: #29784c;
|
--ifm-color-primary-dark: #29784c;
|
||||||
--ifm-color-primary-darker: #277148;
|
--ifm-color-primary-darker: #277148;
|
||||||
--ifm-color-primary-darkest: #205d3b;
|
--ifm-color-primary-darkest: #205d3b;
|
||||||
--ifm-color-primary-light: #33925d;
|
--ifm-color-primary-light: #33925d;
|
||||||
--ifm-color-primary-lighter: #359962;
|
--ifm-color-primary-lighter: #359962;
|
||||||
--ifm-color-primary-lightest: #3cad6e;
|
--ifm-color-primary-lightest: #3cad6e;
|
||||||
--ifm-code-font-size: 95%;
|
--ifm-code-font-size: 95%;
|
||||||
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1);
|
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* For readability concerns, you should choose a lighter palette in dark mode. */
|
/* For readability concerns, you should choose a lighter palette in dark mode. */
|
||||||
[data-theme='dark'] {
|
[data-theme='dark'] {
|
||||||
--ifm-color-primary: #25c2a0;
|
--ifm-color-primary: #25c2a0;
|
||||||
--ifm-color-primary-dark: #21af90;
|
--ifm-color-primary-dark: #21af90;
|
||||||
--ifm-color-primary-darker: #1fa588;
|
--ifm-color-primary-darker: #1fa588;
|
||||||
--ifm-color-primary-darkest: #1a8870;
|
--ifm-color-primary-darkest: #1a8870;
|
||||||
--ifm-color-primary-light: #29d5b0;
|
--ifm-color-primary-light: #29d5b0;
|
||||||
--ifm-color-primary-lighter: #32d8b4;
|
--ifm-color-primary-lighter: #32d8b4;
|
||||||
--ifm-color-primary-lightest: #4fddbf;
|
--ifm-color-primary-lightest: #4fddbf;
|
||||||
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3);
|
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
article > header > h1 {
|
||||||
|
font-size: 2rem !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
div > h1,
|
||||||
|
header > h1,
|
||||||
|
h2 > a {
|
||||||
|
font-size: 2rem !important;
|
||||||
|
}
|
@ -19,7 +19,7 @@ function HomepageHeader() {
|
|||||||
<div className={styles.buttons}>
|
<div className={styles.buttons}>
|
||||||
<Link
|
<Link
|
||||||
className="button button--secondary button--lg"
|
className="button button--secondary button--lg"
|
||||||
to="/docs/intro">
|
to="/intro">
|
||||||
Getting Started
|
Getting Started
|
||||||
</Link>
|
</Link>
|
||||||
</div>
|
</div>
|
||||||
|
BIN
logo-small.png
Normal file
BIN
logo-small.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 5.0 KiB |
167
pom.xml
167
pom.xml
@ -1,14 +1,16 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8"?>
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
<modelVersion>4.0.0</modelVersion>
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
|
||||||
<groupId>io.github.amithkoujalgi</groupId>
|
<groupId>io.github.ollama4j</groupId>
|
||||||
<artifactId>ollama4j</artifactId>
|
<artifactId>ollama4j</artifactId>
|
||||||
<version>1.0.30</version>
|
<version>ollama4j-revision</version>
|
||||||
|
|
||||||
<name>Ollama4j</name>
|
<name>Ollama4j</name>
|
||||||
<description>Java library for interacting with Ollama API.</description>
|
<description>Java library for interacting with Ollama API.</description>
|
||||||
<url>https://github.com/amithkoujalgi/ollama4j</url>
|
<url>https://github.com/ollama4j/ollama4j</url>
|
||||||
|
<packaging>jar</packaging>
|
||||||
|
|
||||||
<properties>
|
<properties>
|
||||||
<maven.compiler.source>11</maven.compiler.source>
|
<maven.compiler.source>11</maven.compiler.source>
|
||||||
@ -31,15 +33,15 @@
|
|||||||
<licenses>
|
<licenses>
|
||||||
<license>
|
<license>
|
||||||
<name>MIT License</name>
|
<name>MIT License</name>
|
||||||
<url>https://raw.githubusercontent.com/amithkoujalgi/ollama4j/main/LICENSE</url>
|
<url>https://raw.githubusercontent.com/ollama4j/ollama4j/main/LICENSE</url>
|
||||||
</license>
|
</license>
|
||||||
</licenses>
|
</licenses>
|
||||||
|
|
||||||
<scm>
|
<scm>
|
||||||
<connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
|
<connection>scm:git:git@github.com:ollama4j/ollama4j.git</connection>
|
||||||
<developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
|
<developerConnection>scm:git:https://github.com/ollama4j/ollama4j.git</developerConnection>
|
||||||
<url>https://github.com/amithkoujalgi/ollama4j</url>
|
<url>https://github.com/ollama4j/ollama4j</url>
|
||||||
<tag>v1.0.30</tag>
|
<tag>ollama4j-revision</tag>
|
||||||
</scm>
|
</scm>
|
||||||
|
|
||||||
<build>
|
<build>
|
||||||
@ -70,27 +72,7 @@
|
|||||||
</execution>
|
</execution>
|
||||||
</executions>
|
</executions>
|
||||||
</plugin>
|
</plugin>
|
||||||
<!-- <plugin>-->
|
|
||||||
<!-- <groupId>org.apache.maven.plugins</groupId>-->
|
|
||||||
<!-- <artifactId>maven-gpg-plugin</artifactId>-->
|
|
||||||
<!-- <version>1.5</version>-->
|
|
||||||
<!-- <executions>-->
|
|
||||||
<!-- <execution>-->
|
|
||||||
<!-- <id>sign-artifacts</id>-->
|
|
||||||
<!-- <phase>verify</phase>-->
|
|
||||||
<!-- <goals>-->
|
|
||||||
<!-- <goal>sign</goal>-->
|
|
||||||
<!-- </goals>-->
|
|
||||||
<!-- <configuration>-->
|
|
||||||
<!-- <!– This is necessary for gpg to not try to use the pinentry programs –>-->
|
|
||||||
<!-- <gpgArguments>-->
|
|
||||||
<!-- <arg>--pinentry-mode</arg>-->
|
|
||||||
<!-- <arg>loopback</arg>-->
|
|
||||||
<!-- </gpgArguments>-->
|
|
||||||
<!-- </configuration>-->
|
|
||||||
<!-- </execution>-->
|
|
||||||
<!-- </executions>-->
|
|
||||||
<!-- </plugin>-->
|
|
||||||
<!-- Surefire Plugin for Unit Tests -->
|
<!-- Surefire Plugin for Unit Tests -->
|
||||||
<plugin>
|
<plugin>
|
||||||
<groupId>org.apache.maven.plugins</groupId>
|
<groupId>org.apache.maven.plugins</groupId>
|
||||||
@ -99,7 +81,7 @@
|
|||||||
<configuration>
|
<configuration>
|
||||||
<skipTests>${skipUnitTests}</skipTests>
|
<skipTests>${skipUnitTests}</skipTests>
|
||||||
<includes>
|
<includes>
|
||||||
<include>**/unittests/*.java</include>
|
<include>**/unittests/**/*.java</include>
|
||||||
</includes>
|
</includes>
|
||||||
</configuration>
|
</configuration>
|
||||||
</plugin>
|
</plugin>
|
||||||
@ -127,18 +109,33 @@
|
|||||||
</execution>
|
</execution>
|
||||||
</executions>
|
</executions>
|
||||||
</plugin>
|
</plugin>
|
||||||
|
|
||||||
|
|
||||||
<plugin>
|
<plugin>
|
||||||
<groupId>org.apache.maven.plugins</groupId>
|
<groupId>org.apache.maven.plugins</groupId>
|
||||||
<artifactId>maven-release-plugin</artifactId>
|
<artifactId>maven-gpg-plugin</artifactId>
|
||||||
<version>3.0.1</version>
|
<version>1.5</version>
|
||||||
<configuration>
|
<executions>
|
||||||
<!-- <goals>install</goals>-->
|
<execution>
|
||||||
<tagNameFormat>v@{project.version}</tagNameFormat>
|
<id>sign-artifacts</id>
|
||||||
</configuration>
|
<phase>verify</phase>
|
||||||
|
<goals>
|
||||||
|
<goal>sign</goal>
|
||||||
|
</goals>
|
||||||
|
</execution>
|
||||||
|
</executions>
|
||||||
</plugin>
|
</plugin>
|
||||||
|
|
||||||
</plugins>
|
</plugins>
|
||||||
</build>
|
</build>
|
||||||
|
|
||||||
|
<repositories>
|
||||||
|
<repository>
|
||||||
|
<id>gitea</id>
|
||||||
|
<url>https://gitea.seeseepuff.be/api/packages/seeseemelk/maven</url>
|
||||||
|
</repository>
|
||||||
|
</repositories>
|
||||||
|
|
||||||
<dependencies>
|
<dependencies>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>org.projectlombok</groupId>
|
<groupId>org.projectlombok</groupId>
|
||||||
@ -149,12 +146,17 @@
|
|||||||
<dependency>
|
<dependency>
|
||||||
<groupId>com.fasterxml.jackson.core</groupId>
|
<groupId>com.fasterxml.jackson.core</groupId>
|
||||||
<artifactId>jackson-databind</artifactId>
|
<artifactId>jackson-databind</artifactId>
|
||||||
<version>2.15.3</version>
|
<version>2.17.1</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||||
|
<artifactId>jackson-datatype-jsr310</artifactId>
|
||||||
|
<version>2.17.1</version>
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>ch.qos.logback</groupId>
|
<groupId>ch.qos.logback</groupId>
|
||||||
<artifactId>logback-classic</artifactId>
|
<artifactId>logback-classic</artifactId>
|
||||||
<version>1.3.11</version>
|
<version>1.5.6</version>
|
||||||
<scope>test</scope>
|
<scope>test</scope>
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
@ -174,20 +176,52 @@
|
|||||||
<version>4.1.0</version>
|
<version>4.1.0</version>
|
||||||
<scope>test</scope>
|
<scope>test</scope>
|
||||||
</dependency>
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.json</groupId>
|
||||||
|
<artifactId>json</artifactId>
|
||||||
|
<version>20240205</version>
|
||||||
|
<scope>test</scope>
|
||||||
|
</dependency>
|
||||||
</dependencies>
|
</dependencies>
|
||||||
|
|
||||||
<distributionManagement>
|
<distributionManagement>
|
||||||
<snapshotRepository>
|
<snapshotRepository>
|
||||||
<id>ossrh</id>
|
<id>gitea</id>
|
||||||
<url>https://s01.oss.sonatype.org/content/repositories/snapshots</url>
|
<url>https://gitea.seeseepuff.be/api/packages/seeseemelk/maven</url>
|
||||||
</snapshotRepository>
|
</snapshotRepository>
|
||||||
<repository>
|
<repository>
|
||||||
<id>ossrh</id>
|
<id>gitea</id>
|
||||||
<url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2</url>
|
<url>https://gitea.seeseepuff.be/api/packages/seeseemelk/maven</url>
|
||||||
</repository>
|
</repository>
|
||||||
</distributionManagement>
|
</distributionManagement>
|
||||||
|
|
||||||
<profiles>
|
<profiles>
|
||||||
|
<profile>
|
||||||
|
<id>ossrh</id>
|
||||||
|
<activation>
|
||||||
|
<activeByDefault>true</activeByDefault>
|
||||||
|
</activation>
|
||||||
|
<properties>
|
||||||
|
<gpg.executable>gpg2</gpg.executable>
|
||||||
|
<test.env>unit</test.env>
|
||||||
|
<skipUnitTests>false</skipUnitTests>
|
||||||
|
<skipIntegrationTests>true</skipIntegrationTests>
|
||||||
|
</properties>
|
||||||
|
<build>
|
||||||
|
<plugins>
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.sonatype.central</groupId>
|
||||||
|
<artifactId>central-publishing-maven-plugin</artifactId>
|
||||||
|
<version>0.5.0</version>
|
||||||
|
<extensions>true</extensions>
|
||||||
|
<configuration>
|
||||||
|
<publishingServerId>mvn-repo-id</publishingServerId>
|
||||||
|
<autoPublish>true</autoPublish>
|
||||||
|
</configuration>
|
||||||
|
</plugin>
|
||||||
|
</plugins>
|
||||||
|
</build>
|
||||||
|
</profile>
|
||||||
<profile>
|
<profile>
|
||||||
<id>unit-tests</id>
|
<id>unit-tests</id>
|
||||||
<properties>
|
<properties>
|
||||||
@ -196,8 +230,31 @@
|
|||||||
<skipIntegrationTests>true</skipIntegrationTests>
|
<skipIntegrationTests>true</skipIntegrationTests>
|
||||||
</properties>
|
</properties>
|
||||||
<activation>
|
<activation>
|
||||||
<activeByDefault>true</activeByDefault>
|
<activeByDefault>false</activeByDefault>
|
||||||
</activation>
|
</activation>
|
||||||
|
<build>
|
||||||
|
<plugins>
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.jacoco</groupId>
|
||||||
|
<artifactId>jacoco-maven-plugin</artifactId>
|
||||||
|
<version>0.8.11</version>
|
||||||
|
<executions>
|
||||||
|
<execution>
|
||||||
|
<goals>
|
||||||
|
<goal>prepare-agent</goal>
|
||||||
|
</goals>
|
||||||
|
</execution>
|
||||||
|
<execution>
|
||||||
|
<id>report</id>
|
||||||
|
<phase>test</phase>
|
||||||
|
<goals>
|
||||||
|
<goal>report</goal>
|
||||||
|
</goals>
|
||||||
|
</execution>
|
||||||
|
</executions>
|
||||||
|
</plugin>
|
||||||
|
</plugins>
|
||||||
|
</build>
|
||||||
</profile>
|
</profile>
|
||||||
<profile>
|
<profile>
|
||||||
<id>integration-tests</id>
|
<id>integration-tests</id>
|
||||||
@ -249,9 +306,29 @@
|
|||||||
<autoReleaseAfterClose>true</autoReleaseAfterClose>
|
<autoReleaseAfterClose>true</autoReleaseAfterClose>
|
||||||
</configuration>
|
</configuration>
|
||||||
</plugin>
|
</plugin>
|
||||||
|
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.jacoco</groupId>
|
||||||
|
<artifactId>jacoco-maven-plugin</artifactId>
|
||||||
|
<version>0.8.7</version>
|
||||||
|
<executions>
|
||||||
|
<execution>
|
||||||
|
<goals>
|
||||||
|
<goal>prepare-agent</goal>
|
||||||
|
</goals>
|
||||||
|
</execution>
|
||||||
|
<execution>
|
||||||
|
<id>report</id>
|
||||||
|
<phase>test</phase>
|
||||||
|
<goals>
|
||||||
|
<goal>report</goal>
|
||||||
|
</goals>
|
||||||
|
</execution>
|
||||||
|
</executions>
|
||||||
|
</plugin>
|
||||||
</plugins>
|
</plugins>
|
||||||
</build>
|
</build>
|
||||||
</profile>
|
</profile>
|
||||||
</profiles>
|
</profiles>
|
||||||
|
|
||||||
</project>
|
</project>
|
||||||
|
@ -1,510 +0,0 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core;
|
|
||||||
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.*;
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest;
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest;
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.request.ModelEmbeddingsRequest;
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.models.request.ModelRequest;
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
|
||||||
import java.io.BufferedReader;
|
|
||||||
import java.io.ByteArrayOutputStream;
|
|
||||||
import java.io.File;
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.io.InputStream;
|
|
||||||
import java.io.InputStreamReader;
|
|
||||||
import java.net.URI;
|
|
||||||
import java.net.URISyntaxException;
|
|
||||||
import java.net.URL;
|
|
||||||
import java.net.http.HttpClient;
|
|
||||||
import java.net.http.HttpConnectTimeoutException;
|
|
||||||
import java.net.http.HttpRequest;
|
|
||||||
import java.net.http.HttpResponse;
|
|
||||||
import java.nio.charset.StandardCharsets;
|
|
||||||
import java.nio.file.Files;
|
|
||||||
import java.time.Duration;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.Base64;
|
|
||||||
import java.util.List;
|
|
||||||
import org.slf4j.Logger;
|
|
||||||
import org.slf4j.LoggerFactory;
|
|
||||||
|
|
||||||
/** The base Ollama API class. */
|
|
||||||
@SuppressWarnings("DuplicatedCode")
|
|
||||||
public class OllamaAPI {
|
|
||||||
|
|
||||||
private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class);
|
|
||||||
private final String host;
|
|
||||||
private long requestTimeoutSeconds = 3;
|
|
||||||
private boolean verbose = true;
|
|
||||||
private String username;
|
|
||||||
private String password;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Instantiates the Ollama API.
|
|
||||||
*
|
|
||||||
* @param host the host address of Ollama server
|
|
||||||
*/
|
|
||||||
public OllamaAPI(String host) {
|
|
||||||
if (host.endsWith("/")) {
|
|
||||||
this.host = host.substring(0, host.length() - 1);
|
|
||||||
} else {
|
|
||||||
this.host = host;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setRequestTimeoutSeconds(long requestTimeoutSeconds) {
|
|
||||||
this.requestTimeoutSeconds = requestTimeoutSeconds;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Set/unset logging of responses
|
|
||||||
*
|
|
||||||
* @param verbose true/false
|
|
||||||
*/
|
|
||||||
public void setVerbose(boolean verbose) {
|
|
||||||
this.verbose = verbose;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
public void setBasicAuth(String username, String password) {
|
|
||||||
this.username = username;
|
|
||||||
this.password = password;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* API to check the reachability of Ollama server.
|
|
||||||
*
|
|
||||||
* @return true if the server is reachable, false otherwise.
|
|
||||||
*/
|
|
||||||
public boolean ping() {
|
|
||||||
String url = this.host + "/api/tags";
|
|
||||||
HttpClient httpClient = HttpClient.newHttpClient();
|
|
||||||
HttpRequest httpRequest = null;
|
|
||||||
try {
|
|
||||||
httpRequest =
|
|
||||||
HttpRequest.newBuilder()
|
|
||||||
.uri(new URI(url))
|
|
||||||
.header("Accept", "application/json")
|
|
||||||
.header("Content-type", "application/json")
|
|
||||||
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
|
|
||||||
.GET()
|
|
||||||
.build();
|
|
||||||
} catch (URISyntaxException e) {
|
|
||||||
throw new RuntimeException(e);
|
|
||||||
}
|
|
||||||
HttpResponse<String> response = null;
|
|
||||||
try {
|
|
||||||
response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
|
|
||||||
} catch (HttpConnectTimeoutException e) {
|
|
||||||
return false;
|
|
||||||
} catch (IOException | InterruptedException e) {
|
|
||||||
throw new RuntimeException(e);
|
|
||||||
}
|
|
||||||
int statusCode = response.statusCode();
|
|
||||||
return statusCode == 200;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* List available models from Ollama server.
|
|
||||||
*
|
|
||||||
* @return the list
|
|
||||||
*/
|
|
||||||
public List<Model> listModels()
|
|
||||||
throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
|
|
||||||
String url = this.host + "/api/tags";
|
|
||||||
HttpClient httpClient = HttpClient.newHttpClient();
|
|
||||||
HttpRequest httpRequest =
|
|
||||||
HttpRequest.newBuilder()
|
|
||||||
.uri(new URI(url))
|
|
||||||
.header("Accept", "application/json")
|
|
||||||
.header("Content-type", "application/json")
|
|
||||||
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
|
|
||||||
.GET()
|
|
||||||
.build();
|
|
||||||
HttpResponse<String> response =
|
|
||||||
httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
|
|
||||||
int statusCode = response.statusCode();
|
|
||||||
String responseString = response.body();
|
|
||||||
if (statusCode == 200) {
|
|
||||||
return Utils.getObjectMapper()
|
|
||||||
.readValue(responseString, ListModelsResponse.class)
|
|
||||||
.getModels();
|
|
||||||
} else {
|
|
||||||
throw new OllamaBaseException(statusCode + " - " + responseString);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Pull a model on the Ollama server from the list of <a
|
|
||||||
* href="https://ollama.ai/library">available models</a>.
|
|
||||||
*
|
|
||||||
* @param modelName the name of the model
|
|
||||||
*/
|
|
||||||
public void pullModel(String modelName)
|
|
||||||
throws OllamaBaseException, IOException, URISyntaxException, InterruptedException {
|
|
||||||
String url = this.host + "/api/pull";
|
|
||||||
String jsonData = new ModelRequest(modelName).toString();
|
|
||||||
HttpRequest request =
|
|
||||||
HttpRequest.newBuilder()
|
|
||||||
.uri(new URI(url))
|
|
||||||
.POST(HttpRequest.BodyPublishers.ofString(jsonData))
|
|
||||||
.header("Accept", "application/json")
|
|
||||||
.header("Content-type", "application/json")
|
|
||||||
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
|
|
||||||
.build();
|
|
||||||
HttpClient client = HttpClient.newHttpClient();
|
|
||||||
HttpResponse<InputStream> response =
|
|
||||||
client.send(request, HttpResponse.BodyHandlers.ofInputStream());
|
|
||||||
int statusCode = response.statusCode();
|
|
||||||
InputStream responseBodyStream = response.body();
|
|
||||||
String responseString = "";
|
|
||||||
try (BufferedReader reader =
|
|
||||||
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
|
|
||||||
String line;
|
|
||||||
while ((line = reader.readLine()) != null) {
|
|
||||||
ModelPullResponse modelPullResponse =
|
|
||||||
Utils.getObjectMapper().readValue(line, ModelPullResponse.class);
|
|
||||||
if (verbose) {
|
|
||||||
logger.info(modelPullResponse.getStatus());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (statusCode != 200) {
|
|
||||||
throw new OllamaBaseException(statusCode + " - " + responseString);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Gets model details from the Ollama server.
|
|
||||||
*
|
|
||||||
* @param modelName the model
|
|
||||||
* @return the model details
|
|
||||||
*/
|
|
||||||
public ModelDetail getModelDetails(String modelName)
|
|
||||||
throws IOException, OllamaBaseException, InterruptedException {
|
|
||||||
String url = this.host + "/api/show";
|
|
||||||
String jsonData = new ModelRequest(modelName).toString();
|
|
||||||
HttpRequest request =
|
|
||||||
HttpRequest.newBuilder()
|
|
||||||
.uri(URI.create(url))
|
|
||||||
.header("Accept", "application/json")
|
|
||||||
.header("Content-type", "application/json")
|
|
||||||
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
|
|
||||||
.POST(HttpRequest.BodyPublishers.ofString(jsonData))
|
|
||||||
.build();
|
|
||||||
HttpClient client = HttpClient.newHttpClient();
|
|
||||||
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
|
|
||||||
int statusCode = response.statusCode();
|
|
||||||
String responseBody = response.body();
|
|
||||||
if (statusCode == 200) {
|
|
||||||
return Utils.getObjectMapper().readValue(responseBody, ModelDetail.class);
|
|
||||||
} else {
|
|
||||||
throw new OllamaBaseException(statusCode + " - " + responseBody);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a custom model from a model file. Read more about custom model file creation <a
|
|
||||||
* href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md">here</a>.
|
|
||||||
*
|
|
||||||
* @param modelName the name of the custom model to be created.
|
|
||||||
* @param modelFilePath the path to model file that exists on the Ollama server.
|
|
||||||
*/
|
|
||||||
public void createModelWithFilePath(String modelName, String modelFilePath)
|
|
||||||
throws IOException, InterruptedException, OllamaBaseException {
|
|
||||||
String url = this.host + "/api/create";
|
|
||||||
String jsonData = new CustomModelFilePathRequest(modelName, modelFilePath).toString();
|
|
||||||
HttpRequest request =
|
|
||||||
HttpRequest.newBuilder()
|
|
||||||
.uri(URI.create(url))
|
|
||||||
.header("Accept", "application/json")
|
|
||||||
.header("Content-Type", "application/json")
|
|
||||||
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
|
|
||||||
.POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8))
|
|
||||||
.build();
|
|
||||||
HttpClient client = HttpClient.newHttpClient();
|
|
||||||
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
|
|
||||||
int statusCode = response.statusCode();
|
|
||||||
String responseString = response.body();
|
|
||||||
if (statusCode != 200) {
|
|
||||||
throw new OllamaBaseException(statusCode + " - " + responseString);
|
|
||||||
}
|
|
||||||
// FIXME: Ollama API returns HTTP status code 200 for model creation failure cases. Correct this
|
|
||||||
// if the issue is fixed in the Ollama API server.
|
|
||||||
if (responseString.contains("error")) {
|
|
||||||
throw new OllamaBaseException(responseString);
|
|
||||||
}
|
|
||||||
if (verbose) {
|
|
||||||
logger.info(responseString);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a custom model from a model file. Read more about custom model file creation <a
|
|
||||||
* href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md">here</a>.
|
|
||||||
*
|
|
||||||
* @param modelName the name of the custom model to be created.
|
|
||||||
* @param modelFileContents the path to model file that exists on the Ollama server.
|
|
||||||
*/
|
|
||||||
public void createModelWithModelFileContents(String modelName, String modelFileContents)
|
|
||||||
throws IOException, InterruptedException, OllamaBaseException {
|
|
||||||
String url = this.host + "/api/create";
|
|
||||||
String jsonData = new CustomModelFileContentsRequest(modelName, modelFileContents).toString();
|
|
||||||
HttpRequest request =
|
|
||||||
HttpRequest.newBuilder()
|
|
||||||
.uri(URI.create(url))
|
|
||||||
.header("Accept", "application/json")
|
|
||||||
.header("Content-Type", "application/json")
|
|
||||||
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
|
|
||||||
.POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8))
|
|
||||||
.build();
|
|
||||||
HttpClient client = HttpClient.newHttpClient();
|
|
||||||
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
|
|
||||||
int statusCode = response.statusCode();
|
|
||||||
String responseString = response.body();
|
|
||||||
if (statusCode != 200) {
|
|
||||||
throw new OllamaBaseException(statusCode + " - " + responseString);
|
|
||||||
}
|
|
||||||
if (responseString.contains("error")) {
|
|
||||||
throw new OllamaBaseException(responseString);
|
|
||||||
}
|
|
||||||
if (verbose) {
|
|
||||||
logger.info(responseString);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Delete a model from Ollama server.
|
|
||||||
*
|
|
||||||
* @param modelName the name of the model to be deleted.
|
|
||||||
* @param ignoreIfNotPresent - ignore errors if the specified model is not present on Ollama
|
|
||||||
* server.
|
|
||||||
*/
|
|
||||||
public void deleteModel(String modelName, boolean ignoreIfNotPresent)
|
|
||||||
throws IOException, InterruptedException, OllamaBaseException {
|
|
||||||
String url = this.host + "/api/delete";
|
|
||||||
String jsonData = new ModelRequest(modelName).toString();
|
|
||||||
HttpRequest request =
|
|
||||||
HttpRequest.newBuilder()
|
|
||||||
.uri(URI.create(url))
|
|
||||||
.method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8))
|
|
||||||
.header("Accept", "application/json")
|
|
||||||
.header("Content-type", "application/json")
|
|
||||||
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
|
|
||||||
.build();
|
|
||||||
HttpClient client = HttpClient.newHttpClient();
|
|
||||||
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
|
|
||||||
int statusCode = response.statusCode();
|
|
||||||
String responseBody = response.body();
|
|
||||||
if (statusCode == 404 && responseBody.contains("model") && responseBody.contains("not found")) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (statusCode != 200) {
|
|
||||||
throw new OllamaBaseException(statusCode + " - " + responseBody);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Generate embeddings for a given text from a model
|
|
||||||
*
|
|
||||||
* @param model name of model to generate embeddings from
|
|
||||||
* @param prompt text to generate embeddings for
|
|
||||||
* @return embeddings
|
|
||||||
*/
|
|
||||||
public List<Double> generateEmbeddings(String model, String prompt)
|
|
||||||
throws IOException, InterruptedException, OllamaBaseException {
|
|
||||||
URI uri = URI.create(this.host + "/api/embeddings");
|
|
||||||
String jsonData = new ModelEmbeddingsRequest(model, prompt).toString();
|
|
||||||
HttpClient httpClient = HttpClient.newHttpClient();
|
|
||||||
HttpRequest.Builder requestBuilder = getRequestBuilderDefault(uri)
|
|
||||||
.header("Accept", "application/json")
|
|
||||||
.POST(HttpRequest.BodyPublishers.ofString(jsonData));
|
|
||||||
HttpRequest request = requestBuilder.build();
|
|
||||||
HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
|
|
||||||
int statusCode = response.statusCode();
|
|
||||||
String responseBody = response.body();
|
|
||||||
if (statusCode == 200) {
|
|
||||||
EmbeddingResponse embeddingResponse =
|
|
||||||
Utils.getObjectMapper().readValue(responseBody, EmbeddingResponse.class);
|
|
||||||
return embeddingResponse.getEmbedding();
|
|
||||||
} else {
|
|
||||||
throw new OllamaBaseException(statusCode + " - " + responseBody);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Ask a question to a model running on Ollama server. This is a sync/blocking call.
|
|
||||||
*
|
|
||||||
* @param model the ollama model to ask the question to
|
|
||||||
* @param promptText the prompt/question text
|
|
||||||
* @return OllamaResult - that includes response text and time taken for response
|
|
||||||
*/
|
|
||||||
public OllamaResult ask(String model, String promptText)
|
|
||||||
throws OllamaBaseException, IOException, InterruptedException {
|
|
||||||
OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, promptText);
|
|
||||||
return askSync(ollamaRequestModel);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Ask a question to a model running on Ollama server and get a callback handle that can be used
|
|
||||||
* to check for status and get the response from the model later. This would be an
|
|
||||||
* async/non-blocking call.
|
|
||||||
*
|
|
||||||
* @param model the ollama model to ask the question to
|
|
||||||
* @param promptText the prompt/question text
|
|
||||||
* @return the ollama async result callback handle
|
|
||||||
*/
|
|
||||||
public OllamaAsyncResultCallback askAsync(String model, String promptText) {
|
|
||||||
OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, promptText);
|
|
||||||
HttpClient httpClient = HttpClient.newHttpClient();
|
|
||||||
URI uri = URI.create(this.host + "/api/generate");
|
|
||||||
OllamaAsyncResultCallback ollamaAsyncResultCallback =
|
|
||||||
new OllamaAsyncResultCallback(httpClient, uri, ollamaRequestModel, requestTimeoutSeconds);
|
|
||||||
ollamaAsyncResultCallback.start();
|
|
||||||
return ollamaAsyncResultCallback;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* With one or more image files, ask a question to a model running on Ollama server. This is a
|
|
||||||
* sync/blocking call.
|
|
||||||
*
|
|
||||||
* @param model the ollama model to ask the question to
|
|
||||||
* @param promptText the prompt/question text
|
|
||||||
* @param imageFiles the list of image files to use for the question
|
|
||||||
* @return OllamaResult - that includes response text and time taken for response
|
|
||||||
*/
|
|
||||||
public OllamaResult askWithImageFiles(String model, String promptText, List<File> imageFiles)
|
|
||||||
throws OllamaBaseException, IOException, InterruptedException {
|
|
||||||
List<String> images = new ArrayList<>();
|
|
||||||
for (File imageFile : imageFiles) {
|
|
||||||
images.add(encodeFileToBase64(imageFile));
|
|
||||||
}
|
|
||||||
OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, promptText, images);
|
|
||||||
return askSync(ollamaRequestModel);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* With one or more image URLs, ask a question to a model running on Ollama server. This is a
|
|
||||||
* sync/blocking call.
|
|
||||||
*
|
|
||||||
* @param model the ollama model to ask the question to
|
|
||||||
* @param promptText the prompt/question text
|
|
||||||
* @param imageURLs the list of image URLs to use for the question
|
|
||||||
* @return OllamaResult - that includes response text and time taken for response
|
|
||||||
*/
|
|
||||||
public OllamaResult askWithImageURLs(String model, String promptText, List<String> imageURLs)
|
|
||||||
throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
|
|
||||||
List<String> images = new ArrayList<>();
|
|
||||||
for (String imageURL : imageURLs) {
|
|
||||||
images.add(encodeByteArrayToBase64(loadImageBytesFromUrl(imageURL)));
|
|
||||||
}
|
|
||||||
OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, promptText, images);
|
|
||||||
return askSync(ollamaRequestModel);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static String encodeFileToBase64(File file) throws IOException {
|
|
||||||
return Base64.getEncoder().encodeToString(Files.readAllBytes(file.toPath()));
|
|
||||||
}
|
|
||||||
|
|
||||||
private static String encodeByteArrayToBase64(byte[] bytes) {
|
|
||||||
return Base64.getEncoder().encodeToString(bytes);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static byte[] loadImageBytesFromUrl(String imageUrl)
|
|
||||||
throws IOException, URISyntaxException {
|
|
||||||
URL url = new URI(imageUrl).toURL();
|
|
||||||
try (InputStream in = url.openStream();
|
|
||||||
ByteArrayOutputStream out = new ByteArrayOutputStream()) {
|
|
||||||
byte[] buffer = new byte[1024];
|
|
||||||
int bytesRead;
|
|
||||||
while ((bytesRead = in.read(buffer)) != -1) {
|
|
||||||
out.write(buffer, 0, bytesRead);
|
|
||||||
}
|
|
||||||
return out.toByteArray();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private OllamaResult askSync(OllamaRequestModel ollamaRequestModel)
|
|
||||||
throws OllamaBaseException, IOException, InterruptedException {
|
|
||||||
long startTime = System.currentTimeMillis();
|
|
||||||
HttpClient httpClient = HttpClient.newHttpClient();
|
|
||||||
URI uri = URI.create(this.host + "/api/generate");
|
|
||||||
HttpRequest.Builder requestBuilder = getRequestBuilderDefault(uri)
|
|
||||||
.POST(
|
|
||||||
HttpRequest.BodyPublishers.ofString(
|
|
||||||
Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)));
|
|
||||||
HttpRequest request = requestBuilder.build();
|
|
||||||
logger.debug("Ask model '" + ollamaRequestModel + "' ...");
|
|
||||||
HttpResponse<InputStream> response =
|
|
||||||
httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
|
|
||||||
int statusCode = response.statusCode();
|
|
||||||
InputStream responseBodyStream = response.body();
|
|
||||||
StringBuilder responseBuffer = new StringBuilder();
|
|
||||||
try (BufferedReader reader =
|
|
||||||
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
|
|
||||||
String line;
|
|
||||||
while ((line = reader.readLine()) != null) {
|
|
||||||
if (statusCode == 404) {
|
|
||||||
logger.warn("Status code: 404 (Not Found)");
|
|
||||||
OllamaErrorResponseModel ollamaResponseModel =
|
|
||||||
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
|
|
||||||
responseBuffer.append(ollamaResponseModel.getError());
|
|
||||||
} else if (statusCode == 401) {
|
|
||||||
logger.warn("Status code: 401 (Unauthorized)");
|
|
||||||
OllamaErrorResponseModel ollamaResponseModel =
|
|
||||||
Utils.getObjectMapper().readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class);
|
|
||||||
responseBuffer.append(ollamaResponseModel.getError());
|
|
||||||
}else {
|
|
||||||
OllamaResponseModel ollamaResponseModel =
|
|
||||||
Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
|
|
||||||
if (!ollamaResponseModel.isDone()) {
|
|
||||||
responseBuffer.append(ollamaResponseModel.getResponse());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (statusCode != 200) {
|
|
||||||
logger.error("Status code " + statusCode + " instead 200");
|
|
||||||
throw new OllamaBaseException(responseBuffer.toString());
|
|
||||||
} else {
|
|
||||||
long endTime = System.currentTimeMillis();
|
|
||||||
return new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
private HttpRequest.Builder getRequestBuilderDefault(URI uri) {
|
|
||||||
HttpRequest.Builder requestBuilder =
|
|
||||||
HttpRequest.newBuilder(uri)
|
|
||||||
.header("Content-Type", "application/json")
|
|
||||||
.timeout(Duration.ofSeconds(requestTimeoutSeconds));
|
|
||||||
if (basicAuthCredentialsSet()) {
|
|
||||||
requestBuilder.header("Authorization", getBasicAuthHeaderValue());
|
|
||||||
}
|
|
||||||
return requestBuilder;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return basic authentication header value (encoded credentials)
|
|
||||||
*/
|
|
||||||
private String getBasicAuthHeaderValue() {
|
|
||||||
String credentialsToEncode = username + ":" + password;
|
|
||||||
return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes());
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return true when Basic Auth credentials set
|
|
||||||
*/
|
|
||||||
private boolean basicAuthCredentialsSet() {
|
|
||||||
if (username != null && password != null) {
|
|
||||||
return true;
|
|
||||||
} else {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,20 +0,0 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
|
||||||
|
|
||||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
|
||||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
|
||||||
import java.util.Map;
|
|
||||||
import lombok.Data;
|
|
||||||
|
|
||||||
@Data
|
|
||||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
|
||||||
public class ModelDetail {
|
|
||||||
private String license;
|
|
||||||
|
|
||||||
@JsonProperty("modelfile")
|
|
||||||
private String modelFile;
|
|
||||||
|
|
||||||
private String parameters;
|
|
||||||
private String template;
|
|
||||||
private String system;
|
|
||||||
private Map<String, String> details;
|
|
||||||
}
|
|
@ -1,155 +0,0 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
|
||||||
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
|
|
||||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
|
||||||
import java.io.BufferedReader;
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.io.InputStream;
|
|
||||||
import java.io.InputStreamReader;
|
|
||||||
import java.net.URI;
|
|
||||||
import java.net.http.HttpClient;
|
|
||||||
import java.net.http.HttpRequest;
|
|
||||||
import java.net.http.HttpResponse;
|
|
||||||
import java.nio.charset.StandardCharsets;
|
|
||||||
import java.time.Duration;
|
|
||||||
import java.util.LinkedList;
|
|
||||||
import java.util.Queue;
|
|
||||||
|
|
||||||
@SuppressWarnings("unused")
|
|
||||||
public class OllamaAsyncResultCallback extends Thread {
|
|
||||||
private final HttpClient client;
|
|
||||||
private final URI uri;
|
|
||||||
private final OllamaRequestModel ollamaRequestModel;
|
|
||||||
private final Queue<String> queue = new LinkedList<>();
|
|
||||||
private String result;
|
|
||||||
private boolean isDone;
|
|
||||||
private boolean succeeded;
|
|
||||||
|
|
||||||
private long requestTimeoutSeconds;
|
|
||||||
|
|
||||||
private int httpStatusCode;
|
|
||||||
private long responseTime = 0;
|
|
||||||
|
|
||||||
public OllamaAsyncResultCallback(
|
|
||||||
HttpClient client,
|
|
||||||
URI uri,
|
|
||||||
OllamaRequestModel ollamaRequestModel,
|
|
||||||
long requestTimeoutSeconds) {
|
|
||||||
this.client = client;
|
|
||||||
this.ollamaRequestModel = ollamaRequestModel;
|
|
||||||
this.uri = uri;
|
|
||||||
this.isDone = false;
|
|
||||||
this.result = "";
|
|
||||||
this.queue.add("");
|
|
||||||
this.requestTimeoutSeconds = requestTimeoutSeconds;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void run() {
|
|
||||||
try {
|
|
||||||
long startTime = System.currentTimeMillis();
|
|
||||||
HttpRequest request =
|
|
||||||
HttpRequest.newBuilder(uri)
|
|
||||||
.POST(
|
|
||||||
HttpRequest.BodyPublishers.ofString(
|
|
||||||
Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)))
|
|
||||||
.header("Content-Type", "application/json")
|
|
||||||
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
|
|
||||||
.build();
|
|
||||||
HttpResponse<InputStream> response =
|
|
||||||
client.send(request, HttpResponse.BodyHandlers.ofInputStream());
|
|
||||||
int statusCode = response.statusCode();
|
|
||||||
this.httpStatusCode = statusCode;
|
|
||||||
|
|
||||||
InputStream responseBodyStream = response.body();
|
|
||||||
try (BufferedReader reader =
|
|
||||||
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
|
|
||||||
String line;
|
|
||||||
StringBuilder responseBuffer = new StringBuilder();
|
|
||||||
while ((line = reader.readLine()) != null) {
|
|
||||||
if (statusCode == 404) {
|
|
||||||
OllamaErrorResponseModel ollamaResponseModel =
|
|
||||||
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
|
|
||||||
queue.add(ollamaResponseModel.getError());
|
|
||||||
responseBuffer.append(ollamaResponseModel.getError());
|
|
||||||
} else {
|
|
||||||
OllamaResponseModel ollamaResponseModel =
|
|
||||||
Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
|
|
||||||
queue.add(ollamaResponseModel.getResponse());
|
|
||||||
if (!ollamaResponseModel.isDone()) {
|
|
||||||
responseBuffer.append(ollamaResponseModel.getResponse());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
this.isDone = true;
|
|
||||||
this.succeeded = true;
|
|
||||||
this.result = responseBuffer.toString();
|
|
||||||
long endTime = System.currentTimeMillis();
|
|
||||||
responseTime = endTime - startTime;
|
|
||||||
}
|
|
||||||
if (statusCode != 200) {
|
|
||||||
throw new OllamaBaseException(this.result);
|
|
||||||
}
|
|
||||||
} catch (IOException | InterruptedException | OllamaBaseException e) {
|
|
||||||
this.isDone = true;
|
|
||||||
this.succeeded = false;
|
|
||||||
this.result = "[FAILED] " + e.getMessage();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the status of the thread. This does not indicate that the request was successful or a
|
|
||||||
* failure, rather it is just a status flag to indicate if the thread is active or ended.
|
|
||||||
*
|
|
||||||
* @return boolean - status
|
|
||||||
*/
|
|
||||||
public boolean isComplete() {
|
|
||||||
return isDone;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the HTTP response status code for the request that was made to Ollama server.
|
|
||||||
*
|
|
||||||
* @return int - the status code for the request
|
|
||||||
*/
|
|
||||||
public int getHttpStatusCode() {
|
|
||||||
return httpStatusCode;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the status of the request. Indicates if the request was successful or a failure. If the
|
|
||||||
* request was a failure, the `getResponse()` method will return the error message.
|
|
||||||
*
|
|
||||||
* @return boolean - status
|
|
||||||
*/
|
|
||||||
public boolean isSucceeded() {
|
|
||||||
return succeeded;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the final response when the execution completes. Does not return intermediate results.
|
|
||||||
*
|
|
||||||
* @return String - response text
|
|
||||||
*/
|
|
||||||
public String getResponse() {
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Queue<String> getStream() {
|
|
||||||
return queue;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the response time in milliseconds.
|
|
||||||
*
|
|
||||||
* @return long - response time in milliseconds.
|
|
||||||
*/
|
|
||||||
public long getResponseTime() {
|
|
||||||
return responseTime;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setRequestTimeoutSeconds(long requestTimeoutSeconds) {
|
|
||||||
this.requestTimeoutSeconds = requestTimeoutSeconds;
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,13 +0,0 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
|
||||||
|
|
||||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
|
||||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
|
||||||
import java.util.List;
|
|
||||||
import lombok.Data;
|
|
||||||
|
|
||||||
@Data
|
|
||||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
|
||||||
public class OllamaErrorResponseModel {
|
|
||||||
|
|
||||||
private String error;
|
|
||||||
}
|
|
@ -1,35 +0,0 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
|
||||||
|
|
||||||
|
|
||||||
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
|
|
||||||
|
|
||||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
|
||||||
import java.util.List;
|
|
||||||
import lombok.Data;
|
|
||||||
|
|
||||||
@Data
|
|
||||||
public class OllamaRequestModel {
|
|
||||||
|
|
||||||
private String model;
|
|
||||||
private String prompt;
|
|
||||||
private List<String> images;
|
|
||||||
|
|
||||||
public OllamaRequestModel(String model, String prompt) {
|
|
||||||
this.model = model;
|
|
||||||
this.prompt = prompt;
|
|
||||||
}
|
|
||||||
|
|
||||||
public OllamaRequestModel(String model, String prompt, List<String> images) {
|
|
||||||
this.model = model;
|
|
||||||
this.prompt = prompt;
|
|
||||||
this.images = images;
|
|
||||||
}
|
|
||||||
|
|
||||||
public String toString() {
|
|
||||||
try {
|
|
||||||
return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
|
|
||||||
} catch (JsonProcessingException e) {
|
|
||||||
throw new RuntimeException(e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,23 +0,0 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models.request;
|
|
||||||
|
|
||||||
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
|
|
||||||
|
|
||||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
|
||||||
import lombok.AllArgsConstructor;
|
|
||||||
import lombok.Data;
|
|
||||||
|
|
||||||
@Data
|
|
||||||
@AllArgsConstructor
|
|
||||||
public class ModelEmbeddingsRequest {
|
|
||||||
private String model;
|
|
||||||
private String prompt;
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String toString() {
|
|
||||||
try {
|
|
||||||
return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
|
|
||||||
} catch (JsonProcessingException e) {
|
|
||||||
throw new RuntimeException(e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,63 +0,0 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.types;

/**
 * A class to provide constants for all the supported models by Ollama.
 *
 * <p>Refer to the full list of models and the details here: <a
 * href="https://ollama.ai/library">https://ollama.ai/library</a>
 */
@SuppressWarnings("ALL")
public class OllamaModelType {
  public static final String LLAMA2 = "llama2";
  public static final String MISTRAL = "mistral";
  public static final String LLAVA = "llava";
  public static final String MIXTRAL = "mixtral";
  public static final String STARLING_LM = "starling-lm";
  public static final String NEURAL_CHAT = "neural-chat";
  public static final String CODELLAMA = "codellama";
  public static final String LLAMA2_UNCENSORED = "llama2-uncensored";
  public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral";
  public static final String ORCA_MINI = "orca-mini";
  public static final String VICUNA = "vicuna";
  public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored";
  public static final String PHIND_CODELLAMA = "phind-codellama";
  public static final String ZEPHYR = "zephyr";
  public static final String WIZARDCODER = "wizardcoder";
  public static final String MISTRAL_OPENORCA = "mistral-openorca";
  public static final String NOUS_HERMES = "nous-hermes";
  public static final String DEEPSEEK_CODER = "deepseek-coder";
  public static final String WIZARD_MATH = "wizard-math";
  public static final String LLAMA2_CHINESE = "llama2-chinese";
  public static final String FALCON = "falcon";
  public static final String ORCA2 = "orca2";
  public static final String STABLE_BELUGA = "stable-beluga";
  public static final String CODEUP = "codeup";
  public static final String EVERYTHINGLM = "everythinglm";
  public static final String MEDLLAMA2 = "medllama2";
  public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored";
  public static final String STARCODER = "starcoder";
  public static final String DOLPHIN22_MISTRAL = "dolphin2.2-mistral";
  public static final String OPENCHAT = "openchat";
  public static final String WIZARD_VICUNA = "wizard-vicuna";
  public static final String OPENHERMES25_MISTRAL = "openhermes2.5-mistral";
  public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2";
  public static final String YI = "yi";
  public static final String YARN_MISTRAL = "yarn-mistral";
  public static final String SAMANTHA_MISTRAL = "samantha-mistral";
  public static final String SQLCODER = "sqlcoder";
  public static final String YARN_LLAMA2 = "yarn-llama2";
  public static final String MEDITRON = "meditron";
  public static final String STABLELM_ZEPHYR = "stablelm-zephyr";
  public static final String OPENHERMES2_MISTRAL = "openhermes2-mistral";
  public static final String DEEPSEEK_LLM = "deepseek-llm";
  public static final String MISTRALLITE = "mistrallite";
  public static final String DOLPHIN21_MISTRAL = "dolphin2.1-mistral";
  public static final String WIZARDLM = "wizardlm";
  public static final String CODEBOOGA = "codebooga";
  public static final String MAGICODER = "magicoder";
  public static final String GOLIATH = "goliath";
  public static final String NEXUSRAVEN = "nexusraven";
  public static final String ALFRED = "alfred";
  public static final String XWINLM = "xwinlm";
  public static final String BAKLLAVA = "bakllava";

  // Constants-only holder; prevent instantiation.
  private OllamaModelType() {}
}
|
|
@ -1,9 +0,0 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.utils;
|
|
||||||
|
|
||||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
|
||||||
|
|
||||||
public class Utils {
|
|
||||||
public static ObjectMapper getObjectMapper() {
|
|
||||||
return new ObjectMapper();
|
|
||||||
}
|
|
||||||
}
|
|
682
src/main/java/io/github/ollama4j/OllamaAPI.java
Normal file
682
src/main/java/io/github/ollama4j/OllamaAPI.java
Normal file
@ -0,0 +1,682 @@
|
|||||||
|
package io.github.ollama4j;
|
||||||
|
|
||||||
|
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.exceptions.ToolInvocationException;
import io.github.ollama4j.exceptions.ToolNotFoundException;
import io.github.ollama4j.models.chat.OllamaChatMessage;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.models.embeddings.OllamaEmbeddingResponseModel;
import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.models.ps.ModelsProcessResponse;
import io.github.ollama4j.models.request.*;
import io.github.ollama4j.models.response.*;
import io.github.ollama4j.tools.*;
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.Utils;
import lombok.Setter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.net.ConnectException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.http.HttpClient;
import java.net.http.HttpConnectTimeoutException;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.time.Duration;
import java.util.*;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The base Ollama API class.
|
||||||
|
*/
|
||||||
|
@SuppressWarnings("DuplicatedCode")
|
||||||
|
public class OllamaAPI {
|
||||||
|
|
||||||
|
private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class);
|
||||||
|
private final String host;
|
||||||
|
/**
|
||||||
|
* -- SETTER --
|
||||||
|
* Set request timeout in seconds. Default is 3 seconds.
|
||||||
|
*/
|
||||||
|
@Setter
|
||||||
|
private long requestTimeoutSeconds = 10;
|
||||||
|
/**
|
||||||
|
* -- SETTER --
|
||||||
|
* Set/unset logging of responses
|
||||||
|
*/
|
||||||
|
@Setter
|
||||||
|
private boolean verbose = true;
|
||||||
|
private BasicAuth basicAuth;
|
||||||
|
|
||||||
|
private final ToolRegistry toolRegistry = new ToolRegistry();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Instantiates the Ollama API with default Ollama host: <a href="http://localhost:11434">http://localhost:11434</a>
|
||||||
|
**/
|
||||||
|
public OllamaAPI() {
|
||||||
|
this.host = "http://localhost:11434";
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Instantiates the Ollama API with specified Ollama host address.
|
||||||
|
*
|
||||||
|
* @param host the host address of Ollama server
|
||||||
|
*/
|
||||||
|
public OllamaAPI(String host) {
|
||||||
|
if (host.endsWith("/")) {
|
||||||
|
this.host = host.substring(0, host.length() - 1);
|
||||||
|
} else {
|
||||||
|
this.host = host;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
    /**
     * Set basic authentication for accessing Ollama server that's behind a reverse-proxy/gateway.
     *
     * @param username the username
     * @param password the password
     */
    public void setBasicAuth(String username, String password) {
        // NOTE(review): credentials are stored here; presumably attached to outgoing requests by
        // getRequestBuilderDefault — confirm in that helper (not visible in this chunk).
        this.basicAuth = new BasicAuth(username, password);
    }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* API to check the reachability of Ollama server.
|
||||||
|
*
|
||||||
|
* @return true if the server is reachable, false otherwise.
|
||||||
|
*/
|
||||||
|
public boolean ping() {
|
||||||
|
String url = this.host + "/api/tags";
|
||||||
|
HttpClient httpClient = HttpClient.newHttpClient();
|
||||||
|
HttpRequest httpRequest = null;
|
||||||
|
try {
|
||||||
|
httpRequest =
|
||||||
|
getRequestBuilderDefault(new URI(url))
|
||||||
|
.header("Accept", "application/json")
|
||||||
|
.header("Content-type", "application/json")
|
||||||
|
.GET()
|
||||||
|
.build();
|
||||||
|
} catch (URISyntaxException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
HttpResponse<String> response = null;
|
||||||
|
try {
|
||||||
|
response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
|
||||||
|
} catch (HttpConnectTimeoutException e) {
|
||||||
|
return false;
|
||||||
|
} catch (IOException | InterruptedException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
int statusCode = response.statusCode();
|
||||||
|
return statusCode == 200;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Provides a list of running models and details about each model currently loaded into memory.
|
||||||
|
*
|
||||||
|
* @return ModelsProcessResponse
|
||||||
|
*/
|
||||||
|
public ModelsProcessResponse ps() throws IOException, InterruptedException, OllamaBaseException {
|
||||||
|
String url = this.host + "/api/ps";
|
||||||
|
HttpClient httpClient = HttpClient.newHttpClient();
|
||||||
|
HttpRequest httpRequest = null;
|
||||||
|
try {
|
||||||
|
httpRequest =
|
||||||
|
getRequestBuilderDefault(new URI(url))
|
||||||
|
.header("Accept", "application/json")
|
||||||
|
.header("Content-type", "application/json")
|
||||||
|
.GET()
|
||||||
|
.build();
|
||||||
|
} catch (URISyntaxException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
HttpResponse<String> response = null;
|
||||||
|
response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
|
||||||
|
int statusCode = response.statusCode();
|
||||||
|
String responseString = response.body();
|
||||||
|
if (statusCode == 200) {
|
||||||
|
return Utils.getObjectMapper()
|
||||||
|
.readValue(responseString, ModelsProcessResponse.class);
|
||||||
|
} else {
|
||||||
|
throw new OllamaBaseException(statusCode + " - " + responseString);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List available models from Ollama server.
|
||||||
|
*
|
||||||
|
* @return the list
|
||||||
|
*/
|
||||||
|
public List<Model> listModels()
|
||||||
|
throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
|
||||||
|
String url = this.host + "/api/tags";
|
||||||
|
HttpClient httpClient = HttpClient.newHttpClient();
|
||||||
|
HttpRequest httpRequest =
|
||||||
|
getRequestBuilderDefault(new URI(url))
|
||||||
|
.header("Accept", "application/json")
|
||||||
|
.header("Content-type", "application/json")
|
||||||
|
.GET()
|
||||||
|
.build();
|
||||||
|
HttpResponse<String> response =
|
||||||
|
httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
|
||||||
|
int statusCode = response.statusCode();
|
||||||
|
String responseString = response.body();
|
||||||
|
if (statusCode == 200) {
|
||||||
|
return Utils.getObjectMapper()
|
||||||
|
.readValue(responseString, ListModelsResponse.class)
|
||||||
|
.getModels();
|
||||||
|
} else {
|
||||||
|
throw new OllamaBaseException(statusCode + " - " + responseString);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pull a model on the Ollama server from the list of <a
|
||||||
|
* href="https://ollama.ai/library">available models</a>.
|
||||||
|
*
|
||||||
|
* @param modelName the name of the model
|
||||||
|
*/
|
||||||
|
public void pullModel(String modelName)
|
||||||
|
throws OllamaBaseException, IOException, URISyntaxException, InterruptedException {
|
||||||
|
String url = this.host + "/api/pull";
|
||||||
|
String jsonData = new ModelRequest(modelName).toString();
|
||||||
|
HttpRequest request =
|
||||||
|
getRequestBuilderDefault(new URI(url))
|
||||||
|
.POST(HttpRequest.BodyPublishers.ofString(jsonData))
|
||||||
|
.header("Accept", "application/json")
|
||||||
|
.header("Content-type", "application/json")
|
||||||
|
.build();
|
||||||
|
HttpClient client = HttpClient.newHttpClient();
|
||||||
|
HttpResponse<InputStream> response =
|
||||||
|
client.send(request, HttpResponse.BodyHandlers.ofInputStream());
|
||||||
|
int statusCode = response.statusCode();
|
||||||
|
InputStream responseBodyStream = response.body();
|
||||||
|
String responseString = "";
|
||||||
|
try (BufferedReader reader =
|
||||||
|
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
|
||||||
|
String line;
|
||||||
|
while ((line = reader.readLine()) != null) {
|
||||||
|
ModelPullResponse modelPullResponse =
|
||||||
|
Utils.getObjectMapper().readValue(line, ModelPullResponse.class);
|
||||||
|
if (verbose) {
|
||||||
|
logger.info(modelPullResponse.getStatus());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (statusCode != 200) {
|
||||||
|
throw new OllamaBaseException(statusCode + " - " + responseString);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets model details from the Ollama server.
|
||||||
|
*
|
||||||
|
* @param modelName the model
|
||||||
|
* @return the model details
|
||||||
|
*/
|
||||||
|
public ModelDetail getModelDetails(String modelName)
|
||||||
|
throws IOException, OllamaBaseException, InterruptedException, URISyntaxException {
|
||||||
|
String url = this.host + "/api/show";
|
||||||
|
String jsonData = new ModelRequest(modelName).toString();
|
||||||
|
HttpRequest request =
|
||||||
|
getRequestBuilderDefault(new URI(url))
|
||||||
|
.header("Accept", "application/json")
|
||||||
|
.header("Content-type", "application/json")
|
||||||
|
.POST(HttpRequest.BodyPublishers.ofString(jsonData))
|
||||||
|
.build();
|
||||||
|
HttpClient client = HttpClient.newHttpClient();
|
||||||
|
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
|
||||||
|
int statusCode = response.statusCode();
|
||||||
|
String responseBody = response.body();
|
||||||
|
if (statusCode == 200) {
|
||||||
|
return Utils.getObjectMapper().readValue(responseBody, ModelDetail.class);
|
||||||
|
} else {
|
||||||
|
throw new OllamaBaseException(statusCode + " - " + responseBody);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a custom model from a model file. Read more about custom model file creation <a
|
||||||
|
* href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md">here</a>.
|
||||||
|
*
|
||||||
|
* @param modelName the name of the custom model to be created.
|
||||||
|
* @param modelFilePath the path to model file that exists on the Ollama server.
|
||||||
|
*/
|
||||||
|
public void createModelWithFilePath(String modelName, String modelFilePath)
|
||||||
|
throws IOException, InterruptedException, OllamaBaseException, URISyntaxException {
|
||||||
|
String url = this.host + "/api/create";
|
||||||
|
String jsonData = new CustomModelFilePathRequest(modelName, modelFilePath).toString();
|
||||||
|
HttpRequest request =
|
||||||
|
getRequestBuilderDefault(new URI(url))
|
||||||
|
.header("Accept", "application/json")
|
||||||
|
.header("Content-Type", "application/json")
|
||||||
|
.POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8))
|
||||||
|
.build();
|
||||||
|
HttpClient client = HttpClient.newHttpClient();
|
||||||
|
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
|
||||||
|
int statusCode = response.statusCode();
|
||||||
|
String responseString = response.body();
|
||||||
|
if (statusCode != 200) {
|
||||||
|
throw new OllamaBaseException(statusCode + " - " + responseString);
|
||||||
|
}
|
||||||
|
// FIXME: Ollama API returns HTTP status code 200 for model creation failure cases. Correct this
|
||||||
|
// if the issue is fixed in the Ollama API server.
|
||||||
|
if (responseString.contains("error")) {
|
||||||
|
throw new OllamaBaseException(responseString);
|
||||||
|
}
|
||||||
|
if (verbose) {
|
||||||
|
logger.info(responseString);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a custom model from a model file. Read more about custom model file creation <a
|
||||||
|
* href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md">here</a>.
|
||||||
|
*
|
||||||
|
* @param modelName the name of the custom model to be created.
|
||||||
|
* @param modelFileContents the path to model file that exists on the Ollama server.
|
||||||
|
*/
|
||||||
|
public void createModelWithModelFileContents(String modelName, String modelFileContents)
|
||||||
|
throws IOException, InterruptedException, OllamaBaseException, URISyntaxException {
|
||||||
|
String url = this.host + "/api/create";
|
||||||
|
String jsonData = new CustomModelFileContentsRequest(modelName, modelFileContents).toString();
|
||||||
|
HttpRequest request =
|
||||||
|
getRequestBuilderDefault(new URI(url))
|
||||||
|
.header("Accept", "application/json")
|
||||||
|
.header("Content-Type", "application/json")
|
||||||
|
.POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8))
|
||||||
|
.build();
|
||||||
|
HttpClient client = HttpClient.newHttpClient();
|
||||||
|
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
|
||||||
|
int statusCode = response.statusCode();
|
||||||
|
String responseString = response.body();
|
||||||
|
if (statusCode != 200) {
|
||||||
|
throw new OllamaBaseException(statusCode + " - " + responseString);
|
||||||
|
}
|
||||||
|
if (responseString.contains("error")) {
|
||||||
|
throw new OllamaBaseException(responseString);
|
||||||
|
}
|
||||||
|
if (verbose) {
|
||||||
|
logger.info(responseString);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete a model from Ollama server.
|
||||||
|
*
|
||||||
|
* @param modelName the name of the model to be deleted.
|
||||||
|
* @param ignoreIfNotPresent ignore errors if the specified model is not present on Ollama server.
|
||||||
|
*/
|
||||||
|
public void deleteModel(String modelName, boolean ignoreIfNotPresent)
|
||||||
|
throws IOException, InterruptedException, OllamaBaseException, URISyntaxException {
|
||||||
|
String url = this.host + "/api/delete";
|
||||||
|
String jsonData = new ModelRequest(modelName).toString();
|
||||||
|
HttpRequest request =
|
||||||
|
getRequestBuilderDefault(new URI(url))
|
||||||
|
.method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8))
|
||||||
|
.header("Accept", "application/json")
|
||||||
|
.header("Content-type", "application/json")
|
||||||
|
.build();
|
||||||
|
HttpClient client = HttpClient.newHttpClient();
|
||||||
|
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
|
||||||
|
int statusCode = response.statusCode();
|
||||||
|
String responseBody = response.body();
|
||||||
|
if (statusCode == 404 && responseBody.contains("model") && responseBody.contains("not found")) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (statusCode != 200) {
|
||||||
|
throw new OllamaBaseException(statusCode + " - " + responseBody);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
    /**
     * Generate embeddings for a given text from a model
     *
     * @param model name of model to generate embeddings from
     * @param prompt text to generate embeddings for
     * @return embeddings
     * @throws IOException if an I/O error occurs during the request
     * @throws InterruptedException if the request is interrupted
     * @throws OllamaBaseException if the server returns a non-OK status
     */
    public List<Double> generateEmbeddings(String model, String prompt)
            throws IOException, InterruptedException, OllamaBaseException {
        // Convenience overload: wraps the arguments in a request model and delegates.
        return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt));
    }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate embeddings using a {@link OllamaEmbeddingsRequestModel}.
|
||||||
|
*
|
||||||
|
* @param modelRequest request for '/api/embeddings' endpoint
|
||||||
|
* @return embeddings
|
||||||
|
*/
|
||||||
|
public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException {
|
||||||
|
URI uri = URI.create(this.host + "/api/embeddings");
|
||||||
|
String jsonData = modelRequest.toString();
|
||||||
|
HttpClient httpClient = HttpClient.newHttpClient();
|
||||||
|
HttpRequest.Builder requestBuilder =
|
||||||
|
getRequestBuilderDefault(uri)
|
||||||
|
.header("Accept", "application/json")
|
||||||
|
.POST(HttpRequest.BodyPublishers.ofString(jsonData));
|
||||||
|
HttpRequest request = requestBuilder.build();
|
||||||
|
HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
|
||||||
|
int statusCode = response.statusCode();
|
||||||
|
String responseBody = response.body();
|
||||||
|
if (statusCode == 200) {
|
||||||
|
OllamaEmbeddingResponseModel embeddingResponse =
|
||||||
|
Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class);
|
||||||
|
return embeddingResponse.getEmbedding();
|
||||||
|
} else {
|
||||||
|
throw new OllamaBaseException(statusCode + " - " + responseBody);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate response for a question to a model running on Ollama server. This is a sync/blocking
|
||||||
|
* call.
|
||||||
|
*
|
||||||
|
* @param model the ollama model to ask the question to
|
||||||
|
* @param prompt the prompt/question text
|
||||||
|
* @param options the Options object - <a
|
||||||
|
* href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More
|
||||||
|
* details on the options</a>
|
||||||
|
* @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
|
||||||
|
* @return OllamaResult that includes response text and time taken for response
|
||||||
|
*/
|
||||||
|
public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler)
|
||||||
|
throws OllamaBaseException, IOException, InterruptedException {
|
||||||
|
OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt);
|
||||||
|
ollamaRequestModel.setRaw(raw);
|
||||||
|
ollamaRequestModel.setOptions(options.getOptionsMap());
|
||||||
|
return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
|
||||||
|
}
|
||||||
|
|
||||||
|
    /**
     * Generates response using the specified AI model and prompt (in blocking mode).
     * <p>
     * Uses {@link #generate(String, String, boolean, Options, OllamaStreamHandler)} with a null
     * stream handler, i.e. streaming is disabled for this call.
     *
     * @param model The name or identifier of the AI model to use for generating the response.
     * @param prompt The input text or prompt to provide to the AI model.
     * @param raw In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the raw parameter to disable templating. Also note that raw mode will not return a context.
     * @param options Additional options or configurations to use when generating the response.
     * @return {@link OllamaResult}
     */
    public OllamaResult generate(String model, String prompt, boolean raw, Options options)
            throws OllamaBaseException, IOException, InterruptedException {
        return generate(model, prompt, raw, options, null);
    }
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generates response using the specified AI model and prompt (in blocking mode), and then invokes a set of tools
|
||||||
|
* on the generated response.
|
||||||
|
*
|
||||||
|
* @param model The name or identifier of the AI model to use for generating the response.
|
||||||
|
* @param prompt The input text or prompt to provide to the AI model.
|
||||||
|
* @param options Additional options or configurations to use when generating the response.
|
||||||
|
* @return {@link OllamaToolsResult} An OllamaToolsResult object containing the response from the AI model and the results of invoking the tools on that output.
|
||||||
|
* @throws OllamaBaseException If there is an error related to the Ollama API or service.
|
||||||
|
* @throws IOException If there is an error related to input/output operations.
|
||||||
|
* @throws InterruptedException If the method is interrupted while waiting for the AI model
|
||||||
|
* to generate the response or for the tools to be invoked.
|
||||||
|
*/
|
||||||
|
public OllamaToolsResult generateWithTools(String model, String prompt, Options options)
|
||||||
|
throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException {
|
||||||
|
boolean raw = true;
|
||||||
|
OllamaToolsResult toolResult = new OllamaToolsResult();
|
||||||
|
Map<ToolFunctionCallSpec, Object> toolResults = new HashMap<>();
|
||||||
|
|
||||||
|
OllamaResult result = generate(model, prompt, raw, options, null);
|
||||||
|
toolResult.setModelResult(result);
|
||||||
|
|
||||||
|
String toolsResponse = result.getResponse();
|
||||||
|
if (toolsResponse.contains("[TOOL_CALLS]")) {
|
||||||
|
toolsResponse = toolsResponse.replace("[TOOL_CALLS]", "");
|
||||||
|
}
|
||||||
|
|
||||||
|
List<ToolFunctionCallSpec> toolFunctionCallSpecs = Utils.getObjectMapper().readValue(toolsResponse, Utils.getObjectMapper().getTypeFactory().constructCollectionType(List.class, ToolFunctionCallSpec.class));
|
||||||
|
for (ToolFunctionCallSpec toolFunctionCallSpec : toolFunctionCallSpecs) {
|
||||||
|
toolResults.put(toolFunctionCallSpec, invokeTool(toolFunctionCallSpec));
|
||||||
|
}
|
||||||
|
toolResult.setToolResults(toolResults);
|
||||||
|
return toolResult;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate response for a question to a model running on Ollama server and get a callback handle
|
||||||
|
* that can be used to check for status and get the response from the model later. This would be
|
||||||
|
* an async/non-blocking call.
|
||||||
|
*
|
||||||
|
* @param model the ollama model to ask the question to
|
||||||
|
* @param prompt the prompt/question text
|
||||||
|
* @return the ollama async result callback handle
|
||||||
|
*/
|
||||||
|
public OllamaAsyncResultStreamer generateAsync(String model, String prompt, boolean raw) {
|
||||||
|
OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt);
|
||||||
|
ollamaRequestModel.setRaw(raw);
|
||||||
|
URI uri = URI.create(this.host + "/api/generate");
|
||||||
|
OllamaAsyncResultStreamer ollamaAsyncResultStreamer =
|
||||||
|
new OllamaAsyncResultStreamer(
|
||||||
|
getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds);
|
||||||
|
ollamaAsyncResultStreamer.start();
|
||||||
|
return ollamaAsyncResultStreamer;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* With one or more image files, ask a question to a model running on Ollama server. This is a
|
||||||
|
* sync/blocking call.
|
||||||
|
*
|
||||||
|
* @param model the ollama model to ask the question to
|
||||||
|
* @param prompt the prompt/question text
|
||||||
|
* @param imageFiles the list of image files to use for the question
|
||||||
|
* @param options the Options object - <a
|
||||||
|
* href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More
|
||||||
|
* details on the options</a>
|
||||||
|
* @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
|
||||||
|
* @return OllamaResult that includes response text and time taken for response
|
||||||
|
*/
|
||||||
|
public OllamaResult generateWithImageFiles(
|
||||||
|
String model, String prompt, List<File> imageFiles, Options options, OllamaStreamHandler streamHandler)
|
||||||
|
throws OllamaBaseException, IOException, InterruptedException {
|
||||||
|
List<String> images = new ArrayList<>();
|
||||||
|
for (File imageFile : imageFiles) {
|
||||||
|
images.add(encodeFileToBase64(imageFile));
|
||||||
|
}
|
||||||
|
OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images);
|
||||||
|
ollamaRequestModel.setOptions(options.getOptionsMap());
|
||||||
|
return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convenience method to call Ollama API without streaming responses.
|
||||||
|
* <p>
|
||||||
|
* Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)}
|
||||||
|
*/
|
||||||
|
public OllamaResult generateWithImageFiles(
|
||||||
|
String model, String prompt, List<File> imageFiles, Options options)
|
||||||
|
throws OllamaBaseException, IOException, InterruptedException {
|
||||||
|
return generateWithImageFiles(model, prompt, imageFiles, options, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* With one or more image URLs, ask a question to a model running on Ollama server. This is a
|
||||||
|
* sync/blocking call.
|
||||||
|
*
|
||||||
|
* @param model the ollama model to ask the question to
|
||||||
|
* @param prompt the prompt/question text
|
||||||
|
* @param imageURLs the list of image URLs to use for the question
|
||||||
|
* @param options the Options object - <a
|
||||||
|
* href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More
|
||||||
|
* details on the options</a>
|
||||||
|
* @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
|
||||||
|
* @return OllamaResult that includes response text and time taken for response
|
||||||
|
*/
|
||||||
|
public OllamaResult generateWithImageURLs(
|
||||||
|
String model, String prompt, List<String> imageURLs, Options options, OllamaStreamHandler streamHandler)
|
||||||
|
throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
|
||||||
|
List<String> images = new ArrayList<>();
|
||||||
|
for (String imageURL : imageURLs) {
|
||||||
|
images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL)));
|
||||||
|
}
|
||||||
|
OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images);
|
||||||
|
ollamaRequestModel.setOptions(options.getOptionsMap());
|
||||||
|
return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convenience method to call Ollama API without streaming responses.
|
||||||
|
* <p>
|
||||||
|
* Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)}
|
||||||
|
*/
|
||||||
|
public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs,
|
||||||
|
Options options)
|
||||||
|
throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
|
||||||
|
return generateWithImageURLs(model, prompt, imageURLs, options, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the api
|
||||||
|
* 'api/chat'.
|
||||||
|
*
|
||||||
|
* @param model the ollama model to ask the question to
|
||||||
|
* @param messages chat history / message stack to send to the model
|
||||||
|
* @return {@link OllamaChatResult} containing the api response and the message history including the newly aqcuired assistant response.
|
||||||
|
* @throws OllamaBaseException any response code than 200 has been returned
|
||||||
|
* @throws IOException in case the responseStream can not be read
|
||||||
|
* @throws InterruptedException in case the server is not reachable or network issues happen
|
||||||
|
*/
|
||||||
|
public OllamaChatResult chat(String model, List<OllamaChatMessage> messages) throws OllamaBaseException, IOException, InterruptedException {
|
||||||
|
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model);
|
||||||
|
return chat(builder.withMessages(messages).build());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}.
|
||||||
|
* <p>
|
||||||
|
* Hint: the OllamaChatRequestModel#getStream() property is not implemented.
|
||||||
|
*
|
||||||
|
* @param request request object to be sent to the server
|
||||||
|
* @return {@link OllamaChatResult}
|
||||||
|
* @throws OllamaBaseException any response code than 200 has been returned
|
||||||
|
* @throws IOException in case the responseStream can not be read
|
||||||
|
* @throws InterruptedException in case the server is not reachable or network issues happen
|
||||||
|
*/
|
||||||
|
public OllamaChatResult chat(OllamaChatRequest request) throws OllamaBaseException, IOException, InterruptedException {
|
||||||
|
return chat(request, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}.
|
||||||
|
* <p>
|
||||||
|
* Hint: the OllamaChatRequestModel#getStream() property is not implemented.
|
||||||
|
*
|
||||||
|
* @param request request object to be sent to the server
|
||||||
|
* @param streamHandler callback handler to handle the last message from stream (caution: all previous messages from stream will be concatenated)
|
||||||
|
* @return {@link OllamaChatResult}
|
||||||
|
* @throws OllamaBaseException any response code than 200 has been returned
|
||||||
|
* @throws IOException in case the responseStream can not be read
|
||||||
|
* @throws InterruptedException in case the server is not reachable or network issues happen
|
||||||
|
*/
|
||||||
|
public OllamaChatResult chat(OllamaChatRequest request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
|
||||||
|
OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
|
||||||
|
OllamaResult result;
|
||||||
|
if (streamHandler != null) {
|
||||||
|
request.setStream(true);
|
||||||
|
result = requestCaller.call(request, streamHandler);
|
||||||
|
} else {
|
||||||
|
result = requestCaller.callSync(request);
|
||||||
|
}
|
||||||
|
return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void registerTool(Tools.ToolSpecification toolSpecification) {
|
||||||
|
toolRegistry.addFunction(toolSpecification.getFunctionName(), toolSpecification.getToolDefinition());
|
||||||
|
}
|
||||||
|
|
||||||
|
// technical private methods //
|
||||||
|
|
||||||
|
private static String encodeFileToBase64(File file) throws IOException {
|
||||||
|
return Base64.getEncoder().encodeToString(Files.readAllBytes(file.toPath()));
|
||||||
|
}
|
||||||
|
|
||||||
|
private static String encodeByteArrayToBase64(byte[] bytes) {
|
||||||
|
return Base64.getEncoder().encodeToString(bytes);
|
||||||
|
}
|
||||||
|
|
||||||
|
private OllamaResult generateSyncForOllamaRequestModel(
|
||||||
|
OllamaGenerateRequest ollamaRequestModel, OllamaStreamHandler streamHandler)
|
||||||
|
throws OllamaBaseException, IOException, InterruptedException {
|
||||||
|
OllamaGenerateEndpointCaller requestCaller =
|
||||||
|
new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
|
||||||
|
OllamaResult result;
|
||||||
|
if (streamHandler != null) {
|
||||||
|
ollamaRequestModel.setStream(true);
|
||||||
|
result = requestCaller.call(ollamaRequestModel, streamHandler);
|
||||||
|
} else {
|
||||||
|
result = requestCaller.callSync(ollamaRequestModel);
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get default request builder.
|
||||||
|
*
|
||||||
|
* @param uri URI to get a HttpRequest.Builder
|
||||||
|
* @return HttpRequest.Builder
|
||||||
|
*/
|
||||||
|
private HttpRequest.Builder getRequestBuilderDefault(URI uri) {
|
||||||
|
HttpRequest.Builder requestBuilder =
|
||||||
|
HttpRequest.newBuilder(uri)
|
||||||
|
.header("Content-Type", "application/json")
|
||||||
|
.timeout(Duration.ofSeconds(requestTimeoutSeconds));
|
||||||
|
if (isBasicAuthCredentialsSet()) {
|
||||||
|
requestBuilder.header("Authorization", getBasicAuthHeaderValue());
|
||||||
|
}
|
||||||
|
return requestBuilder;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get basic authentication header value.
|
||||||
|
*
|
||||||
|
* @return basic authentication header value (encoded credentials)
|
||||||
|
*/
|
||||||
|
private String getBasicAuthHeaderValue() {
|
||||||
|
String credentialsToEncode = basicAuth.getUsername() + ":" + basicAuth.getPassword();
|
||||||
|
return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if Basic Auth credentials set.
|
||||||
|
*
|
||||||
|
* @return true when Basic Auth credentials set
|
||||||
|
*/
|
||||||
|
private boolean isBasicAuthCredentialsSet() {
|
||||||
|
return basicAuth != null;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
private Object invokeTool(ToolFunctionCallSpec toolFunctionCallSpec) throws ToolInvocationException {
|
||||||
|
try {
|
||||||
|
String methodName = toolFunctionCallSpec.getName();
|
||||||
|
Map<String, Object> arguments = toolFunctionCallSpec.getArguments();
|
||||||
|
ToolFunction function = toolRegistry.getFunction(methodName);
|
||||||
|
if (verbose) {
|
||||||
|
logger.debug("Invoking function {} with arguments {}", methodName, arguments);
|
||||||
|
}
|
||||||
|
if (function == null) {
|
||||||
|
throw new ToolNotFoundException("No such tool: " + methodName);
|
||||||
|
}
|
||||||
|
return function.apply(arguments);
|
||||||
|
} catch (Exception e) {
|
||||||
|
throw new ToolInvocationException("Failed to invoke tool: " + toolFunctionCallSpec.getName(), e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -1,4 +1,4 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.exceptions;
|
package io.github.ollama4j.exceptions;
|
||||||
|
|
||||||
public class OllamaBaseException extends Exception {
|
public class OllamaBaseException extends Exception {
|
||||||
|
|
@ -0,0 +1,8 @@
|
|||||||
|
package io.github.ollama4j.exceptions;
|
||||||
|
|
||||||
|
/**
 * Thrown when a registered tool function fails while being invoked.
 */
public class ToolInvocationException extends Exception {

    /**
     * Creates a new exception describing a failed tool invocation.
     *
     * @param message description of the failure
     * @param cause   the underlying exception that caused the failure
     */
    public ToolInvocationException(String message, Exception cause) {
        super(message, cause);
    }
}
|
@ -0,0 +1,8 @@
|
|||||||
|
package io.github.ollama4j.exceptions;
|
||||||
|
|
||||||
|
/**
 * Thrown when a tool call references a function name that was never registered.
 */
public class ToolNotFoundException extends Exception {

    /**
     * Creates a new exception for an unknown tool.
     *
     * @param message description naming the missing tool
     */
    public ToolNotFoundException(String message) {
        super(message);
    }
}
|
@ -0,0 +1,14 @@
|
|||||||
|
package io.github.ollama4j.impl;
|
||||||
|
|
||||||
|
import io.github.ollama4j.models.generate.OllamaStreamHandler;
|
||||||
|
|
||||||
|
public class ConsoleOutputStreamHandler implements OllamaStreamHandler {
|
||||||
|
private final StringBuffer response = new StringBuffer();
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void accept(String message) {
|
||||||
|
String substr = message.substring(response.length());
|
||||||
|
response.append(substr);
|
||||||
|
System.out.print(substr);
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,45 @@
|
|||||||
|
package io.github.ollama4j.models.chat;
|
||||||
|
|
||||||
|
import static io.github.ollama4j.utils.Utils.getObjectMapper;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
|
||||||
|
|
||||||
|
import io.github.ollama4j.utils.FileToBase64Serializer;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
import lombok.NonNull;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
|
||||||
|
/**
 * Defines a single Message to be used inside a chat request against the ollama /api/chat endpoint.
 *
 * @see <a href="https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate chat completion</a>
 */
@Data
@AllArgsConstructor
@RequiredArgsConstructor
@NoArgsConstructor
public class OllamaChatMessage {

    // Role of the message author (system / user / assistant); required.
    @NonNull
    private OllamaChatMessageRole role;

    // Plain-text content of the message; required.
    @NonNull
    private String content;

    // Optional image payloads, serialized as base64 strings in the request JSON.
    @JsonSerialize(using = FileToBase64Serializer.class)
    private List<byte[]> images;

    // Pretty-printed JSON form of the message (also used for logging/debugging).
    @Override
    public String toString() {
        try {
            return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
}
|
@ -0,0 +1,19 @@
|
|||||||
|
package io.github.ollama4j.models.chat;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.annotation.JsonValue;
|
||||||
|
|
||||||
|
/**
 * Defines the possible Chat Message roles.
 */
public enum OllamaChatMessageRole {
    SYSTEM("system"),
    USER("user"),
    ASSISTANT("assistant");

    // Wire value used when serializing the role to JSON via Jackson.
    @JsonValue
    private String roleName;

    private OllamaChatMessageRole(String roleName){
        this.roleName = roleName;
    }
}
|
@ -0,0 +1,40 @@
|
|||||||
|
package io.github.ollama4j.models.chat;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
import io.github.ollama4j.models.request.OllamaCommonRequest;
|
||||||
|
import io.github.ollama4j.utils.OllamaRequestBody;
|
||||||
|
|
||||||
|
import lombok.Getter;
|
||||||
|
import lombok.Setter;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Defines a Request to use against the ollama /api/chat endpoint.
|
||||||
|
*
|
||||||
|
* @see <a href=
|
||||||
|
* "https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate
|
||||||
|
* Chat Completion</a>
|
||||||
|
*/
|
||||||
|
@Getter
|
||||||
|
@Setter
|
||||||
|
public class OllamaChatRequest extends OllamaCommonRequest implements OllamaRequestBody {
|
||||||
|
|
||||||
|
private List<OllamaChatMessage> messages;
|
||||||
|
|
||||||
|
public OllamaChatRequest() {}
|
||||||
|
|
||||||
|
public OllamaChatRequest(String model, List<OllamaChatMessage> messages) {
|
||||||
|
this.model = model;
|
||||||
|
this.messages = messages;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean equals(Object o) {
|
||||||
|
if (!(o instanceof OllamaChatRequest)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.toString().equals(o.toString());
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
@ -0,0 +1,106 @@
|
|||||||
|
package io.github.ollama4j.models.chat;
|
||||||
|
|
||||||
|
import io.github.ollama4j.utils.Options;
|
||||||
|
import io.github.ollama4j.utils.Utils;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
import java.io.File;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.net.URISyntaxException;
|
||||||
|
import java.nio.file.Files;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper class for creating {@link OllamaChatRequest} objects using the builder-pattern.
|
||||||
|
*/
|
||||||
|
public class OllamaChatRequestBuilder {
|
||||||
|
|
||||||
|
private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class);
|
||||||
|
|
||||||
|
private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages) {
|
||||||
|
request = new OllamaChatRequest(model, messages);
|
||||||
|
}
|
||||||
|
|
||||||
|
private OllamaChatRequest request;
|
||||||
|
|
||||||
|
public static OllamaChatRequestBuilder getInstance(String model) {
|
||||||
|
return new OllamaChatRequestBuilder(model, new ArrayList<>());
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaChatRequest build() {
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void reset() {
|
||||||
|
request = new OllamaChatRequest(request.getModel(), new ArrayList<>());
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images) {
|
||||||
|
List<OllamaChatMessage> messages = this.request.getMessages();
|
||||||
|
|
||||||
|
List<byte[]> binaryImages = images.stream().map(file -> {
|
||||||
|
try {
|
||||||
|
return Files.readAllBytes(file.toPath());
|
||||||
|
} catch (IOException e) {
|
||||||
|
LOG.warn(String.format("File '%s' could not be accessed, will not add to message!", file.toPath()), e);
|
||||||
|
return new byte[0];
|
||||||
|
}
|
||||||
|
}).collect(Collectors.toList());
|
||||||
|
|
||||||
|
messages.add(new OllamaChatMessage(role, content, binaryImages));
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, String... imageUrls) {
|
||||||
|
List<OllamaChatMessage> messages = this.request.getMessages();
|
||||||
|
List<byte[]> binaryImages = null;
|
||||||
|
if (imageUrls.length > 0) {
|
||||||
|
binaryImages = new ArrayList<>();
|
||||||
|
for (String imageUrl : imageUrls) {
|
||||||
|
try {
|
||||||
|
binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl));
|
||||||
|
} catch (URISyntaxException e) {
|
||||||
|
LOG.warn(String.format("URL '%s' could not be accessed, will not add to message!", imageUrl), e);
|
||||||
|
} catch (IOException e) {
|
||||||
|
LOG.warn(String.format("Content of URL '%s' could not be read, will not add to message!", imageUrl), e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
messages.add(new OllamaChatMessage(role, content, binaryImages));
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages) {
|
||||||
|
return new OllamaChatRequestBuilder(request.getModel(), messages);
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaChatRequestBuilder withOptions(Options options) {
|
||||||
|
this.request.setOptions(options.getOptionsMap());
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaChatRequestBuilder withGetJsonResponse() {
|
||||||
|
this.request.setReturnFormatJson(true);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaChatRequestBuilder withTemplate(String template) {
|
||||||
|
this.request.setTemplate(template);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaChatRequestBuilder withStreaming() {
|
||||||
|
this.request.setStream(true);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaChatRequestBuilder withKeepAlive(String keepAlive) {
|
||||||
|
this.request.setKeepAlive(keepAlive);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
@ -0,0 +1,23 @@
|
|||||||
|
package io.github.ollama4j.models.chat;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import lombok.Data;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@Data
public class OllamaChatResponseModel {
    // Name of the model that produced this response part.
    private String model;
    // Server-supplied creation timestamp string ("created_at" in JSON).
    private @JsonProperty("created_at") String createdAt;
    // Reason the stream finished ("done_reason" in JSON).
    private @JsonProperty("done_reason") String doneReason;
    // Message content of this response part.
    private OllamaChatMessage message;
    // True on the final part of a streamed response.
    private boolean done;
    // Error description when the server reports a failure.
    private String error;
    // Conversation context tokens returned by the server.
    private List<Integer> context;
    // Timing/accounting fields mirrored from the API response (nanoseconds/counts).
    private @JsonProperty("total_duration") Long totalDuration;
    private @JsonProperty("load_duration") Long loadDuration;
    private @JsonProperty("prompt_eval_duration") Long promptEvalDuration;
    private @JsonProperty("eval_duration") Long evalDuration;
    private @JsonProperty("prompt_eval_count") Integer promptEvalCount;
    private @JsonProperty("eval_count") Integer evalCount;
}
|
@ -0,0 +1,26 @@
|
|||||||
|
package io.github.ollama4j.models.chat;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
import io.github.ollama4j.models.response.OllamaResult;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specific chat-API result that contains the chat history sent to the model and appends the answer as {@link OllamaChatResult} given by the
|
||||||
|
* {@link OllamaChatMessageRole#ASSISTANT} role.
|
||||||
|
*/
|
||||||
|
public class OllamaChatResult extends OllamaResult{
|
||||||
|
|
||||||
|
private List<OllamaChatMessage> chatHistory;
|
||||||
|
|
||||||
|
public OllamaChatResult(String response, long responseTime, int httpStatusCode,
|
||||||
|
List<OllamaChatMessage> chatHistory) {
|
||||||
|
super(response, responseTime, httpStatusCode);
|
||||||
|
this.chatHistory = chatHistory;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<OllamaChatMessage> getChatHistory() {
|
||||||
|
return chatHistory;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
@ -0,0 +1,28 @@
|
|||||||
|
package io.github.ollama4j.models.chat;
|
||||||
|
|
||||||
|
import io.github.ollama4j.models.generate.OllamaStreamHandler;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
public class OllamaChatStreamObserver {
|
||||||
|
|
||||||
|
private OllamaStreamHandler streamHandler;
|
||||||
|
|
||||||
|
private List<OllamaChatResponseModel> responseParts = new ArrayList<>();
|
||||||
|
|
||||||
|
public OllamaChatStreamObserver(OllamaStreamHandler streamHandler) {
|
||||||
|
this.streamHandler = streamHandler;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void notify(OllamaChatResponseModel currentResponsePart) {
|
||||||
|
responseParts.add(currentResponsePart);
|
||||||
|
handleCurrentResponsePart(currentResponsePart);
|
||||||
|
}
|
||||||
|
|
||||||
|
protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart) {
|
||||||
|
streamHandler.accept(currentResponsePart.getMessage().getContent());
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
@ -1,4 +1,4 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
package io.github.ollama4j.models.embeddings;
|
||||||
|
|
||||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
|
||||||
@ -7,7 +7,7 @@ import lombok.Data;
|
|||||||
|
|
||||||
@SuppressWarnings("unused")
|
@SuppressWarnings("unused")
|
||||||
@Data
|
@Data
|
||||||
public class EmbeddingResponse {
|
public class OllamaEmbeddingResponseModel {
|
||||||
@JsonProperty("embedding")
|
@JsonProperty("embedding")
|
||||||
private List<Double> embedding;
|
private List<Double> embedding;
|
||||||
}
|
}
|
@ -0,0 +1,31 @@
|
|||||||
|
package io.github.ollama4j.models.embeddings;
|
||||||
|
|
||||||
|
import io.github.ollama4j.utils.Options;
|
||||||
|
|
||||||
|
public class OllamaEmbeddingsRequestBuilder {
|
||||||
|
|
||||||
|
private OllamaEmbeddingsRequestBuilder(String model, String prompt){
|
||||||
|
request = new OllamaEmbeddingsRequestModel(model, prompt);
|
||||||
|
}
|
||||||
|
|
||||||
|
private OllamaEmbeddingsRequestModel request;
|
||||||
|
|
||||||
|
public static OllamaEmbeddingsRequestBuilder getInstance(String model, String prompt){
|
||||||
|
return new OllamaEmbeddingsRequestBuilder(model, prompt);
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaEmbeddingsRequestModel build(){
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaEmbeddingsRequestBuilder withOptions(Options options){
|
||||||
|
this.request.setOptions(options.getOptionsMap());
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaEmbeddingsRequestBuilder withKeepAlive(String keepAlive){
|
||||||
|
this.request.setKeepAlive(keepAlive);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
@ -0,0 +1,33 @@
|
|||||||
|
package io.github.ollama4j.models.embeddings;
|
||||||
|
|
||||||
|
import static io.github.ollama4j.utils.Utils.getObjectMapper;
|
||||||
|
import java.util.Map;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
import lombok.NonNull;
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
|
||||||
|
@Data
@RequiredArgsConstructor
@NoArgsConstructor
public class OllamaEmbeddingsRequestModel {
    // Model to request embeddings from; required.
    @NonNull
    private String model;
    // Text to compute embeddings for; required.
    @NonNull
    private String prompt;

    // Optional model options map, serialized as-is into the request JSON.
    protected Map<String, Object> options;
    // Optional keep-alive duration (serialized as "keep_alive").
    @JsonProperty(value = "keep_alive")
    private String keepAlive;

    // Pretty-printed JSON form of the request (also used for logging/debugging).
    @Override
    public String toString() {
        try {
            return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
}
|
@ -0,0 +1,46 @@
|
|||||||
|
package io.github.ollama4j.models.generate;
|
||||||
|
|
||||||
|
|
||||||
|
import io.github.ollama4j.models.request.OllamaCommonRequest;
|
||||||
|
import io.github.ollama4j.utils.OllamaRequestBody;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
import lombok.Getter;
|
||||||
|
import lombok.Setter;
|
||||||
|
|
||||||
|
@Getter
|
||||||
|
@Setter
|
||||||
|
public class OllamaGenerateRequest extends OllamaCommonRequest implements OllamaRequestBody{
|
||||||
|
|
||||||
|
private String prompt;
|
||||||
|
private List<String> images;
|
||||||
|
|
||||||
|
private String system;
|
||||||
|
private String context;
|
||||||
|
private boolean raw;
|
||||||
|
|
||||||
|
public OllamaGenerateRequest() {
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaGenerateRequest(String model, String prompt) {
|
||||||
|
this.model = model;
|
||||||
|
this.prompt = prompt;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaGenerateRequest(String model, String prompt, List<String> images) {
|
||||||
|
this.model = model;
|
||||||
|
this.prompt = prompt;
|
||||||
|
this.images = images;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean equals(Object o) {
|
||||||
|
if (!(o instanceof OllamaGenerateRequest)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.toString().equals(o.toString());
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
@ -0,0 +1,55 @@
|
|||||||
|
package io.github.ollama4j.models.generate;
|
||||||
|
|
||||||
|
import io.github.ollama4j.utils.Options;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper class for creating {@link OllamaGenerateRequest}
|
||||||
|
* objects using the builder-pattern.
|
||||||
|
*/
|
||||||
|
public class OllamaGenerateRequestBuilder {
|
||||||
|
|
||||||
|
private OllamaGenerateRequestBuilder(String model, String prompt){
|
||||||
|
request = new OllamaGenerateRequest(model, prompt);
|
||||||
|
}
|
||||||
|
|
||||||
|
private OllamaGenerateRequest request;
|
||||||
|
|
||||||
|
public static OllamaGenerateRequestBuilder getInstance(String model){
|
||||||
|
return new OllamaGenerateRequestBuilder(model,"");
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaGenerateRequest build(){
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaGenerateRequestBuilder withPrompt(String prompt){
|
||||||
|
request.setPrompt(prompt);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaGenerateRequestBuilder withGetJsonResponse(){
|
||||||
|
this.request.setReturnFormatJson(true);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaGenerateRequestBuilder withOptions(Options options){
|
||||||
|
this.request.setOptions(options.getOptionsMap());
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaGenerateRequestBuilder withTemplate(String template){
|
||||||
|
this.request.setTemplate(template);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaGenerateRequestBuilder withStreaming(){
|
||||||
|
this.request.setStream(true);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaGenerateRequestBuilder withKeepAlive(String keepAlive){
|
||||||
|
this.request.setKeepAlive(keepAlive);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
@ -1,4 +1,4 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
package io.github.ollama4j.models.generate;
|
||||||
|
|
||||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
@ -8,7 +8,7 @@ import lombok.Data;
|
|||||||
|
|
||||||
@Data
|
@Data
|
||||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||||
public class OllamaResponseModel {
|
public class OllamaGenerateResponseModel {
|
||||||
private String model;
|
private String model;
|
||||||
private @JsonProperty("created_at") String createdAt;
|
private @JsonProperty("created_at") String createdAt;
|
||||||
private String response;
|
private String response;
|
@ -0,0 +1,29 @@
|
|||||||
|
package io.github.ollama4j.models.generate;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
public class OllamaGenerateStreamObserver {
|
||||||
|
|
||||||
|
private OllamaStreamHandler streamHandler;
|
||||||
|
|
||||||
|
private List<OllamaGenerateResponseModel> responseParts = new ArrayList<>();
|
||||||
|
|
||||||
|
private String message = "";
|
||||||
|
|
||||||
|
public OllamaGenerateStreamObserver(OllamaStreamHandler streamHandler) {
|
||||||
|
this.streamHandler = streamHandler;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void notify(OllamaGenerateResponseModel currentResponsePart) {
|
||||||
|
responseParts.add(currentResponsePart);
|
||||||
|
handleCurrentResponsePart(currentResponsePart);
|
||||||
|
}
|
||||||
|
|
||||||
|
protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart) {
|
||||||
|
message = message + currentResponsePart.getResponse();
|
||||||
|
streamHandler.accept(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
@ -0,0 +1,7 @@
|
|||||||
|
package io.github.ollama4j.models.generate;
|
||||||
|
|
||||||
|
import java.util.function.Consumer;
|
||||||
|
|
||||||
|
/**
 * Functional callback invoked with the response text accumulated so far each
 * time a new streamed part arrives from the Ollama server.
 */
@FunctionalInterface
public interface OllamaStreamHandler extends Consumer<String> {
    /**
     * Receives the complete response text accumulated up to this point.
     *
     * @param message the accumulated response text
     */
    void accept(String message);
}
|
@ -0,0 +1,63 @@
|
|||||||
|
package io.github.ollama4j.models.ps;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/**
 * JSON payload listing the models currently loaded/running on the Ollama
 * server (package {@code models.ps}). Lombok generates accessors; unknown
 * JSON fields are ignored.
 */
@Data
@NoArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public class ModelsProcessResponse {
    // Running models reported under the "models" JSON key.
    @JsonProperty("models")
    private List<ModelProcess> models;

    /** A single running model as reported by the server. */
    @Data
    @NoArgsConstructor
    public static class ModelProcess {
        @JsonProperty("name")
        private String name;

        @JsonProperty("model")
        private String model;

        // Model size; presumably bytes — confirm against the Ollama API docs.
        @JsonProperty("size")
        private long size;

        @JsonProperty("digest")
        private String digest;

        @JsonProperty("details")
        private ModelDetails details;

        @JsonProperty("expires_at")
        private String expiresAt; // Consider using LocalDateTime if you need to process date/time

        // Portion of the model resident in VRAM; presumably bytes.
        @JsonProperty("size_vram")
        private long sizeVram;
    }

    /** Static metadata describing the model's format and family. */
    @Data
    @NoArgsConstructor
    public static class ModelDetails {
        @JsonProperty("parent_model")
        private String parentModel;

        @JsonProperty("format")
        private String format;

        @JsonProperty("family")
        private String family;

        @JsonProperty("families")
        private List<String> families;

        @JsonProperty("parameter_size")
        private String parameterSize;

        @JsonProperty("quantization_level")
        private String quantizationLevel;
    }
}
|
@ -0,0 +1,13 @@
|
|||||||
|
package io.github.ollama4j.models.request;
|
||||||
|
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
/**
 * Credentials for HTTP Basic authentication against the Ollama server.
 * Lombok generates getters/setters plus no-arg and all-arg constructors.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class BasicAuth {
    private String username;
    private String password;
}
|
@ -1,6 +1,6 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models.request;
|
package io.github.ollama4j.models.request;
|
||||||
|
|
||||||
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
|
import static io.github.ollama4j.utils.Utils.getObjectMapper;
|
||||||
|
|
||||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
import lombok.AllArgsConstructor;
|
import lombok.AllArgsConstructor;
|
@ -1,6 +1,6 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models.request;
|
package io.github.ollama4j.models.request;
|
||||||
|
|
||||||
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
|
import static io.github.ollama4j.utils.Utils.getObjectMapper;
|
||||||
|
|
||||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
import lombok.AllArgsConstructor;
|
import lombok.AllArgsConstructor;
|
@ -1,6 +1,6 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models.request;
|
package io.github.ollama4j.models.request;
|
||||||
|
|
||||||
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
|
import static io.github.ollama4j.utils.Utils.getObjectMapper;
|
||||||
|
|
||||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
import lombok.AllArgsConstructor;
|
import lombok.AllArgsConstructor;
|
@ -0,0 +1,54 @@
|
|||||||
|
package io.github.ollama4j.models.request;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
import io.github.ollama4j.exceptions.OllamaBaseException;
|
||||||
|
import io.github.ollama4j.models.response.OllamaResult;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatResponseModel;
|
||||||
|
import io.github.ollama4j.models.chat.OllamaChatStreamObserver;
|
||||||
|
import io.github.ollama4j.models.generate.OllamaStreamHandler;
|
||||||
|
import io.github.ollama4j.utils.OllamaRequestBody;
|
||||||
|
import io.github.ollama4j.utils.Utils;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
|
||||||
|
/**
 * Endpoint caller specialized for the Ollama chat API ({@code /api/chat}).
 * Parses each streamed response line as an {@link OllamaChatResponseModel}
 * and optionally forwards parts to a chat stream observer.
 */
public class OllamaChatEndpointCaller extends OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class);

    // Observer notified with each parsed part; null when call() was not used
    // with a stream handler.
    private OllamaChatStreamObserver streamObserver;

    /**
     * @param host                  base URL of the Ollama server
     * @param basicAuth             optional Basic-Auth credentials (may be null)
     * @param requestTimeoutSeconds per-request timeout in seconds
     * @param verbose               whether to log request/response details
     */
    public OllamaChatEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        super(host, basicAuth, requestTimeoutSeconds, verbose);
    }

    @Override
    protected String getEndpointSuffix() {
        return "/api/chat";
    }

    /**
     * Parses one streamed JSON line into a chat response model, appends the
     * message content to the buffer, and notifies the observer if present.
     *
     * @return true when this part is the final one, or when parsing failed
     *         (to stop reading the stream)
     */
    @Override
    protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
        try {
            OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
            // NOTE(review): assumes getMessage() is non-null on every parsed line;
            // a null message would raise an uncaught NullPointerException here — verify.
            responseBuffer.append(ollamaResponseModel.getMessage().getContent());
            if (streamObserver != null) {
                streamObserver.notify(ollamaResponseModel);
            }
            return ollamaResponseModel.isDone();
        } catch (JsonProcessingException e) {
            LOG.error("Error parsing the Ollama chat response!", e);
            // Unparseable lines are treated as terminal.
            return true;
        }
    }

    /**
     * Performs a synchronous chat call, streaming parts to the given handler.
     *
     * @param body          request payload to POST
     * @param streamHandler receives accumulated response text as it streams
     * @return the final result of the call
     * @throws OllamaBaseException  if the server returns a non-200 status
     * @throws IOException          if the response stream cannot be read
     * @throws InterruptedException if the request is interrupted
     */
    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
            throws OllamaBaseException, IOException, InterruptedException {
        streamObserver = new OllamaChatStreamObserver(streamHandler);
        return super.callSync(body);
    }
}
|
@ -0,0 +1,35 @@
|
|||||||
|
package io.github.ollama4j.models.request;
|
||||||
|
|
||||||
|
import java.util.Map;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
|
||||||
|
|
||||||
|
import io.github.ollama4j.utils.BooleanToJsonFormatFlagSerializer;
|
||||||
|
import io.github.ollama4j.utils.Utils;
|
||||||
|
import lombok.Data;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||||
|
public abstract class OllamaCommonRequest {
|
||||||
|
|
||||||
|
protected String model;
|
||||||
|
@JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)
|
||||||
|
@JsonProperty(value = "format")
|
||||||
|
protected Boolean returnFormatJson;
|
||||||
|
protected Map<String, Object> options;
|
||||||
|
protected String template;
|
||||||
|
protected boolean stream;
|
||||||
|
@JsonProperty(value = "keep_alive")
|
||||||
|
protected String keepAlive;
|
||||||
|
|
||||||
|
|
||||||
|
public String toString() {
|
||||||
|
try {
|
||||||
|
return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
|
||||||
|
} catch (JsonProcessingException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,151 @@
|
|||||||
|
package io.github.ollama4j.models.request;
|
||||||
|
|
||||||
|
import io.github.ollama4j.OllamaAPI;
|
||||||
|
import io.github.ollama4j.exceptions.OllamaBaseException;
|
||||||
|
import io.github.ollama4j.models.response.OllamaErrorResponse;
|
||||||
|
import io.github.ollama4j.models.response.OllamaResult;
|
||||||
|
import io.github.ollama4j.utils.OllamaRequestBody;
|
||||||
|
import io.github.ollama4j.utils.Utils;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
import java.io.BufferedReader;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.io.InputStream;
|
||||||
|
import java.io.InputStreamReader;
|
||||||
|
import java.net.URI;
|
||||||
|
import java.net.http.HttpClient;
|
||||||
|
import java.net.http.HttpRequest;
|
||||||
|
import java.net.http.HttpResponse;
|
||||||
|
import java.nio.charset.StandardCharsets;
|
||||||
|
import java.time.Duration;
|
||||||
|
import java.util.Base64;
|
||||||
|
|
||||||
|
/**
 * Abstract helper that performs HTTP calls against the Ollama API server.
 * Subclasses supply the endpoint suffix and per-line response parsing.
 */
public abstract class OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaAPI.class);

    // Base URL of the Ollama server the suffix is appended to.
    private String host;
    // Optional Basic-Auth credentials; null when authentication is not used.
    private BasicAuth basicAuth;
    private long requestTimeoutSeconds;
    // When true, the request body and final result are logged at INFO level.
    private boolean verbose;

    /**
     * @param host                  base URL of the Ollama server
     * @param basicAuth             optional Basic-Auth credentials (may be null)
     * @param requestTimeoutSeconds per-request timeout in seconds
     * @param verbose               whether to log request/response details
     */
    public OllamaEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        this.host = host;
        this.basicAuth = basicAuth;
        this.requestTimeoutSeconds = requestTimeoutSeconds;
        this.verbose = verbose;
    }

    /** Endpoint path appended to the host, e.g. "/api/chat". */
    protected abstract String getEndpointSuffix();

    /**
     * Parses one streamed response line into the buffer.
     *
     * @return true when the stream is finished and reading should stop
     */
    protected abstract boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer);


    /**
     * Calls the API server on the configured host and endpoint suffix
     * synchronously, blocking until the streamed response completes.
     *
     * @param body POST body payload
     * @return result answer given by the assistant
     * @throws OllamaBaseException any response code other than 200 has been returned
     * @throws IOException in case the responseStream can not be read
     * @throws InterruptedException in case the server is not reachable or network issues happen
     */
    public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException {
        // Create Request
        long startTime = System.currentTimeMillis();
        HttpClient httpClient = HttpClient.newHttpClient();
        URI uri = URI.create(this.host + getEndpointSuffix());
        HttpRequest.Builder requestBuilder =
                getRequestBuilderDefault(uri)
                        .POST(
                                body.getBodyPublisher());
        HttpRequest request = requestBuilder.build();
        if (this.verbose) LOG.info("Asking model: " + body.toString());
        HttpResponse<InputStream> response =
                httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());

        int statusCode = response.statusCode();
        InputStream responseBodyStream = response.body();
        StringBuilder responseBuffer = new StringBuilder();
        // Read the line-delimited response stream; error statuses carry JSON
        // error bodies which are collected into the same buffer.
        try (BufferedReader reader =
                new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                if (statusCode == 404) {
                    LOG.warn("Status code: 404 (Not Found)");
                    OllamaErrorResponse ollamaResponseModel =
                            Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else if (statusCode == 401) {
                    LOG.warn("Status code: 401 (Unauthorized)");
                    // 401 bodies are not parsed; a fixed error message is substituted.
                    OllamaErrorResponse ollamaResponseModel =
                            Utils.getObjectMapper()
                                    .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponse.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else if (statusCode == 400) {
                    LOG.warn("Status code: 400 (Bad Request)");
                    OllamaErrorResponse ollamaResponseModel = Utils.getObjectMapper().readValue(line,
                            OllamaErrorResponse.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else {
                    // Delegate successful-stream lines to the subclass parser;
                    // stop once it reports the stream is done.
                    boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);
                    if (finished) {
                        break;
                    }
                }
            }
        }

        if (statusCode != 200) {
            LOG.error("Status code " + statusCode);
            // The buffer holds the collected error text at this point.
            throw new OllamaBaseException(responseBuffer.toString());
        } else {
            long endTime = System.currentTimeMillis();
            OllamaResult ollamaResult =
                    new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode);
            if (verbose) LOG.info("Model response: " + ollamaResult);
            return ollamaResult;
        }
    }

    /**
     * Get default request builder.
     *
     * @param uri URI to get a HttpRequest.Builder
     * @return HttpRequest.Builder with JSON content type, the configured
     *         timeout, and an Authorization header when credentials are set
     */
    private HttpRequest.Builder getRequestBuilderDefault(URI uri) {
        HttpRequest.Builder requestBuilder =
                HttpRequest.newBuilder(uri)
                        .header("Content-Type", "application/json")
                        .timeout(Duration.ofSeconds(this.requestTimeoutSeconds));
        if (isBasicAuthCredentialsSet()) {
            requestBuilder.header("Authorization", getBasicAuthHeaderValue());
        }
        return requestBuilder;
    }

    /**
     * Get basic authentication header value.
     *
     * @return basic authentication header value (encoded credentials)
     */
    private String getBasicAuthHeaderValue() {
        String credentialsToEncode = this.basicAuth.getUsername() + ":" + this.basicAuth.getPassword();
        return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes());
    }

    /**
     * Check if Basic Auth credentials set.
     *
     * @return true when Basic Auth credentials set
     */
    private boolean isBasicAuthCredentialsSet() {
        return this.basicAuth != null;
    }

}
|
@ -0,0 +1,51 @@
|
|||||||
|
package io.github.ollama4j.models.request;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
import io.github.ollama4j.exceptions.OllamaBaseException;
|
||||||
|
import io.github.ollama4j.models.response.OllamaResult;
|
||||||
|
import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
|
||||||
|
import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
|
||||||
|
import io.github.ollama4j.models.generate.OllamaStreamHandler;
|
||||||
|
import io.github.ollama4j.utils.OllamaRequestBody;
|
||||||
|
import io.github.ollama4j.utils.Utils;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
|
||||||
|
public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {
|
||||||
|
|
||||||
|
private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class);
|
||||||
|
|
||||||
|
private OllamaGenerateStreamObserver streamObserver;
|
||||||
|
|
||||||
|
public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
|
||||||
|
super(host, basicAuth, requestTimeoutSeconds, verbose);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected String getEndpointSuffix() {
|
||||||
|
return "/api/generate";
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
|
||||||
|
try {
|
||||||
|
OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
|
||||||
|
responseBuffer.append(ollamaResponseModel.getResponse());
|
||||||
|
if (streamObserver != null) {
|
||||||
|
streamObserver.notify(ollamaResponseModel);
|
||||||
|
}
|
||||||
|
return ollamaResponseModel.isDone();
|
||||||
|
} catch (JsonProcessingException e) {
|
||||||
|
LOG.error("Error parsing the Ollama chat response!", e);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
|
||||||
|
throws OllamaBaseException, IOException, InterruptedException {
|
||||||
|
streamObserver = new OllamaGenerateStreamObserver(streamHandler);
|
||||||
|
return super.callSync(body);
|
||||||
|
}
|
||||||
|
}
|
@ -1,6 +1,7 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
package io.github.ollama4j.models.response;
|
||||||
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
import lombok.Data;
|
import lombok.Data;
|
||||||
|
|
||||||
@Data
|
@Data
|
@ -1,14 +1,21 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
package io.github.ollama4j.models.response;
|
||||||
|
|
||||||
|
import java.time.OffsetDateTime;
|
||||||
|
|
||||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
import io.github.ollama4j.utils.Utils;
|
||||||
import lombok.Data;
|
import lombok.Data;
|
||||||
|
|
||||||
@Data
|
@Data
|
||||||
public class Model {
|
public class Model {
|
||||||
|
|
||||||
private String name;
|
private String name;
|
||||||
|
private String model;
|
||||||
@JsonProperty("modified_at")
|
@JsonProperty("modified_at")
|
||||||
private String modifiedAt;
|
private OffsetDateTime modifiedAt;
|
||||||
|
@JsonProperty("expires_at")
|
||||||
|
private OffsetDateTime expiresAt;
|
||||||
private String digest;
|
private String digest;
|
||||||
private long size;
|
private long size;
|
||||||
@JsonProperty("details")
|
@JsonProperty("details")
|
||||||
@ -33,4 +40,13 @@ public class Model {
|
|||||||
return name.split(":")[1];
|
return name.split(":")[1];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String toString() {
|
||||||
|
try {
|
||||||
|
return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
|
||||||
|
} catch (JsonProcessingException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
@ -0,0 +1,30 @@
|
|||||||
|
package io.github.ollama4j.models.response;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
import io.github.ollama4j.utils.Utils;
|
||||||
|
import lombok.Data;
|
||||||
|
|
||||||
|
/**
 * Detail payload for a single model: license, Modelfile contents, parameters,
 * template, system prompt and metadata. Unknown JSON fields are ignored.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class ModelDetail {
    private String license;

    // Raw contents of the model's Modelfile.
    @JsonProperty("modelfile")
    private String modelFile;

    private String parameters;
    private String template;
    private String system;
    private ModelMeta details;

    /** Pretty-printed JSON representation of this object. */
    @Override
    public String toString() {
        try {
            return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
}
|
@ -1,7 +1,9 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
package io.github.ollama4j.models.response;
|
||||||
|
|
||||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
import io.github.ollama4j.utils.Utils;
|
||||||
import lombok.Data;
|
import lombok.Data;
|
||||||
|
|
||||||
@Data
|
@Data
|
||||||
@ -21,4 +23,13 @@ public class ModelMeta {
|
|||||||
|
|
||||||
@JsonProperty("quantization_level")
|
@JsonProperty("quantization_level")
|
||||||
private String quantizationLevel;
|
private String quantizationLevel;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String toString() {
|
||||||
|
try {
|
||||||
|
return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
|
||||||
|
} catch (JsonProcessingException e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
@ -1,4 +1,4 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
package io.github.ollama4j.models.response;
|
||||||
|
|
||||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||||
import lombok.Data;
|
import lombok.Data;
|
@ -0,0 +1,123 @@
|
|||||||
|
package io.github.ollama4j.models.response;
|
||||||
|
|
||||||
|
import io.github.ollama4j.exceptions.OllamaBaseException;
|
||||||
|
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
|
||||||
|
import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
|
||||||
|
import io.github.ollama4j.utils.Utils;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.EqualsAndHashCode;
|
||||||
|
import lombok.Getter;
|
||||||
|
import lombok.Setter;
|
||||||
|
|
||||||
|
import java.io.BufferedReader;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.io.InputStream;
|
||||||
|
import java.io.InputStreamReader;
|
||||||
|
import java.net.http.HttpClient;
|
||||||
|
import java.net.http.HttpRequest;
|
||||||
|
import java.net.http.HttpResponse;
|
||||||
|
import java.nio.charset.StandardCharsets;
|
||||||
|
import java.time.Duration;
|
||||||
|
|
||||||
|
/**
 * Thread that POSTs a generate request to the Ollama server and exposes the
 * streamed response incrementally through an {@link OllamaResultStream},
 * while also accumulating the complete response text.
 */
@Data
@EqualsAndHashCode(callSuper = true)
@SuppressWarnings("unused")
public class OllamaAsyncResultStreamer extends Thread {
    private final HttpRequest.Builder requestBuilder;
    private final OllamaGenerateRequest ollamaRequestModel;
    // Consumers poll this stream to receive tokens as they arrive.
    private final OllamaResultStream stream = new OllamaResultStream();
    // Full response text once done; "[FAILED] <msg>" on failure.
    private String completeResponse;


    /**
     * -- GETTER -- Returns the status of the request. Indicates if the request was successful or a
     * failure. If the request was a failure, the `getResponse()` method will return the error
     * message.
     */
    @Getter
    private boolean succeeded;

    @Setter
    private long requestTimeoutSeconds;

    /**
     * -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama
     * server.
     */
    @Getter
    private int httpStatusCode;

    /**
     * -- GETTER -- Returns the response time in milliseconds.
     */
    @Getter
    private long responseTime = 0;

    /**
     * @param requestBuilder        pre-configured builder the POST body is added to
     * @param ollamaRequestModel    request payload; its stream flag is forced on
     * @param requestTimeoutSeconds per-request timeout in seconds
     */
    public OllamaAsyncResultStreamer(
            HttpRequest.Builder requestBuilder,
            OllamaGenerateRequest ollamaRequestModel,
            long requestTimeoutSeconds) {
        this.requestBuilder = requestBuilder;
        this.ollamaRequestModel = ollamaRequestModel;
        this.completeResponse = "";
        // Seed the stream with an empty token so an early poll returns "".
        this.stream.add("");
        this.requestTimeoutSeconds = requestTimeoutSeconds;
    }

    @Override
    public void run() {
        // Force streaming so the server sends line-delimited JSON parts.
        ollamaRequestModel.setStream(true);
        HttpClient httpClient = HttpClient.newHttpClient();
        try {
            long startTime = System.currentTimeMillis();
            HttpRequest request =
                    requestBuilder
                            .POST(
                                    HttpRequest.BodyPublishers.ofString(
                                            Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)))
                            .header("Content-Type", "application/json")
                            .timeout(Duration.ofSeconds(requestTimeoutSeconds))
                            .build();
            HttpResponse<InputStream> response =
                    httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
            int statusCode = response.statusCode();
            this.httpStatusCode = statusCode;

            InputStream responseBodyStream = response.body();
            try (BufferedReader reader =
                    new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
                String line;
                StringBuilder responseBuffer = new StringBuilder();
                while ((line = reader.readLine()) != null) {
                    if (statusCode == 404) {
                        // Error body: forward the error text both to the live
                        // stream and to the accumulated response.
                        OllamaErrorResponse ollamaResponseModel =
                                Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
                        stream.add(ollamaResponseModel.getError());
                        responseBuffer.append(ollamaResponseModel.getError());
                    } else {
                        OllamaGenerateResponseModel ollamaResponseModel =
                                Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
                        String res = ollamaResponseModel.getResponse();
                        stream.add(res);
                        // The final ("done") part's text is intentionally not
                        // appended to the accumulated response.
                        if (!ollamaResponseModel.isDone()) {
                            responseBuffer.append(res);
                        }
                    }
                }

                // NOTE(review): succeeded is set true before the status check
                // below; a non-200 status then flips it to false via the catch.
                this.succeeded = true;
                this.completeResponse = responseBuffer.toString();
                long endTime = System.currentTimeMillis();
                responseTime = endTime - startTime;
            }
            if (statusCode != 200) {
                throw new OllamaBaseException(this.completeResponse);
            }
        } catch (IOException | InterruptedException | OllamaBaseException e) {
            // NOTE(review): InterruptedException is swallowed here without
            // re-interrupting the thread — consider Thread.currentThread().interrupt().
            this.succeeded = false;
            this.completeResponse = "[FAILED] " + e.getMessage();
        }
    }

}
|
||||||
|
|
@ -0,0 +1,11 @@
|
|||||||
|
package io.github.ollama4j.models.response;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||||
|
import lombok.Data;
|
||||||
|
|
||||||
|
/**
 * Error payload returned by the Ollama server, e.g. {@code {"error": "..."}}.
 * Unknown JSON fields are ignored.
 */
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class OllamaErrorResponse {

    // Human-readable error message from the server.
    private String error;
}
|
@ -1,6 +1,6 @@
|
|||||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
package io.github.ollama4j.models.response;
|
||||||
|
|
||||||
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
|
import static io.github.ollama4j.utils.Utils.getObjectMapper;
|
||||||
|
|
||||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
import lombok.Data;
|
import lombok.Data;
|
||||||
@ -13,9 +13,9 @@ import lombok.Getter;
|
|||||||
public class OllamaResult {
|
public class OllamaResult {
|
||||||
/**
|
/**
|
||||||
* -- GETTER --
|
* -- GETTER --
|
||||||
* Get the response text
|
* Get the completion/response text
|
||||||
*
|
*
|
||||||
* @return String - response text
|
* @return String completion/response text
|
||||||
*/
|
*/
|
||||||
private final String response;
|
private final String response;
|
||||||
|
|
@ -0,0 +1,18 @@
|
|||||||
|
package io.github.ollama4j.models.response;
|
||||||
|
|
||||||
|
import java.util.Iterator;
|
||||||
|
import java.util.LinkedList;
|
||||||
|
import java.util.Queue;
|
||||||
|
|
||||||
|
/**
 * Queue of streamed response tokens. Note that {@link #poll()} deviates from
 * the usual {@link Queue} contract: it drains and concatenates every queued
 * token instead of removing just the head.
 */
public class OllamaResultStream extends LinkedList<String> implements Queue<String> {
    /**
     * Removes all queued tokens and returns them joined into a single string.
     *
     * @return the concatenated tokens; an empty string when the queue is empty
     */
    @Override
    public String poll() {
        StringBuilder drained = new StringBuilder();
        while (!this.isEmpty()) {
            drained.append(this.removeFirst());
        }
        return drained.toString();
    }
}
|
@ -0,0 +1,35 @@
|
|||||||
|
package io.github.ollama4j.tools;
|
||||||
|
|
||||||
|
import io.github.ollama4j.models.response.OllamaResult;
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public class OllamaToolsResult {
|
||||||
|
private OllamaResult modelResult;
|
||||||
|
private Map<ToolFunctionCallSpec, Object> toolResults;
|
||||||
|
|
||||||
|
public List<ToolResult> getToolResults() {
|
||||||
|
List<ToolResult> results = new ArrayList<>();
|
||||||
|
for (Map.Entry<ToolFunctionCallSpec, Object> r : this.toolResults.entrySet()) {
|
||||||
|
results.add(new ToolResult(r.getKey().getName(), r.getKey().getArguments(), r.getValue()));
|
||||||
|
}
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public static class ToolResult {
|
||||||
|
private String functionName;
|
||||||
|
private Map<String, Object> functionArguments;
|
||||||
|
private Object result;
|
||||||
|
}
|
||||||
|
}
|
8
src/main/java/io/github/ollama4j/tools/ToolFunction.java
Normal file
8
src/main/java/io/github/ollama4j/tools/ToolFunction.java
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
package io.github.ollama4j.tools;
|
||||||
|
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
/**
 * A callable tool implementation: receives the named argument map produced by
 * the model's tool call and returns the tool's result.
 */
@FunctionalInterface
public interface ToolFunction {
    // arguments: named parameters supplied for this tool invocation.
    Object apply(Map<String, Object> arguments);
}
|
@ -0,0 +1,16 @@
|
|||||||
|
package io.github.ollama4j.tools;
|
||||||
|
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
/**
 * A tool/function invocation requested by the model: the function's name and
 * its named arguments. Used as the key of the tool-results map.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class ToolFunctionCallSpec {
    private String name;
    private Map<String, Object> arguments;
}
|
||||||
|
|
16
src/main/java/io/github/ollama4j/tools/ToolRegistry.java
Normal file
16
src/main/java/io/github/ollama4j/tools/ToolRegistry.java
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
package io.github.ollama4j.tools;
|
||||||
|
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
public class ToolRegistry {
|
||||||
|
private final Map<String, ToolFunction> functionMap = new HashMap<>();
|
||||||
|
|
||||||
|
public ToolFunction getFunction(String name) {
|
||||||
|
return functionMap.get(name);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void addFunction(String name, ToolFunction function) {
|
||||||
|
functionMap.put(name, function);
|
||||||
|
}
|
||||||
|
}
|
113
src/main/java/io/github/ollama4j/tools/Tools.java
Normal file
113
src/main/java/io/github/ollama4j/tools/Tools.java
Normal file
@ -0,0 +1,113 @@
|
|||||||
|
package io.github.ollama4j.tools;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
import io.github.ollama4j.utils.Utils;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
public class Tools {
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
public static class ToolSpecification {
|
||||||
|
private String functionName;
|
||||||
|
private String functionDescription;
|
||||||
|
private Map<String, PromptFuncDefinition.Property> properties;
|
||||||
|
private ToolFunction toolDefinition;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||||
|
public static class PromptFuncDefinition {
|
||||||
|
private String type;
|
||||||
|
private PromptFuncSpec function;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
public static class PromptFuncSpec {
|
||||||
|
private String name;
|
||||||
|
private String description;
|
||||||
|
private Parameters parameters;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
public static class Parameters {
|
||||||
|
private String type;
|
||||||
|
private Map<String, Property> properties;
|
||||||
|
private List<String> required;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@Builder
|
||||||
|
public static class Property {
|
||||||
|
private String type;
|
||||||
|
private String description;
|
||||||
|
@JsonProperty("enum")
|
||||||
|
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||||
|
private List<String> enumValues;
|
||||||
|
@JsonIgnore
|
||||||
|
private boolean required;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static class PropsBuilder {
|
||||||
|
private final Map<String, PromptFuncDefinition.Property> props = new HashMap<>();
|
||||||
|
|
||||||
|
public PropsBuilder withProperty(String key, PromptFuncDefinition.Property property) {
|
||||||
|
props.put(key, property);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Map<String, PromptFuncDefinition.Property> build() {
|
||||||
|
return props;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static class PromptBuilder {
|
||||||
|
private final List<PromptFuncDefinition> tools = new ArrayList<>();
|
||||||
|
|
||||||
|
private String promptText;
|
||||||
|
|
||||||
|
public String build() throws JsonProcessingException {
|
||||||
|
return "[AVAILABLE_TOOLS] " + Utils.getObjectMapper().writeValueAsString(tools) + "[/AVAILABLE_TOOLS][INST] " + promptText + " [/INST]";
|
||||||
|
}
|
||||||
|
|
||||||
|
public PromptBuilder withPrompt(String prompt) throws JsonProcessingException {
|
||||||
|
promptText = prompt;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public PromptBuilder withToolSpecification(ToolSpecification spec) {
|
||||||
|
PromptFuncDefinition def = new PromptFuncDefinition();
|
||||||
|
def.setType("function");
|
||||||
|
|
||||||
|
PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec();
|
||||||
|
functionDetail.setName(spec.getFunctionName());
|
||||||
|
functionDetail.setDescription(spec.getFunctionDescription());
|
||||||
|
|
||||||
|
PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters();
|
||||||
|
parameters.setType("object");
|
||||||
|
parameters.setProperties(spec.getProperties());
|
||||||
|
|
||||||
|
List<String> requiredValues = new ArrayList<>();
|
||||||
|
for (Map.Entry<String, PromptFuncDefinition.Property> p : spec.getProperties().entrySet()) {
|
||||||
|
if (p.getValue().isRequired()) {
|
||||||
|
requiredValues.add(p.getKey());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
parameters.setRequired(requiredValues);
|
||||||
|
functionDetail.setParameters(parameters);
|
||||||
|
def.setFunction(functionDetail);
|
||||||
|
|
||||||
|
tools.add(def);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
88
src/main/java/io/github/ollama4j/types/OllamaModelType.java
Normal file
88
src/main/java/io/github/ollama4j/types/OllamaModelType.java
Normal file
@ -0,0 +1,88 @@
|
|||||||
|
package io.github.ollama4j.types;
|
||||||
|
|
||||||
|
/**
 * A class to provide constants for all the supported models by Ollama.
 *
 * <p>Refer to the full list of models and the details here: <a
 * href="https://ollama.ai/library">https://ollama.ai/library</a>
 */
@SuppressWarnings("ALL")
public class OllamaModelType {
    public static final String GEMMA = "gemma";
    public static final String GEMMA2 = "gemma2";


    public static final String LLAMA2 = "llama2";
    public static final String LLAMA3 = "llama3";
    public static final String MISTRAL = "mistral";
    public static final String MIXTRAL = "mixtral";
    public static final String LLAVA = "llava";
    public static final String LLAVA_PHI3 = "llava-phi3";
    public static final String NEURAL_CHAT = "neural-chat";
    public static final String CODELLAMA = "codellama";
    public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral";
    public static final String MISTRAL_OPENORCA = "mistral-openorca";
    public static final String LLAMA2_UNCENSORED = "llama2-uncensored";
    public static final String PHI = "phi";
    public static final String PHI3 = "phi3";
    public static final String ORCA_MINI = "orca-mini";
    public static final String DEEPSEEK_CODER = "deepseek-coder";
    public static final String DOLPHIN_MISTRAL = "dolphin-mistral";
    public static final String VICUNA = "vicuna";
    public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored";
    public static final String ZEPHYR = "zephyr";
    public static final String OPENHERMES = "openhermes";
    public static final String QWEN = "qwen";

    public static final String QWEN2 = "qwen2";
    public static final String WIZARDCODER = "wizardcoder";
    public static final String LLAMA2_CHINESE = "llama2-chinese";
    public static final String TINYLLAMA = "tinyllama";
    public static final String PHIND_CODELLAMA = "phind-codellama";
    public static final String OPENCHAT = "openchat";
    public static final String ORCA2 = "orca2";
    public static final String FALCON = "falcon";
    public static final String WIZARD_MATH = "wizard-math";
    public static final String TINYDOLPHIN = "tinydolphin";
    public static final String NOUS_HERMES = "nous-hermes";
    public static final String YI = "yi";
    public static final String DOLPHIN_PHI = "dolphin-phi";
    public static final String STARLING_LM = "starling-lm";
    public static final String STARCODER = "starcoder";
    public static final String CODEUP = "codeup";
    public static final String MEDLLAMA2 = "medllama2";
    public static final String STABLE_CODE = "stable-code";
    public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored";
    public static final String BAKLLAVA = "bakllava";
    public static final String EVERYTHINGLM = "everythinglm";
    public static final String SOLAR = "solar";
    public static final String STABLE_BELUGA = "stable-beluga";
    public static final String SQLCODER = "sqlcoder";
    public static final String YARN_MISTRAL = "yarn-mistral";
    public static final String NOUS_HERMES2_MIXTRAL = "nous-hermes2-mixtral";
    public static final String SAMANTHA_MISTRAL = "samantha-mistral";
    public static final String STABLELM_ZEPHYR = "stablelm-zephyr";
    public static final String MEDITRON = "meditron";
    public static final String WIZARD_VICUNA = "wizard-vicuna";
    public static final String STABLELM2 = "stablelm2";
    public static final String MAGICODER = "magicoder";
    public static final String YARN_LLAMA2 = "yarn-llama2";
    public static final String NOUS_HERMES2 = "nous-hermes2";
    public static final String DEEPSEEK_LLM = "deepseek-llm";
    public static final String LLAMA_PRO = "llama-pro";
    public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2";
    public static final String CODEBOOGA = "codebooga";
    public static final String MISTRALLITE = "mistrallite";
    public static final String NEXUSRAVEN = "nexusraven";
    public static final String GOLIATH = "goliath";
    public static final String NOMIC_EMBED_TEXT = "nomic-embed-text";
    public static final String NOTUX = "notux";
    public static final String ALFRED = "alfred";
    public static final String MEGADOLPHIN = "megadolphin";
    public static final String WIZARDLM = "wizardlm";
    public static final String XWINLM = "xwinlm";
    public static final String NOTUS = "notus";
    public static final String DUCKDB_NSQL = "duckdb-nsql";
    public static final String ALL_MINILM = "all-minilm";
    public static final String CODESTRAL = "codestral";
}
|
@ -0,0 +1,21 @@
|
|||||||
|
package io.github.ollama4j.utils;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.core.JsonGenerator;
|
||||||
|
import com.fasterxml.jackson.databind.JsonSerializer;
|
||||||
|
import com.fasterxml.jackson.databind.SerializerProvider;
|
||||||
|
|
||||||
|
public class BooleanToJsonFormatFlagSerializer extends JsonSerializer<Boolean>{
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void serialize(Boolean value, JsonGenerator gen, SerializerProvider serializers) throws IOException {
|
||||||
|
gen.writeString("json");
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean isEmpty(SerializerProvider provider,Boolean value){
|
||||||
|
return !value;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
@ -0,0 +1,21 @@
|
|||||||
|
package io.github.ollama4j.utils;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.util.Base64;
|
||||||
|
import java.util.Collection;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.core.JsonGenerator;
|
||||||
|
import com.fasterxml.jackson.databind.JsonSerializer;
|
||||||
|
import com.fasterxml.jackson.databind.SerializerProvider;
|
||||||
|
|
||||||
|
public class FileToBase64Serializer extends JsonSerializer<Collection<byte[]>> {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void serialize(Collection<byte[]> value, JsonGenerator jsonGenerator, SerializerProvider serializers) throws IOException {
|
||||||
|
jsonGenerator.writeStartArray();
|
||||||
|
for (byte[] file : value) {
|
||||||
|
jsonGenerator.writeString(Base64.getEncoder().encodeToString(file));
|
||||||
|
}
|
||||||
|
jsonGenerator.writeEndArray();
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,28 @@
|
|||||||
|
package io.github.ollama4j.utils;
|
||||||
|
|
||||||
|
import java.net.http.HttpRequest.BodyPublisher;
|
||||||
|
import java.net.http.HttpRequest.BodyPublishers;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||||
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Interface to represent a OllamaRequest as HTTP-Request Body via {@link BodyPublishers}.
|
||||||
|
*/
|
||||||
|
public interface OllamaRequestBody {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transforms the OllamaRequest Object to a JSON Object via Jackson.
|
||||||
|
*
|
||||||
|
* @return JSON representation of a OllamaRequest
|
||||||
|
*/
|
||||||
|
@JsonIgnore
|
||||||
|
default BodyPublisher getBodyPublisher(){
|
||||||
|
try {
|
||||||
|
return BodyPublishers.ofString(
|
||||||
|
Utils.getObjectMapper().writeValueAsString(this));
|
||||||
|
} catch (JsonProcessingException e) {
|
||||||
|
throw new IllegalArgumentException("Request not Body convertible.",e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
11
src/main/java/io/github/ollama4j/utils/Options.java
Normal file
11
src/main/java/io/github/ollama4j/utils/Options.java
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
package io.github.ollama4j.utils;
|
||||||
|
|
||||||
|
import java.util.Map;
|
||||||
|
import lombok.Data;
|
||||||
|
|
||||||
|
/** Class for options for Ollama model. */
@Data
public class Options {

    // Raw key/value option pairs passed through to the Ollama API.
    // NOTE(review): the Lombok-generated getter returns this internal map
    // directly, so callers can mutate it after construction — confirm
    // whether that is intended.
    private final Map<String, Object> optionsMap;
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user