forked from Mirror/ollama4j
Compare commits
282 Commits
docs-build
...
1.0.77
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
81689be194 | ||
|
|
fd93036d08 | ||
|
|
c9b05a725b | ||
|
|
a4e1b4afe9 | ||
|
|
3d21813abb | ||
|
|
383d0f56ca | ||
|
|
af1b213a76 | ||
|
|
fed89a9643 | ||
|
|
fd32aa33ff | ||
|
|
b8a13e89b1 | ||
|
|
c8f27edd6e | ||
|
|
5a936d8174 | ||
|
|
9b5ddbf4c4 | ||
|
|
7c233d5734 | ||
|
|
e85aeae6e0 | ||
|
|
a05052e095 | ||
|
|
10eb803e26 | ||
|
|
bd2da8fdda | ||
|
|
b0bb082bec | ||
|
|
81f564ef7f | ||
|
|
006b52f3db | ||
|
|
16634e60e4 | ||
|
|
db8b73075b | ||
|
|
dc9f79959a | ||
|
|
88f6d00763 | ||
|
|
fd3a989a49 | ||
|
|
7580c6a549 | ||
|
|
9e6503d84b | ||
|
|
ee21f7fdd8 | ||
|
|
ecc295f484 | ||
|
|
c528fef5fc | ||
|
|
38f1bda105 | ||
|
|
d8a703503a | ||
|
|
dd9ba7c937 | ||
|
|
cf52c9610c | ||
|
|
e8d709e99a | ||
|
|
51fbedad69 | ||
|
|
953605fa73 | ||
|
|
30bfdd9c6d | ||
|
|
91ee6cb4c1 | ||
|
|
8ef6fac28e | ||
|
|
d9e3860123 | ||
|
|
515d1f0399 | ||
|
|
be549430c5 | ||
|
|
4744315d45 | ||
|
|
8eea19a539 | ||
|
|
b5801d84e0 | ||
|
|
165d04b1bb | ||
|
|
16d2160b52 | ||
|
|
e39c47b8e1 | ||
|
|
bb0785140b | ||
|
|
e33ad1a1e3 | ||
|
|
cd60c506cb | ||
|
|
b55925df28 | ||
|
|
3a9b8c309d | ||
|
|
bf07159522 | ||
|
|
f8ca4d041d | ||
|
|
9c6a55f7b0 | ||
|
|
2866d83a2f | ||
|
|
45e5d07581 | ||
|
|
3a264cb6bb | ||
|
|
e1b9d42771 | ||
|
|
1a086c37c0 | ||
|
|
54edba144c | ||
|
|
3ed3187ba9 | ||
|
|
b7cd81a7f5 | ||
|
|
e750c2d7f9 | ||
|
|
62f16131f3 | ||
|
|
2cbaf12d7c | ||
|
|
e2d555d404 | ||
|
|
c296b34174 | ||
|
|
e8f99f28ec | ||
|
|
250b1abc79 | ||
|
|
42b15ad93f | ||
|
|
6f7a714bae | ||
|
|
92618e5084 | ||
|
|
391a9242c3 | ||
|
|
e1b6dc3b54 | ||
|
|
04124cf978 | ||
|
|
e4e717b747 | ||
|
|
10d2a8f5ff | ||
|
|
899fa38805 | ||
|
|
2df878c953 | ||
|
|
78a5eedc8f | ||
|
|
364f961ee2 | ||
|
|
b21aa6add2 | ||
|
|
ec4abd1c2d | ||
|
|
9900ae92fb | ||
|
|
fa20daf6e5 | ||
|
|
44949c0559 | ||
|
|
e88711a017 | ||
|
|
32169ded18 | ||
|
|
4b2d566fd9 | ||
|
|
fb4b7a7ce5 | ||
|
|
18f27775b0 | ||
|
|
cb462ad05a | ||
|
|
1eec22ca1a | ||
|
|
c1f3c51f88 | ||
|
|
7dd556293f | ||
|
|
ee50131ce4 | ||
|
|
2cd47dbfaa | ||
|
|
e5296c1067 | ||
|
|
0f00f05e3d | ||
|
|
976a3b82e5 | ||
|
|
ba26d620c4 | ||
|
|
e45246a767 | ||
|
|
7336668f0c | ||
|
|
11701fb222 | ||
|
|
b1ec12c4e9 | ||
|
|
d0b0a0fc97 | ||
|
|
20774fca6b | ||
|
|
9c46b510d8 | ||
|
|
9d887b60a8 | ||
|
|
63d4de4e24 | ||
|
|
9224d2da06 | ||
|
|
a10692e2f1 | ||
|
|
b0c152a42e | ||
|
|
f44767e023 | ||
|
|
aadef0a57c | ||
|
|
777ee7ffe0 | ||
|
|
dcf1d0bdbc | ||
|
|
13b7111a42 | ||
|
|
09442d37a3 | ||
|
|
1e66bdb07f | ||
|
|
b423090db9 | ||
|
|
a32d94efbf | ||
|
|
31f8302849 | ||
|
|
6487756764 | ||
|
|
abb76ad867 | ||
|
|
cf4e7a96e8 | ||
|
|
0f414f71a3 | ||
|
|
2b700fdad8 | ||
|
|
06c5daa253 | ||
|
|
91aab6cbd1 | ||
|
|
f38a00ebdc | ||
|
|
0f73ea75ab | ||
|
|
8fe869afdb | ||
|
|
2d274c4f5b | ||
|
|
713a3239a4 | ||
|
|
a9e7958d44 | ||
|
|
f38e84053f | ||
|
|
7eb16b7ba0 | ||
|
|
5a3889d8ee | ||
|
|
2c52f4d0bb | ||
|
|
32c4231eb5 | ||
|
|
e9621f054d | ||
|
|
b41b62220c | ||
|
|
c89440cbca | ||
|
|
1aeb555a53 | ||
|
|
9aff3ec5d9 | ||
|
|
b4eaf0cfb5 | ||
|
|
199cb6082d | ||
|
|
37bfe26a6d | ||
|
|
3769386539 | ||
|
|
84a6e57f42 | ||
|
|
14d2474ee9 | ||
|
|
ca613ed80a | ||
|
|
bbcd458849 | ||
|
|
bc885894f8 | ||
|
|
bc83df6971 | ||
|
|
43f43c9f81 | ||
|
|
65f00defcf | ||
|
|
d716b81342 | ||
|
|
272ba445f6 | ||
|
|
d9816d8869 | ||
|
|
874736eb16 | ||
|
|
9c16ccbf81 | ||
|
|
40a3aa31dc | ||
|
|
90669b611b | ||
|
|
f10c7ac725 | ||
|
|
38dca3cd0d | ||
|
|
44bb35b168 | ||
|
|
9832caf503 | ||
|
|
0c4e8e306e | ||
|
|
075416eb9c | ||
|
|
4260fbbc32 | ||
|
|
0bec697a86 | ||
|
|
4ca6eef8fd | ||
|
|
a635dd9be2 | ||
|
|
14982011d9 | ||
|
|
65d852fdc9 | ||
|
|
d483c23c81 | ||
|
|
273b1e47ca | ||
|
|
5c5cdba4cd | ||
|
|
24674ea483 | ||
|
|
5d3a975e4c | ||
|
|
ad670c3c62 | ||
|
|
f9063484f3 | ||
|
|
5e2a07ad41 | ||
|
|
00a3e51a93 | ||
|
|
bc20468f28 | ||
|
|
c7ac50a805 | ||
|
|
f8cd7bc013 | ||
|
|
3469bf314b | ||
|
|
9636807819 | ||
|
|
455251d1d4 | ||
|
|
ec00ffae7f | ||
|
|
d969c7ad46 | ||
|
|
02bf769188 | ||
|
|
1c8a6b4f2a | ||
|
|
60fe5d6ffb | ||
|
|
327ae7437f | ||
|
|
795b9f2b9b | ||
|
|
54da069e68 | ||
|
|
bfc5cebac1 | ||
|
|
d46b1d48d8 | ||
|
|
96320e7761 | ||
|
|
e6472f0a81 | ||
|
|
816bbd9bbf | ||
|
|
da1123271d | ||
|
|
12f099260f | ||
|
|
35728ae208 | ||
|
|
7dba9cc798 | ||
|
|
bb1c920e22 | ||
|
|
770cbd7639 | ||
|
|
b43c9b8d93 | ||
|
|
935964c9b0 | ||
|
|
9aed9a5237 | ||
|
|
6c082c94c4 | ||
|
|
6c93b8304a | ||
|
|
85acf0fe78 | ||
|
|
fe64c6dd10 | ||
|
|
b15066a204 | ||
|
|
e2b29b6a07 | ||
|
|
7470ebe846 | ||
|
|
422efa68aa | ||
|
|
f4d8671922 | ||
|
|
70b136c9fc | ||
|
|
7adb5e93c7 | ||
|
|
a8b7117878 | ||
|
|
3bd99cd1e8 | ||
|
|
1d6af26857 | ||
|
|
14d18d731f | ||
|
|
c8d7cbbc2c | ||
|
|
ef4303fbbb | ||
|
|
2df9a9c69b | ||
|
|
6bb5d9f644 | ||
|
|
94b221248a | ||
|
|
2a887f5015 | ||
|
|
7e3dddf1bb | ||
|
|
fe95a7df2a | ||
|
|
98f6a30c6b | ||
|
|
00288053bf | ||
|
|
6a7feb98bd | ||
|
|
770d511067 | ||
|
|
b57fc1f818 | ||
|
|
01c5a8f07f | ||
|
|
243b8a3747 | ||
|
|
987fce7f07 | ||
|
|
657593be09 | ||
|
|
0afba7e3e3 | ||
|
|
ac00bb9029 | ||
|
|
67cb444d82 | ||
|
|
1914a29163 | ||
|
|
11201bc7c7 | ||
|
|
3a8b5257c0 | ||
|
|
00bb4e92dc | ||
|
|
7481c2ba0e | ||
|
|
9d336e257c | ||
|
|
2027171cb9 | ||
|
|
e06baf0d29 | ||
|
|
5d6a68a5bb | ||
|
|
41a5bb70bf | ||
|
|
c2ec62ba08 | ||
|
|
64361fc9ec | ||
|
|
ec55024734 | ||
|
|
df5c451a12 | ||
|
|
16c39a0a28 | ||
|
|
5e45b2cdd2 | ||
|
|
39210cf0c6 | ||
|
|
2e0e801533 | ||
|
|
0b9785a5d4 | ||
|
|
9070597c17 | ||
|
|
a5f986f145 | ||
|
|
1911afe3ee | ||
|
|
8b2145913c | ||
|
|
e75878d248 | ||
|
|
154d7cca78 | ||
|
|
3c018295b0 | ||
|
|
c8b8a4df11 | ||
|
|
edf74dc110 | ||
|
|
8d67fab958 |
61
.github/workflows/maven-publish.yml
vendored
61
.github/workflows/maven-publish.yml
vendored
@@ -1,64 +1,41 @@
|
||||
# This workflow will build a package using Maven and then publish it to GitHub packages when a release is created
|
||||
# For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path
|
||||
|
||||
name: Test and Publish Package
|
||||
name: Release Artifacts
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [ "created" ]
|
||||
|
||||
#on:
|
||||
# push:
|
||||
# branches: [ "main" ]
|
||||
# workflow_dispatch:
|
||||
types: [ created ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up JDK 11
|
||||
- name: Set up JDK 17
|
||||
uses: actions/setup-java@v3
|
||||
with:
|
||||
java-version: '11'
|
||||
distribution: 'adopt-hotspot'
|
||||
java-version: '17'
|
||||
distribution: 'temurin'
|
||||
server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
|
||||
settings-path: ${{ github.workspace }} # location for the settings.xml file
|
||||
|
||||
- name: Find and Replace
|
||||
uses: jacobtomlinson/gha-find-replace@v3
|
||||
with:
|
||||
find: "ollama4j-revision"
|
||||
replace: ${{ github.ref_name }}
|
||||
regex: false
|
||||
|
||||
- name: Build with Maven
|
||||
run: mvn --file pom.xml -U clean package -Punit-tests
|
||||
- name: Set up Apache Maven Central (Overwrite settings.xml)
|
||||
uses: actions/setup-java@v3
|
||||
with: # running setup-java again overwrites the settings.xml
|
||||
java-version: '11'
|
||||
distribution: 'adopt-hotspot'
|
||||
cache: 'maven'
|
||||
server-id: ossrh
|
||||
server-username: MAVEN_USERNAME
|
||||
server-password: MAVEN_PASSWORD
|
||||
gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||
gpg-passphrase: MAVEN_GPG_PASSPHRASE
|
||||
- name: Set up Maven cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: ~/.m2/repository
|
||||
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-maven-
|
||||
- name: Build
|
||||
run: mvn -B -ntp clean install
|
||||
|
||||
- name: Publish to GitHub Packages Apache Maven
|
||||
# if: >
|
||||
# github.event_name != 'pull_request' &&
|
||||
# github.ref_name == 'main' &&
|
||||
# contains(github.event.head_commit.message, 'release')
|
||||
run: |
|
||||
git config --global user.email "koujalgi.amith@gmail.com"
|
||||
git config --global user.name "amithkoujalgi"
|
||||
mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform
|
||||
run: mvn deploy -s $GITHUB_WORKSPACE/settings.xml --file pom.xml
|
||||
env:
|
||||
MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
|
||||
MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
|
||||
MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
34
.github/workflows/publish-docs.yml
vendored
34
.github/workflows/publish-docs.yml
vendored
@@ -1,10 +1,9 @@
|
||||
# Simple workflow for deploying static content to GitHub Pages
|
||||
name: Deploy Javadoc content to Pages
|
||||
name: Deploy Docs to GH Pages
|
||||
|
||||
on:
|
||||
# Runs on pushes targeting the default branch
|
||||
push:
|
||||
branches: [ "main" ]
|
||||
release:
|
||||
types: [ created ]
|
||||
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
@@ -30,6 +29,15 @@ jobs:
|
||||
name: github-pages
|
||||
url: ${{ steps.deployment.outputs.page_url }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up JDK 11
|
||||
uses: actions/setup-java@v3
|
||||
with:
|
||||
java-version: '11'
|
||||
distribution: 'adopt-hotspot'
|
||||
server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
|
||||
settings-path: ${{ github.workspace }} # location for the settings.xml file
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v3
|
||||
@@ -38,8 +46,22 @@ jobs:
|
||||
- run: cd docs && npm ci
|
||||
- run: cd docs && npm run build
|
||||
|
||||
# - name: Build with npm
|
||||
# run: npm i && npm run build
|
||||
- name: Find and Replace
|
||||
uses: jacobtomlinson/gha-find-replace@v3
|
||||
with:
|
||||
find: "ollama4j-revision"
|
||||
replace: ${{ github.ref_name }}
|
||||
regex: false
|
||||
|
||||
- name: Build with Maven
|
||||
run: mvn --file pom.xml -U clean package && cp -r ./target/apidocs/. ./docs/build/apidocs
|
||||
|
||||
- name: Doxygen Action
|
||||
uses: mattnotmitt/doxygen-action@v1.1.0
|
||||
with:
|
||||
doxyfile-path: "./Doxyfile"
|
||||
working-directory: "."
|
||||
|
||||
- name: Setup Pages
|
||||
uses: actions/configure-pages@v3
|
||||
- name: Upload artifact
|
||||
|
||||
52
.github/workflows/publish-javadoc.yml
vendored
52
.github/workflows/publish-javadoc.yml
vendored
@@ -1,52 +0,0 @@
|
||||
# Simple workflow for deploying static content to GitHub Pages
|
||||
name: Deploy Javadoc content to Pages
|
||||
|
||||
on:
|
||||
# Runs on pushes targeting the default branch
|
||||
push:
|
||||
branches: [ "none" ]
|
||||
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
|
||||
permissions:
|
||||
contents: read
|
||||
pages: write
|
||||
id-token: write
|
||||
packages: write
|
||||
# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
|
||||
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
|
||||
concurrency:
|
||||
group: "pages"
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
# Single deploy job since we're just deploying
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
environment:
|
||||
name: github-pages
|
||||
url: ${{ steps.deployment.outputs.page_url }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up JDK 11
|
||||
uses: actions/setup-java@v3
|
||||
with:
|
||||
java-version: '11'
|
||||
distribution: 'adopt-hotspot'
|
||||
server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
|
||||
settings-path: ${{ github.workspace }} # location for the settings.xml file
|
||||
- name: Build with Maven
|
||||
run: mvn --file pom.xml -U clean package
|
||||
- name: Setup Pages
|
||||
uses: actions/configure-pages@v3
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-pages-artifact@v2
|
||||
with:
|
||||
# Upload entire repository
|
||||
path: './target/apidocs/.'
|
||||
- name: Deploy to GitHub Pages
|
||||
id: deployment
|
||||
uses: actions/deploy-pages@v2
|
||||
413
Doxyfile
Normal file
413
Doxyfile
Normal file
@@ -0,0 +1,413 @@
|
||||
# Doxyfile 1.10.0
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Project related configuration options
|
||||
#---------------------------------------------------------------------------
|
||||
DOXYFILE_ENCODING = UTF-8
|
||||
PROJECT_NAME = "Ollama4j"
|
||||
PROJECT_NUMBER =
|
||||
PROJECT_BRIEF = "A Java library (wrapper/binding) for Ollama server."
|
||||
PROJECT_LOGO = ./logo-small.png
|
||||
PROJECT_ICON = ./logo-small.png
|
||||
OUTPUT_DIRECTORY = ./docs/build/doxygen
|
||||
CREATE_SUBDIRS = NO
|
||||
CREATE_SUBDIRS_LEVEL = 8
|
||||
ALLOW_UNICODE_NAMES = NO
|
||||
OUTPUT_LANGUAGE = English
|
||||
BRIEF_MEMBER_DESC = YES
|
||||
REPEAT_BRIEF = YES
|
||||
ABBREVIATE_BRIEF = "The $name class" \
|
||||
"The $name widget" \
|
||||
"The $name file" \
|
||||
is \
|
||||
provides \
|
||||
specifies \
|
||||
contains \
|
||||
represents \
|
||||
a \
|
||||
an \
|
||||
the
|
||||
ALWAYS_DETAILED_SEC = NO
|
||||
INLINE_INHERITED_MEMB = NO
|
||||
FULL_PATH_NAMES = YES
|
||||
STRIP_FROM_PATH =
|
||||
STRIP_FROM_INC_PATH =
|
||||
SHORT_NAMES = NO
|
||||
JAVADOC_AUTOBRIEF = NO
|
||||
JAVADOC_BANNER = NO
|
||||
QT_AUTOBRIEF = NO
|
||||
MULTILINE_CPP_IS_BRIEF = NO
|
||||
PYTHON_DOCSTRING = YES
|
||||
INHERIT_DOCS = YES
|
||||
SEPARATE_MEMBER_PAGES = NO
|
||||
TAB_SIZE = 4
|
||||
ALIASES =
|
||||
OPTIMIZE_OUTPUT_FOR_C = NO
|
||||
OPTIMIZE_OUTPUT_JAVA = YES
|
||||
OPTIMIZE_FOR_FORTRAN = NO
|
||||
OPTIMIZE_OUTPUT_VHDL = NO
|
||||
OPTIMIZE_OUTPUT_SLICE = NO
|
||||
EXTENSION_MAPPING =
|
||||
MARKDOWN_SUPPORT = YES
|
||||
TOC_INCLUDE_HEADINGS = 5
|
||||
MARKDOWN_ID_STYLE = DOXYGEN
|
||||
AUTOLINK_SUPPORT = YES
|
||||
BUILTIN_STL_SUPPORT = NO
|
||||
CPP_CLI_SUPPORT = NO
|
||||
SIP_SUPPORT = NO
|
||||
IDL_PROPERTY_SUPPORT = YES
|
||||
DISTRIBUTE_GROUP_DOC = NO
|
||||
GROUP_NESTED_COMPOUNDS = NO
|
||||
SUBGROUPING = YES
|
||||
INLINE_GROUPED_CLASSES = NO
|
||||
INLINE_SIMPLE_STRUCTS = NO
|
||||
TYPEDEF_HIDES_STRUCT = NO
|
||||
LOOKUP_CACHE_SIZE = 0
|
||||
NUM_PROC_THREADS = 1
|
||||
TIMESTAMP = NO
|
||||
#---------------------------------------------------------------------------
|
||||
# Build related configuration options
|
||||
#---------------------------------------------------------------------------
|
||||
EXTRACT_ALL = YES
|
||||
EXTRACT_PRIVATE = NO
|
||||
EXTRACT_PRIV_VIRTUAL = NO
|
||||
EXTRACT_PACKAGE = NO
|
||||
EXTRACT_STATIC = NO
|
||||
EXTRACT_LOCAL_CLASSES = YES
|
||||
EXTRACT_LOCAL_METHODS = NO
|
||||
EXTRACT_ANON_NSPACES = NO
|
||||
RESOLVE_UNNAMED_PARAMS = YES
|
||||
HIDE_UNDOC_MEMBERS = NO
|
||||
HIDE_UNDOC_CLASSES = NO
|
||||
HIDE_FRIEND_COMPOUNDS = NO
|
||||
HIDE_IN_BODY_DOCS = NO
|
||||
INTERNAL_DOCS = NO
|
||||
CASE_SENSE_NAMES = SYSTEM
|
||||
HIDE_SCOPE_NAMES = NO
|
||||
HIDE_COMPOUND_REFERENCE= NO
|
||||
SHOW_HEADERFILE = YES
|
||||
SHOW_INCLUDE_FILES = YES
|
||||
SHOW_GROUPED_MEMB_INC = NO
|
||||
FORCE_LOCAL_INCLUDES = NO
|
||||
INLINE_INFO = YES
|
||||
SORT_MEMBER_DOCS = YES
|
||||
SORT_BRIEF_DOCS = NO
|
||||
SORT_MEMBERS_CTORS_1ST = NO
|
||||
SORT_GROUP_NAMES = NO
|
||||
SORT_BY_SCOPE_NAME = NO
|
||||
STRICT_PROTO_MATCHING = NO
|
||||
GENERATE_TODOLIST = YES
|
||||
GENERATE_TESTLIST = YES
|
||||
GENERATE_BUGLIST = YES
|
||||
GENERATE_DEPRECATEDLIST= YES
|
||||
ENABLED_SECTIONS =
|
||||
MAX_INITIALIZER_LINES = 30
|
||||
SHOW_USED_FILES = YES
|
||||
SHOW_FILES = YES
|
||||
SHOW_NAMESPACES = YES
|
||||
FILE_VERSION_FILTER =
|
||||
LAYOUT_FILE =
|
||||
CITE_BIB_FILES =
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to warning and progress messages
|
||||
#---------------------------------------------------------------------------
|
||||
QUIET = NO
|
||||
WARNINGS = YES
|
||||
WARN_IF_UNDOCUMENTED = YES
|
||||
WARN_IF_DOC_ERROR = YES
|
||||
WARN_IF_INCOMPLETE_DOC = YES
|
||||
WARN_NO_PARAMDOC = NO
|
||||
WARN_IF_UNDOC_ENUM_VAL = NO
|
||||
WARN_AS_ERROR = NO
|
||||
WARN_FORMAT = "$file:$line: $text"
|
||||
WARN_LINE_FORMAT = "at line $line of file $file"
|
||||
WARN_LOGFILE =
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the input files
|
||||
#---------------------------------------------------------------------------
|
||||
INPUT = ./src/main
|
||||
INPUT_ENCODING = UTF-8
|
||||
INPUT_FILE_ENCODING =
|
||||
FILE_PATTERNS = *.c \
|
||||
*.cc \
|
||||
*.cxx \
|
||||
*.cxxm \
|
||||
*.cpp \
|
||||
*.cppm \
|
||||
*.ccm \
|
||||
*.c++ \
|
||||
*.c++m \
|
||||
*.java \
|
||||
*.ii \
|
||||
*.ixx \
|
||||
*.ipp \
|
||||
*.i++ \
|
||||
*.inl \
|
||||
*.idl \
|
||||
*.ddl \
|
||||
*.odl \
|
||||
*.h \
|
||||
*.hh \
|
||||
*.hxx \
|
||||
*.hpp \
|
||||
*.h++ \
|
||||
*.ixx \
|
||||
*.l \
|
||||
*.cs \
|
||||
*.d \
|
||||
*.php \
|
||||
*.php4 \
|
||||
*.php5 \
|
||||
*.phtml \
|
||||
*.inc \
|
||||
*.m \
|
||||
*.markdown \
|
||||
*.md \
|
||||
*.mm \
|
||||
*.dox \
|
||||
*.py \
|
||||
*.pyw \
|
||||
*.f90 \
|
||||
*.f95 \
|
||||
*.f03 \
|
||||
*.f08 \
|
||||
*.f18 \
|
||||
*.f \
|
||||
*.for \
|
||||
*.vhd \
|
||||
*.vhdl \
|
||||
*.ucf \
|
||||
*.qsf \
|
||||
*.ice
|
||||
RECURSIVE = YES
|
||||
EXCLUDE =
|
||||
EXCLUDE_SYMLINKS = NO
|
||||
EXCLUDE_PATTERNS =
|
||||
EXCLUDE_SYMBOLS =
|
||||
EXAMPLE_PATH =
|
||||
EXAMPLE_PATTERNS = *
|
||||
EXAMPLE_RECURSIVE = NO
|
||||
IMAGE_PATH =
|
||||
INPUT_FILTER =
|
||||
FILTER_PATTERNS =
|
||||
FILTER_SOURCE_FILES = NO
|
||||
FILTER_SOURCE_PATTERNS =
|
||||
USE_MDFILE_AS_MAINPAGE =
|
||||
FORTRAN_COMMENT_AFTER = 72
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to source browsing
|
||||
#---------------------------------------------------------------------------
|
||||
SOURCE_BROWSER = YES
|
||||
INLINE_SOURCES = NO
|
||||
STRIP_CODE_COMMENTS = YES
|
||||
REFERENCED_BY_RELATION = NO
|
||||
REFERENCES_RELATION = NO
|
||||
REFERENCES_LINK_SOURCE = YES
|
||||
SOURCE_TOOLTIPS = YES
|
||||
USE_HTAGS = NO
|
||||
VERBATIM_HEADERS = YES
|
||||
CLANG_ASSISTED_PARSING = NO
|
||||
CLANG_ADD_INC_PATHS = YES
|
||||
CLANG_OPTIONS =
|
||||
CLANG_DATABASE_PATH =
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the alphabetical class index
|
||||
#---------------------------------------------------------------------------
|
||||
ALPHABETICAL_INDEX = YES
|
||||
IGNORE_PREFIX =
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the HTML output
|
||||
#---------------------------------------------------------------------------
|
||||
GENERATE_HTML = YES
|
||||
HTML_OUTPUT = html
|
||||
HTML_FILE_EXTENSION = .html
|
||||
HTML_HEADER =
|
||||
HTML_FOOTER =
|
||||
HTML_STYLESHEET =
|
||||
HTML_EXTRA_STYLESHEET =
|
||||
HTML_EXTRA_FILES =
|
||||
HTML_COLORSTYLE = LIGHT
|
||||
HTML_COLORSTYLE_HUE = 220
|
||||
HTML_COLORSTYLE_SAT = 100
|
||||
HTML_COLORSTYLE_GAMMA = 80
|
||||
HTML_DYNAMIC_MENUS = YES
|
||||
HTML_DYNAMIC_SECTIONS = NO
|
||||
HTML_CODE_FOLDING = YES
|
||||
HTML_COPY_CLIPBOARD = YES
|
||||
HTML_PROJECT_COOKIE =
|
||||
HTML_INDEX_NUM_ENTRIES = 100
|
||||
GENERATE_DOCSET = NO
|
||||
DOCSET_FEEDNAME = "Doxygen generated docs"
|
||||
DOCSET_FEEDURL =
|
||||
DOCSET_BUNDLE_ID = org.doxygen.Project
|
||||
DOCSET_PUBLISHER_ID = org.doxygen.Publisher
|
||||
DOCSET_PUBLISHER_NAME = Publisher
|
||||
GENERATE_HTMLHELP = NO
|
||||
CHM_FILE =
|
||||
HHC_LOCATION =
|
||||
GENERATE_CHI = NO
|
||||
CHM_INDEX_ENCODING =
|
||||
BINARY_TOC = NO
|
||||
TOC_EXPAND = NO
|
||||
SITEMAP_URL =
|
||||
GENERATE_QHP = NO
|
||||
QCH_FILE =
|
||||
QHP_NAMESPACE = org.doxygen.Project
|
||||
QHP_VIRTUAL_FOLDER = doc
|
||||
QHP_CUST_FILTER_NAME =
|
||||
QHP_CUST_FILTER_ATTRS =
|
||||
QHP_SECT_FILTER_ATTRS =
|
||||
QHG_LOCATION =
|
||||
GENERATE_ECLIPSEHELP = NO
|
||||
ECLIPSE_DOC_ID = org.doxygen.Project
|
||||
DISABLE_INDEX = NO
|
||||
GENERATE_TREEVIEW = YES
|
||||
FULL_SIDEBAR = NO
|
||||
ENUM_VALUES_PER_LINE = 4
|
||||
TREEVIEW_WIDTH = 250
|
||||
EXT_LINKS_IN_WINDOW = NO
|
||||
OBFUSCATE_EMAILS = YES
|
||||
HTML_FORMULA_FORMAT = png
|
||||
FORMULA_FONTSIZE = 10
|
||||
FORMULA_MACROFILE =
|
||||
USE_MATHJAX = NO
|
||||
MATHJAX_VERSION = MathJax_2
|
||||
MATHJAX_FORMAT = HTML-CSS
|
||||
MATHJAX_RELPATH =
|
||||
MATHJAX_EXTENSIONS =
|
||||
MATHJAX_CODEFILE =
|
||||
SEARCHENGINE = YES
|
||||
SERVER_BASED_SEARCH = NO
|
||||
EXTERNAL_SEARCH = NO
|
||||
SEARCHENGINE_URL =
|
||||
SEARCHDATA_FILE = searchdata.xml
|
||||
EXTERNAL_SEARCH_ID =
|
||||
EXTRA_SEARCH_MAPPINGS =
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the LaTeX output
|
||||
#---------------------------------------------------------------------------
|
||||
GENERATE_LATEX = YES
|
||||
LATEX_OUTPUT = latex
|
||||
LATEX_CMD_NAME =
|
||||
MAKEINDEX_CMD_NAME = makeindex
|
||||
LATEX_MAKEINDEX_CMD = makeindex
|
||||
COMPACT_LATEX = NO
|
||||
PAPER_TYPE = a4
|
||||
EXTRA_PACKAGES =
|
||||
LATEX_HEADER =
|
||||
LATEX_FOOTER =
|
||||
LATEX_EXTRA_STYLESHEET =
|
||||
LATEX_EXTRA_FILES =
|
||||
PDF_HYPERLINKS = YES
|
||||
USE_PDFLATEX = YES
|
||||
LATEX_BATCHMODE = NO
|
||||
LATEX_HIDE_INDICES = NO
|
||||
LATEX_BIB_STYLE = plain
|
||||
LATEX_EMOJI_DIRECTORY =
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the RTF output
|
||||
#---------------------------------------------------------------------------
|
||||
GENERATE_RTF = NO
|
||||
RTF_OUTPUT = rtf
|
||||
COMPACT_RTF = NO
|
||||
RTF_HYPERLINKS = NO
|
||||
RTF_STYLESHEET_FILE =
|
||||
RTF_EXTENSIONS_FILE =
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the man page output
|
||||
#---------------------------------------------------------------------------
|
||||
GENERATE_MAN = NO
|
||||
MAN_OUTPUT = man
|
||||
MAN_EXTENSION = .3
|
||||
MAN_SUBDIR =
|
||||
MAN_LINKS = NO
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the XML output
|
||||
#---------------------------------------------------------------------------
|
||||
GENERATE_XML = NO
|
||||
XML_OUTPUT = xml
|
||||
XML_PROGRAMLISTING = YES
|
||||
XML_NS_MEMB_FILE_SCOPE = NO
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the DOCBOOK output
|
||||
#---------------------------------------------------------------------------
|
||||
GENERATE_DOCBOOK = NO
|
||||
DOCBOOK_OUTPUT = docbook
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options for the AutoGen Definitions output
|
||||
#---------------------------------------------------------------------------
|
||||
GENERATE_AUTOGEN_DEF = NO
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to Sqlite3 output
|
||||
#---------------------------------------------------------------------------
|
||||
GENERATE_SQLITE3 = NO
|
||||
SQLITE3_OUTPUT = sqlite3
|
||||
SQLITE3_RECREATE_DB = YES
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the Perl module output
|
||||
#---------------------------------------------------------------------------
|
||||
GENERATE_PERLMOD = NO
|
||||
PERLMOD_LATEX = NO
|
||||
PERLMOD_PRETTY = YES
|
||||
PERLMOD_MAKEVAR_PREFIX =
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the preprocessor
|
||||
#---------------------------------------------------------------------------
|
||||
ENABLE_PREPROCESSING = YES
|
||||
MACRO_EXPANSION = NO
|
||||
EXPAND_ONLY_PREDEF = NO
|
||||
SEARCH_INCLUDES = YES
|
||||
INCLUDE_PATH =
|
||||
INCLUDE_FILE_PATTERNS =
|
||||
PREDEFINED =
|
||||
EXPAND_AS_DEFINED =
|
||||
SKIP_FUNCTION_MACROS = YES
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to external references
|
||||
#---------------------------------------------------------------------------
|
||||
TAGFILES =
|
||||
GENERATE_TAGFILE =
|
||||
ALLEXTERNALS = NO
|
||||
EXTERNAL_GROUPS = YES
|
||||
EXTERNAL_PAGES = YES
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to diagram generator tools
|
||||
#---------------------------------------------------------------------------
|
||||
HIDE_UNDOC_RELATIONS = YES
|
||||
HAVE_DOT = NO
|
||||
DOT_NUM_THREADS = 0
|
||||
DOT_COMMON_ATTR = "fontname=Helvetica,fontsize=10"
|
||||
DOT_EDGE_ATTR = "labelfontname=Helvetica,labelfontsize=10"
|
||||
DOT_NODE_ATTR = "shape=box,height=0.2,width=0.4"
|
||||
DOT_FONTPATH =
|
||||
CLASS_GRAPH = YES
|
||||
COLLABORATION_GRAPH = YES
|
||||
GROUP_GRAPHS = YES
|
||||
UML_LOOK = NO
|
||||
UML_LIMIT_NUM_FIELDS = 10
|
||||
DOT_UML_DETAILS = NO
|
||||
DOT_WRAP_THRESHOLD = 17
|
||||
TEMPLATE_RELATIONS = NO
|
||||
INCLUDE_GRAPH = YES
|
||||
INCLUDED_BY_GRAPH = YES
|
||||
CALL_GRAPH = NO
|
||||
CALLER_GRAPH = NO
|
||||
GRAPHICAL_HIERARCHY = YES
|
||||
DIRECTORY_GRAPH = YES
|
||||
DIR_GRAPH_MAX_DEPTH = 1
|
||||
DOT_IMAGE_FORMAT = png
|
||||
INTERACTIVE_SVG = NO
|
||||
DOT_PATH =
|
||||
DOTFILE_DIRS =
|
||||
DIA_PATH =
|
||||
DIAFILE_DIRS =
|
||||
PLANTUML_JAR_PATH =
|
||||
PLANTUML_CFG_FILE =
|
||||
PLANTUML_INCLUDE_PATH =
|
||||
DOT_GRAPH_MAX_NODES = 50
|
||||
MAX_DOT_GRAPH_DEPTH = 0
|
||||
DOT_MULTI_TARGETS = NO
|
||||
GENERATE_LEGEND = YES
|
||||
DOT_CLEANUP = YES
|
||||
MSCGEN_TOOL =
|
||||
MSCFILE_DIRS =
|
||||
17
Makefile
17
Makefile
@@ -7,7 +7,22 @@ ut:
|
||||
it:
|
||||
mvn clean verify -Pintegration-tests
|
||||
|
||||
doxygen:
|
||||
doxygen Doxyfile
|
||||
|
||||
list-releases:
|
||||
curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=asc&page=0&size=12&filter=namespace%3Aio.github.amithkoujalgi%2Cname%3Aollama4j' \
|
||||
--compressed \
|
||||
--silent | jq '.components[].version'
|
||||
--silent | jq '.components[].version'
|
||||
|
||||
build-docs:
|
||||
npm i --prefix docs && npm run build --prefix docs
|
||||
|
||||
start-docs:
|
||||
npm i --prefix docs && npm run start --prefix docs
|
||||
|
||||
start-cpu:
|
||||
docker run -it -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama
|
||||
|
||||
start-gpu:
|
||||
docker run -it --gpus=all -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama
|
||||
@@ -1,12 +0,0 @@
|
||||
---
|
||||
slug: first-blog-post
|
||||
title: First Blog Post
|
||||
authors:
|
||||
name: Gao Wei
|
||||
title: Docusaurus Core Team
|
||||
url: https://github.com/wgao19
|
||||
image_url: https://github.com/wgao19.png
|
||||
tags: [hola, docusaurus]
|
||||
---
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
@@ -1,44 +0,0 @@
|
||||
---
|
||||
slug: long-blog-post
|
||||
title: Long Blog Post
|
||||
authors: endi
|
||||
tags: [hello, docusaurus]
|
||||
---
|
||||
|
||||
This is the summary of a very long blog post,
|
||||
|
||||
Use a `<!--` `truncate` `-->` comment to limit blog post size in the list view.
|
||||
|
||||
<!--truncate-->
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet
|
||||
@@ -1,20 +0,0 @@
|
||||
---
|
||||
slug: mdx-blog-post
|
||||
title: MDX Blog Post
|
||||
authors: [slorber]
|
||||
tags: [docusaurus]
|
||||
---
|
||||
|
||||
Blog posts support [Docusaurus Markdown features](https://docusaurus.io/docs/markdown-features), such as [MDX](https://mdxjs.com/).
|
||||
|
||||
:::tip
|
||||
|
||||
Use the power of React to create interactive blog posts.
|
||||
|
||||
```js
|
||||
<button onClick={() => alert('button clicked!')}>Click me!</button>
|
||||
```
|
||||
|
||||
<button onClick={() => alert('button clicked!')}>Click me!</button>
|
||||
|
||||
:::
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 94 KiB |
@@ -1,25 +0,0 @@
|
||||
---
|
||||
slug: welcome
|
||||
title: Welcome
|
||||
authors: [slorber, yangshun]
|
||||
tags: [facebook, hello, docusaurus]
|
||||
---
|
||||
|
||||
[Docusaurus blogging features](https://docusaurus.io/docs/blog) are powered by the [blog plugin](https://docusaurus.io/docs/api/plugins/@docusaurus/plugin-content-blog).
|
||||
|
||||
Simply add Markdown files (or folders) to the `blog` directory.
|
||||
|
||||
Regular blog authors can be added to `authors.yml`.
|
||||
|
||||
The blog post date can be extracted from filenames, such as:
|
||||
|
||||
- `2019-05-30-welcome.md`
|
||||
- `2019-05-30-welcome/index.md`
|
||||
|
||||
A blog post folder can be convenient to co-locate blog post images:
|
||||
|
||||

|
||||
|
||||
The blog supports tags as well!
|
||||
|
||||
**And if you don't want a blog**: just delete this directory, and use `blog: false` in your Docusaurus config.
|
||||
9
docs/blog/2023-12-01-welcome/index.md
Normal file
9
docs/blog/2023-12-01-welcome/index.md
Normal file
@@ -0,0 +1,9 @@
|
||||
---
|
||||
slug: welcome
|
||||
title: Welcome
|
||||
authors: [ amith ]
|
||||
tags: [ Java, AI, LLM, GenAI, GenerativeAI, Ollama, Ollama4J, OpenSource, Developers
|
||||
]
|
||||
---
|
||||
|
||||
Welcome Java Developers!
|
||||
66
docs/blog/2023-12-22-release-post.md
Normal file
66
docs/blog/2023-12-22-release-post.md
Normal file
@@ -0,0 +1,66 @@
|
||||
---
|
||||
slug: release-post
|
||||
title: Release
|
||||
authors: [ amith ]
|
||||
tags: [ Java, AI, LLM, GenAI, GenerativeAI, Ollama, Ollama4j, OpenSource, Developers
|
||||
]
|
||||
---
|
||||
|
||||
Hey there, my fellow Java Developers! 🚀
|
||||
|
||||
I am glad to announce the release of Ollama4j, a library that unites Ollama (an LLM manager and runner) and your Java
|
||||
applications! 🌐🚀
|
||||
|
||||
👉 GitHub Repository: Ollama4j on GitHub (https://github.com/amithkoujalgi/ollama4j)
|
||||
|
||||
🌟 Key Features:
|
||||
|
||||
- Easy integration with Ollama, enabling the execution of large language models locally.
|
||||
- Clean and simple APIs, focused on seamless interaction with Ollama.
|
||||
- Empowers Java developers to harness the full capabilities of Ollama.
|
||||
- Provides APIs to perform operations such as listing, pulling, deleting models, and creating custom models.
|
||||
- Provides APIs to ask questions (generate completions) to the LLMs in synchronous and asynchronous modes.
|
||||
- Ability to ask questions along with image files or image URLs! 🤩
|
||||
- Open-source and primed for collaborative contributions from the community!
|
||||
|
||||
🦙 What is Ollama?
|
||||
|
||||
Ollama is an advanced AI tool that allows users to easily set up and run large language models locally (in CPU and GPU
|
||||
modes). With Ollama, users can leverage powerful language models such as Llama 2 and even customize and create their own
|
||||
models.
|
||||
|
||||
For more details about Ollama, check these out:
|
||||
|
||||
- https://ollama.ai/
|
||||
- https://www.linkedin.com/company/ollama/
|
||||
|
||||
👨💻 Why Ollama4j?
|
||||
|
||||
As a Java developer passionate about harnessing the latest advancements in AI, I realized the need for a simple and
|
||||
efficient way to integrate Ollama into Java applications. That's why I authored Ollama4j!
|
||||
|
||||
🔧 How to Get Started:
|
||||
|
||||
Visit the GitHub repository: Ollama4j on GitHub.
|
||||
|
||||
Follow the easy setup instructions in the README to integrate Ollama into your Java projects.
|
||||
|
||||
Start unlocking the potential of large language models in your applications!
|
||||
|
||||
🙏 Contributions Welcome:
|
||||
|
||||
I invite the Java developer community to explore, use, and contribute to Ollama4j. Your feedback, suggestions, and
|
||||
contributions will help this library get better.
|
||||
|
||||
I am excited about the possibilities that Ollama4j opens up for Java developers. Whether you're working on natural
|
||||
language processing, chatbots, or any application that can benefit from advanced language models, Ollama4j is here to
|
||||
elevate your projects.
|
||||
|
||||
I look forward to seeing the incredible applications/projects you'll build with Ollama4j! 🌟
|
||||
|
||||
Find the full API spec here: https://amithkoujalgi.github.io/ollama4j/
|
||||
|
||||
Find the Javadoc here: https://amithkoujalgi.github.io/ollama4j/apidocs/
|
||||
|
||||
Ollama4j Docs is powered by [Docusaurus](https://docusaurus.io).
|
||||
|
||||
@@ -1,17 +1,5 @@
|
||||
endi:
|
||||
name: Endilie Yacop Sucipto
|
||||
title: Maintainer of Docusaurus
|
||||
url: https://github.com/endiliey
|
||||
image_url: https://github.com/endiliey.png
|
||||
|
||||
yangshun:
|
||||
name: Yangshun Tay
|
||||
title: Front End Engineer @ Facebook
|
||||
url: https://github.com/yangshun
|
||||
image_url: https://github.com/yangshun.png
|
||||
|
||||
slorber:
|
||||
name: Sébastien Lorber
|
||||
title: Docusaurus maintainer
|
||||
url: https://sebastienlorber.com
|
||||
image_url: https://github.com/slorber.png
|
||||
amith:
|
||||
name: Amith Koujalgi
|
||||
title: Maintainer of Ollama4j
|
||||
url: https://github.com/amithkoujalgi
|
||||
image_url: https://github.com/amithkoujalgi.png
|
||||
|
||||
0
docs/build.sh
Normal file → Executable file
0
docs/build.sh
Normal file → Executable file
8
docs/docs/apis-extras/_category_.json
Normal file
8
docs/docs/apis-extras/_category_.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"label": "APIs - Extras",
|
||||
"position": 4,
|
||||
"link": {
|
||||
"type": "generated-index",
|
||||
"description": "Details of APIs to handle bunch of extra stuff."
|
||||
}
|
||||
}
|
||||
24
docs/docs/apis-extras/basic-auth.md
Normal file
24
docs/docs/apis-extras/basic-auth.md
Normal file
@@ -0,0 +1,24 @@
|
||||
---
|
||||
sidebar_position: 2
|
||||
---
|
||||
|
||||
# Set Basic Authentication
|
||||
|
||||
This API lets you set the basic authentication for the Ollama client. This would help in scenarios where
|
||||
Ollama server would be setup behind a gateway/reverse proxy with basic auth.
|
||||
|
||||
After configuring basic authentication, all subsequent requests will include the Basic Auth header.
|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
ollamaAPI.setBasicAuth("username", "password");
|
||||
}
|
||||
}
|
||||
```
|
||||
78
docs/docs/apis-extras/options-builder.md
Normal file
78
docs/docs/apis-extras/options-builder.md
Normal file
@@ -0,0 +1,78 @@
|
||||
---
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
# Options Builder
|
||||
|
||||
This lets you build options for the `ask()` API.
|
||||
|
||||
Following are the parameters supported by Ollama:
|
||||
|
||||
| Parameter | Description | Value Type | Example Usage |
|
||||
|----------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------|----------------------|
|
||||
| mirostat | Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0) | int | mirostat 0 |
|
||||
| mirostat_eta | Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. (Default: 0.1) | float | mirostat_eta 0.1 |
|
||||
| mirostat_tau | Controls the balance between coherence and diversity of the output. A lower value will result in more focused and coherent text. (Default: 5.0) | float | mirostat_tau 5.0 |
|
||||
| num_ctx | Sets the size of the context window used to generate the next token. (Default: 2048) | int | num_ctx 4096 |
|
||||
| num_gqa | The number of GQA groups in the transformer layer. Required for some models, for example it is 8 for llama2:70b | int | num_gqa 1 |
|
||||
| num_gpu | The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable. | int | num_gpu 50 |
|
||||
| num_thread | Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). | int | num_thread 8 |
|
||||
| repeat_last_n | Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx) | int | repeat_last_n 64 |
|
||||
| repeat_penalty | Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1) | float | repeat_penalty 1.1 |
|
||||
| temperature | The temperature of the model. Increasing the temperature will make the model answer more creatively. (Default: 0.8) | float | temperature 0.7 |
|
||||
| seed | Sets the random number seed to use for generation. Setting this to a specific number will make the model generate the same text for the same prompt. (Default: 0) | int | seed 42 |
|
||||
| stop | Sets the stop sequences to use. When this pattern is encountered the LLM will stop generating text and return. Multiple stop patterns may be set by specifying multiple separate `stop` parameters in a modelfile. | string | stop "AI assistant:" |
|
||||
| tfs_z | Tail free sampling is used to reduce the impact of less probable tokens from the output. A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. (default: 1) | float | tfs_z 1 |
|
||||
| num_predict | Maximum number of tokens to predict when generating text. (Default: 128, -1 = infinite generation, -2 = fill context) | int | num_predict 42 |
|
||||
| top_k | Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40) | int | top_k 40 |
|
||||
| top_p | Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9) | float | top_p 0.9 |
|
||||
|
||||
Link to [source](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
|
||||
|
||||
Also, see how to set those Ollama parameters using
|
||||
the `OptionsBuilder`
|
||||
from [javadoc](https://amithkoujalgi.github.io/ollama4j/apidocs/io/github/amithkoujalgi/ollama4j/core/utils/OptionsBuilder.html).
|
||||
|
||||
## Build an empty `Options` object
|
||||
|
||||
```java
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
|
||||
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
Options options = new OptionsBuilder().build();
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Build the `Options` object with values
|
||||
|
||||
```java
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
|
||||
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
Options options =
|
||||
new OptionsBuilder()
|
||||
.setMirostat(10)
|
||||
.setMirostatEta(0.5f)
|
||||
.setNumGpu(2)
|
||||
.setTemperature(1.5f)
|
||||
.build();
|
||||
}
|
||||
}
|
||||
```
|
||||
20
docs/docs/apis-extras/ping.md
Normal file
20
docs/docs/apis-extras/ping.md
Normal file
@@ -0,0 +1,20 @@
|
||||
---
|
||||
sidebar_position: 3
|
||||
---
|
||||
|
||||
# Ping
|
||||
|
||||
This API lets you check the reachability of Ollama server.
|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
ollamaAPI.ping();
|
||||
}
|
||||
}
|
||||
```
|
||||
21
docs/docs/apis-extras/request-timeout.md
Normal file
21
docs/docs/apis-extras/request-timeout.md
Normal file
@@ -0,0 +1,21 @@
|
||||
---
|
||||
sidebar_position: 2
|
||||
---
|
||||
|
||||
# Set Request Timeout
|
||||
|
||||
This API lets you set the request timeout for the Ollama client.
|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
ollamaAPI.setRequestTimeoutSeconds(10);
|
||||
}
|
||||
}
|
||||
```
|
||||
23
docs/docs/apis-extras/verbosity.md
Normal file
23
docs/docs/apis-extras/verbosity.md
Normal file
@@ -0,0 +1,23 @@
|
||||
---
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
# Set Verbosity
|
||||
|
||||
This API lets you set the verbosity of the Ollama client.
|
||||
|
||||
## Try asking a question about the model.
|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
ollamaAPI.setVerbose(true);
|
||||
}
|
||||
}
|
||||
```
|
||||
8
docs/docs/apis-generate/_category_.json
Normal file
8
docs/docs/apis-generate/_category_.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"label": "APIs - Generate",
|
||||
"position": 3,
|
||||
"link": {
|
||||
"type": "generated-index",
|
||||
"description": "Details of APIs to interact with LLMs."
|
||||
}
|
||||
}
|
||||
205
docs/docs/apis-generate/chat.md
Normal file
205
docs/docs/apis-generate/chat.md
Normal file
@@ -0,0 +1,205 @@
|
||||
---
|
||||
sidebar_position: 7
|
||||
---
|
||||
|
||||
# Chat
|
||||
|
||||
This API lets you create a conversation with LLMs. Using this API enables you to ask questions to the model including
|
||||
information using the history of already asked questions and the respective answers.
|
||||
|
||||
## Create a new conversation and use chat history to augment follow up questions
|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
|
||||
|
||||
// create first user question
|
||||
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
|
||||
.build();
|
||||
|
||||
// start conversation with model
|
||||
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
|
||||
|
||||
System.out.println("First answer: " + chatResult.getResponse());
|
||||
|
||||
// create next userQuestion
|
||||
requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?").build();
|
||||
|
||||
// "continue" conversation with model
|
||||
chatResult = ollamaAPI.chat(requestModel);
|
||||
|
||||
System.out.println("Second answer: " + chatResult.getResponse());
|
||||
|
||||
System.out.println("Chat History: " + chatResult.getChatHistory());
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
You will get a response similar to:
|
||||
|
||||
> First answer: Should be Paris!
|
||||
>
|
||||
> Second answer: Marseille.
|
||||
>
|
||||
> Chat History:
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What is the capital of France?",
|
||||
"images": []
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "Should be Paris!",
|
||||
"images": []
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "And what is the second largest city?",
|
||||
"images": []
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "Marseille.",
|
||||
"images": []
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
## Create a conversation where the answer is streamed
|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
|
||||
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
|
||||
"What is the capital of France? And what's France's connection with Mona Lisa?")
|
||||
.build();
|
||||
|
||||
// define a handler (Consumer<String>)
|
||||
OllamaStreamHandler streamHandler = (s) -> {
|
||||
System.out.println(s);
|
||||
};
|
||||
|
||||
OllamaChatResult chatResult = ollamaAPI.chat(requestModel, streamHandler);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
You will get a response similar to:
|
||||
|
||||
> The
|
||||
> The capital
|
||||
> The capital of
|
||||
> The capital of France
|
||||
> The capital of France is
|
||||
> The capital of France is Paris
|
||||
> The capital of France is Paris.
|
||||
|
||||
## Use a simple Console Output Stream Handler
|
||||
|
||||
```java
|
||||
import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler;
|
||||
|
||||
public class Main {
|
||||
public static void main(String[] args) throws Exception {
|
||||
String host = "http://localhost:11434/";
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
|
||||
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
|
||||
.build();
|
||||
OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
|
||||
ollamaAPI.chat(requestModel, streamHandler);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Create a new conversation with individual system prompt
|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
|
||||
|
||||
// create request with system-prompt (overriding the model defaults) and user question
|
||||
OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
|
||||
.withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
|
||||
.build();
|
||||
|
||||
// start conversation with model
|
||||
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
|
||||
|
||||
System.out.println(chatResult.getResponse());
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
You will get a response similar to:
|
||||
|
||||
> NI.
|
||||
|
||||
## Create a conversation about an image (requires model with image recognition skills)
|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA);
|
||||
|
||||
// Load Image from File and attach to user message (alternatively images could also be added via URL)
|
||||
OllamaChatRequestModel requestModel =
|
||||
builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
|
||||
List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
|
||||
|
||||
OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
|
||||
System.out.println("First answer: " + chatResult.getResponse());
|
||||
|
||||
builder.reset();
|
||||
|
||||
// Use history to ask further questions about the image or assistant answer
|
||||
requestModel =
|
||||
builder.withMessages(chatResult.getChatHistory())
|
||||
.withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();
|
||||
|
||||
chatResult = ollamaAPI.chat(requestModel);
|
||||
System.out.println("Second answer: " + chatResult.getResponse());
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
You will get a response similar to:
|
||||
|
||||
> First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two
|
||||
> levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and
|
||||
> comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early
|
||||
> evening, given the warm lighting and the low position of the sun in the sky.
|
||||
>
|
||||
> Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. However, the dog
|
||||
> appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever
|
||||
> or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed
|
||||
> confidently.
|
||||
46
docs/docs/apis-generate/generate-async.md
Normal file
46
docs/docs/apis-generate/generate-async.md
Normal file
@@ -0,0 +1,46 @@
|
||||
---
|
||||
sidebar_position: 2
|
||||
---
|
||||
|
||||
# Generate - Async
|
||||
|
||||
This API lets you ask questions to the LLMs in an asynchronous way.
|
||||
This is particularly helpful when you want to issue a generate request to the LLM and collect the response in the
|
||||
background (such as threads) without blocking your code until the response arrives from the model.
|
||||
|
||||
This API corresponds to
|
||||
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
String host = "http://localhost:11434/";
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
ollamaAPI.setRequestTimeoutSeconds(60);
|
||||
String prompt = "List all cricket world cup teams of 2019.";
|
||||
OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync(OllamaModelType.LLAMA3, prompt, false);
|
||||
|
||||
// Set the poll interval according to your needs.
|
||||
// Smaller the poll interval, more frequently you receive the tokens.
|
||||
int pollIntervalMilliseconds = 1000;
|
||||
|
||||
while (true) {
|
||||
String tokens = streamer.getStream().poll();
|
||||
System.out.print(tokens);
|
||||
if (!streamer.isAlive()) {
|
||||
break;
|
||||
}
|
||||
Thread.sleep(pollIntervalMilliseconds);
|
||||
}
|
||||
|
||||
System.out.println("\n------------------------");
|
||||
System.out.println("Complete Response:");
|
||||
System.out.println("------------------------");
|
||||
|
||||
System.out.println(streamer.getResult());
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
You will get a streaming response.
|
||||
46
docs/docs/apis-generate/generate-embeddings.md
Normal file
46
docs/docs/apis-generate/generate-embeddings.md
Normal file
@@ -0,0 +1,46 @@
|
||||
---
|
||||
sidebar_position: 6
|
||||
---
|
||||
|
||||
# Generate Embeddings
|
||||
|
||||
Generate embeddings from a model.
|
||||
|
||||
Parameters:
|
||||
|
||||
- `model`: name of model to generate embeddings from
|
||||
- `prompt`: text to generate embeddings for
|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
List<Double> embeddings = ollamaAPI.generateEmbeddings(OllamaModelType.LLAMA2,
|
||||
"Here is an article about llamas...");
|
||||
|
||||
embeddings.forEach(System.out::println);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
You will get a response similar to:
|
||||
|
||||
```javascript
|
||||
[
|
||||
0.5670403838157654,
|
||||
0.009260174818336964,
|
||||
0.23178744316101074,
|
||||
-0.2916173040866852,
|
||||
-0.8924556970596313,
|
||||
0.8785552978515625,
|
||||
-0.34576427936553955,
|
||||
0.5742510557174683,
|
||||
-0.04222835972905159,
|
||||
-0.137906014919281
|
||||
]
|
||||
```
|
||||
44
docs/docs/apis-generate/generate-with-image-files.md
Normal file
44
docs/docs/apis-generate/generate-with-image-files.md
Normal file
@@ -0,0 +1,44 @@
|
||||
---
|
||||
sidebar_position: 4
|
||||
---
|
||||
|
||||
# Generate - With Image Files
|
||||
|
||||
This API lets you ask questions along with the image files to the LLMs.
|
||||
This API corresponds to
|
||||
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
|
||||
|
||||
:::note
|
||||
|
||||
Executing this on Ollama server running in CPU-mode will take longer to generate response. Hence, GPU-mode is
|
||||
recommended.
|
||||
|
||||
:::
|
||||
|
||||
## Synchronous mode
|
||||
|
||||
If you have this image downloaded and you pass the path to the downloaded image to the following code:
|
||||
|
||||

|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
String host = "http://localhost:11434/";
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
ollamaAPI.setRequestTimeoutSeconds(10);
|
||||
|
||||
OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA,
|
||||
"What's in this image?",
|
||||
List.of(
|
||||
new File("/path/to/image")));
|
||||
System.out.println(result.getResponse());
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
You will get a response similar to:
|
||||
|
||||
> This image features a white boat with brown cushions, where a dog is sitting on the back of the boat. The dog seems to
|
||||
> be enjoying its time outdoors, perhaps on a lake.
|
||||
44
docs/docs/apis-generate/generate-with-image-urls.md
Normal file
44
docs/docs/apis-generate/generate-with-image-urls.md
Normal file
@@ -0,0 +1,44 @@
|
||||
---
|
||||
sidebar_position: 5
|
||||
---
|
||||
|
||||
# Generate - With Image URLs
|
||||
|
||||
This API lets you ask questions along with the image URLs to the LLMs.
|
||||
This API corresponds to
|
||||
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
|
||||
|
||||
:::note
|
||||
|
||||
Executing this on Ollama server running in CPU-mode will take longer to generate response. Hence, GPU-mode is
|
||||
recommended.
|
||||
|
||||
:::
|
||||
|
||||
## Ask (Sync)
|
||||
|
||||
Passing the link of this image the following code:
|
||||
|
||||

|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
String host = "http://localhost:11434/";
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
ollamaAPI.setRequestTimeoutSeconds(10);
|
||||
|
||||
OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA,
|
||||
"What's in this image?",
|
||||
List.of(
|
||||
"https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"));
|
||||
System.out.println(result.getResponse());
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
You will get a response similar to:
|
||||
|
||||
> This image features a white boat with brown cushions, where a dog is sitting on the back of the boat. The dog seems to
|
||||
> be enjoying its time outdoors, perhaps on a lake.
|
||||
367
docs/docs/apis-generate/generate-with-tools.md
Normal file
367
docs/docs/apis-generate/generate-with-tools.md
Normal file
@@ -0,0 +1,367 @@
|
||||
---
|
||||
sidebar_position: 3
|
||||
---
|
||||
|
||||
# Generate - With Tools
|
||||
|
||||
This API lets you perform [function calling](https://docs.mistral.ai/capabilities/function_calling/) using LLMs in a
|
||||
synchronous way.
|
||||
This API corresponds to
|
||||
the [generate](https://github.com/ollama/ollama/blob/main/docs/api.md#request-raw-mode) API with `raw` mode.
|
||||
|
||||
:::note
|
||||
|
||||
This is only an experimental implementation and has a very basic design.
|
||||
|
||||
Currently, built and tested for [Mistral's latest model](https://ollama.com/library/mistral) only. We could redesign
|
||||
this
|
||||
in the future if tooling is supported for more models with a generic interaction standard from Ollama.
|
||||
|
||||
:::
|
||||
|
||||
### Function Calling/Tools
|
||||
|
||||
Assume you want to call a method in your code based on the response generated from the model.
|
||||
For instance, let's say that based on a user's question, you'd want to identify a transaction and get the details of the
|
||||
transaction from your database and respond to the user with the transaction details.
|
||||
|
||||
You could do that with ease with the `function calling` capabilities of the models by registering your `tools`.
|
||||
|
||||
### Create Functions
|
||||
|
||||
This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns
|
||||
fuel price value.
|
||||
|
||||
```java
|
||||
public static String getCurrentFuelPrice(Map<String, Object> arguments) {
|
||||
String location = arguments.get("location").toString();
|
||||
String fuelType = arguments.get("fuelType").toString();
|
||||
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
|
||||
}
|
||||
```
|
||||
|
||||
This function takes the argument `city` and performs an operation with the argument and returns the weather for a
|
||||
location.
|
||||
|
||||
```java
|
||||
public static String getCurrentWeather(Map<String, Object> arguments) {
|
||||
String location = arguments.get("city").toString();
|
||||
return "Currently " + location + "'s weather is nice.";
|
||||
}
|
||||
```
|
||||
|
||||
This function takes the argument `employee-name` and performs an operation with the argument and returns employee
|
||||
details.
|
||||
|
||||
```java
|
||||
class DBQueryFunction implements ToolFunction {
|
||||
@Override
|
||||
public Object apply(Map<String, Object> arguments) {
|
||||
// perform DB operations here
|
||||
return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Define Tool Specifications
|
||||
|
||||
Let's define a sample tool specification called **Fuel Price Tool** for getting the current fuel price.
|
||||
|
||||
- Specify the function `name`, `description`, and `required` properties (`location` and `fuelType`).
|
||||
- Associate the `getCurrentFuelPrice` function you defined earlier with `SampleTools::getCurrentFuelPrice`.
|
||||
|
||||
```java
|
||||
Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
|
||||
.functionName("current-fuel-price")
|
||||
.functionDescription("Get current fuel price")
|
||||
.properties(
|
||||
new Tools.PropsBuilder()
|
||||
.withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
|
||||
.withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
|
||||
.build()
|
||||
)
|
||||
.toolDefinition(SampleTools::getCurrentFuelPrice)
|
||||
.build();
|
||||
```
|
||||
|
||||
Let's also define a sample tool specification called **Weather Tool** for getting the current weather.
|
||||
|
||||
- Specify the function `name`, `description`, and `required` property (`city`).
|
||||
- Associate the `getCurrentWeather` function you defined earlier with `SampleTools::getCurrentWeather`.
|
||||
|
||||
```java
|
||||
Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
|
||||
.functionName("current-weather")
|
||||
.functionDescription("Get current weather")
|
||||
.properties(
|
||||
new Tools.PropsBuilder()
|
||||
.withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
|
||||
.build()
|
||||
)
|
||||
.toolDefinition(SampleTools::getCurrentWeather)
|
||||
.build();
|
||||
```
|
||||
|
||||
Let's also define a sample tool specification called **DBQueryFunction** for getting the employee details from the database.
|
||||
|
||||
- Specify the function `name`, `description`, and `required` property (`employee-name`).
|
||||
- Associate an instance of the `DBQueryFunction` ToolFunction you defined earlier by passing `new DBQueryFunction()`.
|
||||
|
||||
```java
|
||||
Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
|
||||
.functionName("get-employee-details")
|
||||
.functionDescription("Get employee details from the database")
|
||||
.properties(
|
||||
new Tools.PropsBuilder()
|
||||
.withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
|
||||
.withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
|
||||
.withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
|
||||
.build()
|
||||
)
|
||||
.toolDefinition(new DBQueryFunction())
|
||||
.build();
|
||||
```
|
||||
|
||||
### Register the Tools
|
||||
|
||||
Register the defined tools (`fuel price` and `weather`) with the OllamaAPI.
|
||||
|
||||
```java
|
||||
ollamaAPI.registerTool(fuelPriceToolSpecification);
|
||||
ollamaAPI.registerTool(weatherToolSpecification);
|
||||
```
|
||||
|
||||
### Create prompt with Tools
|
||||
|
||||
`Prompt 1`: Create a prompt asking for the petrol price in Bengaluru using the defined fuel price and weather tools.
|
||||
|
||||
```java
|
||||
String prompt1 = new Tools.PromptBuilder()
|
||||
.withToolSpecification(fuelPriceToolSpecification)
|
||||
.withToolSpecification(weatherToolSpecification)
|
||||
.withPrompt("What is the petrol price in Bengaluru?")
|
||||
.build();
|
||||
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, new OptionsBuilder().build());
|
||||
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
|
||||
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
|
||||
}
|
||||
```
|
||||
|
||||
Now, fire away your question to the model.
|
||||
|
||||
You will get a response similar to:
|
||||
|
||||
::::tip[LLM Response]
|
||||
|
||||
[Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L
|
||||
|
||||
::::
|
||||
|
||||
`Prompt 2`: Create a prompt asking for the current weather in Bengaluru using the same tools.
|
||||
|
||||
```java
|
||||
String prompt2 = new Tools.PromptBuilder()
|
||||
.withToolSpecification(fuelPriceToolSpecification)
|
||||
.withToolSpecification(weatherToolSpecification)
|
||||
.withPrompt("What is the current weather in Bengaluru?")
|
||||
.build();
|
||||
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, new OptionsBuilder().build());
|
||||
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
|
||||
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
|
||||
}
|
||||
```
|
||||
|
||||
Again, fire away your question to the model.
|
||||
|
||||
You will get a response similar to:
|
||||
|
||||
::::tip[LLM Response]
|
||||
|
||||
[Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice.
|
||||
|
||||
::::
|
||||
|
||||
`Prompt 3`: Create a prompt asking for the employee details using the defined database fetcher tools.
|
||||
|
||||
```java
|
||||
String prompt3 = new Tools.PromptBuilder()
|
||||
.withToolSpecification(fuelPriceToolSpecification)
|
||||
.withToolSpecification(weatherToolSpecification)
|
||||
.withToolSpecification(databaseQueryToolSpecification)
|
||||
.withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
|
||||
.build();
|
||||
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt3, new OptionsBuilder().build());
|
||||
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
|
||||
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
|
||||
}
|
||||
```
|
||||
|
||||
Again, fire away your question to the model.
|
||||
|
||||
You will get a response similar to:
|
||||
|
||||
::::tip[LLM Response]
|
||||
|
||||
[Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name:
|
||||
Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
|
||||
|
||||
::::
|
||||
|
||||
### Full Example
|
||||
|
||||
```java
|
||||
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
|
||||
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
|
||||
import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolInvocationException;
|
||||
import io.github.amithkoujalgi.ollama4j.core.tools.OllamaToolsResult;
|
||||
import io.github.amithkoujalgi.ollama4j.core.tools.ToolFunction;
|
||||
import io.github.amithkoujalgi.ollama4j.core.tools.Tools;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
public class FunctionCallingWithMistralExample {
|
||||
public static void main(String[] args) throws Exception {
|
||||
String host = "http://localhost:11434/";
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
ollamaAPI.setRequestTimeoutSeconds(60);
|
||||
|
||||
String model = "mistral";
|
||||
|
||||
Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
|
||||
.functionName("current-fuel-price")
|
||||
.functionDescription("Get current fuel price")
|
||||
.properties(
|
||||
new Tools.PropsBuilder()
|
||||
.withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
|
||||
.withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
|
||||
.build()
|
||||
)
|
||||
.toolDefinition(SampleTools::getCurrentFuelPrice)
|
||||
.build();
|
||||
|
||||
Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
|
||||
.functionName("current-weather")
|
||||
.functionDescription("Get current weather")
|
||||
.properties(
|
||||
new Tools.PropsBuilder()
|
||||
.withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
|
||||
.build()
|
||||
)
|
||||
.toolDefinition(SampleTools::getCurrentWeather)
|
||||
.build();
|
||||
|
||||
Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
|
||||
.functionName("get-employee-details")
|
||||
.functionDescription("Get employee details from the database")
|
||||
.properties(
|
||||
new Tools.PropsBuilder()
|
||||
.withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
|
||||
.withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
|
||||
.withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
|
||||
.build()
|
||||
)
|
||||
.toolDefinition(new DBQueryFunction())
|
||||
.build();
|
||||
|
||||
ollamaAPI.registerTool(fuelPriceToolSpecification);
|
||||
ollamaAPI.registerTool(weatherToolSpecification);
|
||||
ollamaAPI.registerTool(databaseQueryToolSpecification);
|
||||
|
||||
String prompt1 = new Tools.PromptBuilder()
|
||||
.withToolSpecification(fuelPriceToolSpecification)
|
||||
.withToolSpecification(weatherToolSpecification)
|
||||
.withPrompt("What is the petrol price in Bengaluru?")
|
||||
.build();
|
||||
ask(ollamaAPI, model, prompt1);
|
||||
|
||||
String prompt2 = new Tools.PromptBuilder()
|
||||
.withToolSpecification(fuelPriceToolSpecification)
|
||||
.withToolSpecification(weatherToolSpecification)
|
||||
.withPrompt("What is the current weather in Bengaluru?")
|
||||
.build();
|
||||
ask(ollamaAPI, model, prompt2);
|
||||
|
||||
String prompt3 = new Tools.PromptBuilder()
|
||||
.withToolSpecification(fuelPriceToolSpecification)
|
||||
.withToolSpecification(weatherToolSpecification)
|
||||
.withToolSpecification(databaseQueryToolSpecification)
|
||||
.withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
|
||||
.build();
|
||||
ask(ollamaAPI, model, prompt3);
|
||||
}
|
||||
|
||||
public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException {
|
||||
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, new OptionsBuilder().build());
|
||||
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
|
||||
System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class SampleTools {
|
||||
public static String getCurrentFuelPrice(Map<String, Object> arguments) {
|
||||
// Get details from fuel price API
|
||||
String location = arguments.get("location").toString();
|
||||
String fuelType = arguments.get("fuelType").toString();
|
||||
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
|
||||
}
|
||||
|
||||
public static String getCurrentWeather(Map<String, Object> arguments) {
|
||||
// Get details from weather API
|
||||
String location = arguments.get("city").toString();
|
||||
return "Currently " + location + "'s weather is nice.";
|
||||
}
|
||||
}
|
||||
|
||||
class DBQueryFunction implements ToolFunction {
|
||||
@Override
|
||||
public Object apply(Map<String, Object> arguments) {
|
||||
// perform DB operations here
|
||||
return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Run this full example and you will get a response similar to:
|
||||
|
||||
::::tip[LLM Response]
|
||||
|
||||
[Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L
|
||||
|
||||
[Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice.
|
||||
|
||||
[Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name:
|
||||
Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
|
||||
|
||||
::::
|
||||
|
||||
### Room for improvement
|
||||
|
||||
Instead of explicitly registering `ollamaAPI.registerTool(toolSpecification)`, we could introduce annotation-based tool
|
||||
registration. For example:
|
||||
|
||||
```java
|
||||
|
||||
@ToolSpec(name = "current-fuel-price", desc = "Get current fuel price")
|
||||
public String getCurrentFuelPrice(Map<String, Object> arguments) {
|
||||
String location = arguments.get("location").toString();
|
||||
String fuelType = arguments.get("fuelType").toString();
|
||||
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
|
||||
}
|
||||
```
|
||||
|
||||
Instead of passing a map of args `Map<String, Object> arguments` to the tool functions, we could support passing
|
||||
specific args separately with their data types. For example:
|
||||
|
||||
```java
|
||||
public String getCurrentFuelPrice(String location, String fuelType) {
|
||||
return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
|
||||
}
|
||||
```
|
||||
|
||||
Update the async/chat APIs with support for tool-based generation.
|
||||
154
docs/docs/apis-generate/generate.md
Normal file
154
docs/docs/apis-generate/generate.md
Normal file
@@ -0,0 +1,154 @@
|
||||
---
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
# Generate - Sync
|
||||
|
||||
This API lets you ask questions to the LLMs in a synchronous way.
|
||||
This API corresponds to
|
||||
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
|
||||
|
||||
Use the `OptionBuilder` to build the `Options` object
|
||||
with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
|
||||
Refer
|
||||
to [this](/apis-extras/options-builder).
|
||||
|
||||
## Try asking a question about the model.
|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
OllamaResult result =
|
||||
ollamaAPI.generate(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());
|
||||
|
||||
System.out.println(result.getResponse());
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
You will get a response similar to:
|
||||
|
||||
> I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational
|
||||
> manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide
|
||||
> range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that
|
||||
> require
|
||||
> natural language understanding and generation capabilities.
|
||||
|
||||
## Try asking a question, receiving the answer streamed
|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
// define a stream handler (Consumer<String>)
|
||||
OllamaStreamHandler streamHandler = (s) -> {
|
||||
System.out.println(s);
|
||||
};
|
||||
|
||||
        // Should be called using a separate thread to gain a non-blocking streaming effect.
|
||||
OllamaResult result = ollamaAPI.generate(config.getModel(),
|
||||
"What is the capital of France? And what's France's connection with Mona Lisa?",
|
||||
new OptionsBuilder().build(), streamHandler);
|
||||
|
||||
System.out.println("Full response: " + result.getResponse());
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
You will get a response similar to:
|
||||
|
||||
> The
|
||||
> The capital
|
||||
> The capital of
|
||||
> The capital of France
|
||||
> The capital of France is
|
||||
> The capital of France is Paris
|
||||
> The capital of France is Paris.
|
||||
> Full response: The capital of France is Paris.
|
||||
|
||||
## Try asking a question from general topics.
|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
String prompt = "List all cricket world cup teams of 2019.";
|
||||
|
||||
OllamaResult result =
|
||||
ollamaAPI.generate(OllamaModelType.LLAMA2, prompt, new OptionsBuilder().build());
|
||||
|
||||
System.out.println(result.getResponse());
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
You'd then get a response from the model:
|
||||
|
||||
> The 2019 ICC Cricket World Cup was held in England and Wales from May 30 to July 14, 2019. The
|
||||
> following teams
|
||||
> participated in the tournament:
|
||||
>
|
||||
> 1. Afghanistan
|
||||
> 2. Australia
|
||||
> 3. Bangladesh
|
||||
> 4. England
|
||||
> 5. India
|
||||
> 6. New Zealand
|
||||
> 7. Pakistan
|
||||
> 8. South Africa
|
||||
> 9. Sri Lanka
|
||||
> 10. West Indies
|
||||
>
|
||||
> These teams competed in a round-robin format, with the top four teams advancing to the
|
||||
> semi-finals. The tournament was
|
||||
> won by the England cricket team, who defeated New Zealand in the final.
|
||||
|
||||
## Try asking for a Database query for your data schema.
|
||||
|
||||
```java
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
String host = "http://localhost:11434/";
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
String prompt =
|
||||
SamplePrompts.getSampleDatabasePromptWithQuestion(
|
||||
"List all customer names who have bought one or more products");
|
||||
OllamaResult result =
|
||||
ollamaAPI.generate(OllamaModelType.SQLCODER, prompt, new OptionsBuilder().build());
|
||||
System.out.println(result.getResponse());
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
_Note: Here I've used
|
||||
a [sample prompt](https://github.com/amithkoujalgi/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
|
||||
containing a database schema from within this library for demonstration purposes._
|
||||
|
||||
You'd then get a response from the model:
|
||||
|
||||
```sql
|
||||
SELECT customers.name
|
||||
FROM sales
|
||||
JOIN customers ON sales.customer_id = customers.customer_id
|
||||
GROUP BY customers.name;
|
||||
```
|
||||
73
docs/docs/apis-generate/prompt-builder.md
Normal file
73
docs/docs/apis-generate/prompt-builder.md
Normal file
@@ -0,0 +1,73 @@
|
||||
---
|
||||
sidebar_position: 6
|
||||
---
|
||||
|
||||
# Prompt Builder
|
||||
|
||||
This is designed for prompt engineering. It allows you to easily build the prompt text for zero-shot, one-shot, few-shot
|
||||
inferences.
|
||||
|
||||
```java
|
||||
|
||||
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
|
||||
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.PromptBuilder;
|
||||
|
||||
public class AskPhi {
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
ollamaAPI.setRequestTimeoutSeconds(10);
|
||||
|
||||
String model = OllamaModelType.PHI;
|
||||
|
||||
PromptBuilder promptBuilder =
|
||||
new PromptBuilder()
|
||||
.addLine("You are an expert coder and understand different programming languages.")
|
||||
.addLine("Given a question, answer ONLY with code.")
|
||||
.addLine("Produce clean, formatted and indented code in markdown format.")
|
||||
.addLine(
|
||||
"DO NOT include ANY extra text apart from code. Follow this instruction very strictly!")
|
||||
.addLine("If there's any additional information you want to add, use comments within code.")
|
||||
.addLine("Answer only in the programming language that has been asked for.")
|
||||
.addSeparator()
|
||||
.addLine("Example: Sum 2 numbers in Python")
|
||||
.addLine("Answer:")
|
||||
.addLine("```python")
|
||||
.addLine("def sum(num1: int, num2: int) -> int:")
|
||||
.addLine(" return num1 + num2")
|
||||
.addLine("```")
|
||||
.addSeparator()
|
||||
.add("How do I read a file in Go and print its contents to stdout?");
|
||||
|
||||
OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build());
|
||||
System.out.println(response.getResponse());
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
You will get a response similar to:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
)
|
||||
|
||||
func readFile(fileName string) {
|
||||
file, err := ioutil.ReadFile(fileName)
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, "Error reading file:", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
f, _ := ioutil.ReadFile("file.txt")
|
||||
if f != nil {
|
||||
fmt.Println(f.String())
|
||||
}
|
||||
}
|
||||
```
|
||||
8
docs/docs/apis-model-management/_category_.json
Normal file
8
docs/docs/apis-model-management/_category_.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"label": "APIs - Model Management",
|
||||
"position": 2,
|
||||
"link": {
|
||||
"type": "generated-index",
|
||||
"description": "Details of APIs to manage LLMs."
|
||||
}
|
||||
}
|
||||
160
docs/docs/apis-model-management/create-model.md
Normal file
160
docs/docs/apis-model-management/create-model.md
Normal file
@@ -0,0 +1,160 @@
|
||||
---
|
||||
sidebar_position: 4
|
||||
---
|
||||
|
||||
# Create Model
|
||||
|
||||
This API lets you create a custom model on the Ollama server.
|
||||
|
||||
### Create a model from an existing Modelfile in the Ollama server
|
||||
|
||||
```java title="CreateModel.java"
|
||||
public class CreateModel {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
ollamaAPI.createModelWithFilePath("mario", "/path/to/mario/modelfile/on/ollama-server");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Create a model by passing the contents of Modelfile
|
||||
|
||||
```java title="CreateModel.java"
|
||||
public class CreateModel {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
ollamaAPI.createModelWithModelFileContents("mario", "FROM llama2\nSYSTEM You are mario from Super Mario Bros.");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Once created, you can see it when you use [list models](./list-models) API.
|
||||
|
||||
### Example of a `Modelfile`
|
||||
|
||||
```
|
||||
FROM llama2
|
||||
# sets the temperature to 1 [higher is more creative, lower is more coherent]
|
||||
PARAMETER temperature 1
|
||||
# sets the context window size to 4096, this controls how many tokens the LLM can use as context to generate the next token
|
||||
PARAMETER num_ctx 4096
|
||||
|
||||
# sets a custom system message to specify the behavior of the chat assistant
|
||||
SYSTEM You are Mario from super mario bros, acting as an assistant.
|
||||
```
|
||||
|
||||
### Format of the `Modelfile`
|
||||
|
||||
```modelfile
|
||||
# comment
|
||||
INSTRUCTION arguments
|
||||
```
|
||||
|
||||
| Instruction | Description |
|
||||
|-------------------------------------|----------------------------------------------------------------|
|
||||
| [`FROM`](#from-required) (required) | Defines the base model to use. |
|
||||
| [`PARAMETER`](#parameter) | Sets the parameters for how Ollama will run the model. |
|
||||
| [`TEMPLATE`](#template) | The full prompt template to be sent to the model. |
|
||||
| [`SYSTEM`](#system) | Specifies the system message that will be set in the template. |
|
||||
| [`ADAPTER`](#adapter) | Defines the (Q)LoRA adapters to apply to the model. |
|
||||
| [`LICENSE`](#license) | Specifies the legal license. |
|
||||
|
||||
#### PARAMETER
|
||||
|
||||
The `PARAMETER` instruction defines a parameter that can be set when the model is run.
|
||||
|
||||
| Parameter | Description | Value Type | Example Usage |
|
||||
|----------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------|----------------------|
|
||||
| mirostat | Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0) | int | mirostat 0 |
|
||||
| mirostat_eta | Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. (Default: 0.1) | float | mirostat_eta 0.1 |
|
||||
| mirostat_tau | Controls the balance between coherence and diversity of the output. A lower value will result in more focused and coherent text. (Default: 5.0) | float | mirostat_tau 5.0 |
|
||||
| num_ctx | Sets the size of the context window used to generate the next token. (Default: 2048) | int | num_ctx 4096 |
|
||||
| num_gqa | The number of GQA groups in the transformer layer. Required for some models, for example it is 8 for llama2:70b | int | num_gqa 1 |
|
||||
| num_gpu | The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable. | int | num_gpu 50 |
|
||||
| num_thread | Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). | int | num_thread 8 |
|
||||
| repeat_last_n | Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx) | int | repeat_last_n 64 |
|
||||
| repeat_penalty | Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1) | float | repeat_penalty 1.1 |
|
||||
| temperature | The temperature of the model. Increasing the temperature will make the model answer more creatively. (Default: 0.8) | float | temperature 0.7 |
|
||||
| seed | Sets the random number seed to use for generation. Setting this to a specific number will make the model generate the same text for the same prompt. (Default: 0) | int | seed 42 |
|
||||
| stop | Sets the stop sequences to use. When this pattern is encountered the LLM will stop generating text and return. Multiple stop patterns may be set by specifying multiple separate `stop` parameters in a modelfile. | string | stop "AI assistant:" |
|
||||
| tfs_z | Tail free sampling is used to reduce the impact of less probable tokens from the output. A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. (default: 1) | float | tfs_z 1 |
|
||||
| num_predict | Maximum number of tokens to predict when generating text. (Default: 128, -1 = infinite generation, -2 = fill context) | int | num_predict 42 |
|
||||
| top_k | Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40) | int | top_k 40 |
|
||||
| top_p | Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9) | float | top_p 0.9 |
|
||||
|
||||
#### TEMPLATE
|
||||
|
||||
`TEMPLATE` of the full prompt template to be passed into the model. It may include (optionally) a system message and a
|
||||
user's prompt. This is used to create a full custom prompt, and syntax may be model specific. You can usually find the
|
||||
template for a given model in the readme for that model.
|
||||
|
||||
#### Template Variables
|
||||
|
||||
| Variable | Description |
|
||||
|-----------------|---------------------------------------------------------------------------------------------------------------|
|
||||
| `{{ .System }}` | The system message used to specify custom behavior, this must also be set in the Modelfile as an instruction. |
|
||||
| `{{ .Prompt }}` | The incoming prompt, this is not specified in the model file and will be set based on input. |
|
||||
| `{{ .First }}` | A boolean value used to render specific template information for the first generation of a session. |
|
||||
|
||||
```modelfile
|
||||
TEMPLATE """
|
||||
{{- if .First }}
|
||||
### System:
|
||||
{{ .System }}
|
||||
{{- end }}
|
||||
|
||||
### User:
|
||||
{{ .Prompt }}
|
||||
|
||||
### Response:
|
||||
"""
|
||||
|
||||
SYSTEM """<system message>"""
|
||||
```
|
||||
|
||||
### SYSTEM
|
||||
|
||||
The `SYSTEM` instruction specifies the system message to be used in the template, if applicable.
|
||||
|
||||
```modelfile
|
||||
SYSTEM """<system message>"""
|
||||
```
|
||||
|
||||
### ADAPTER
|
||||
|
||||
The `ADAPTER` instruction specifies the LoRA adapter to apply to the base model. The value of this instruction should be
|
||||
an absolute path or a path relative to the Modelfile and the file must be in a GGML file format. The adapter should be
|
||||
tuned from the base model otherwise the behaviour is undefined.
|
||||
|
||||
```modelfile
|
||||
ADAPTER ./ollama-lora.bin
|
||||
```
|
||||
|
||||
### LICENSE
|
||||
|
||||
The `LICENSE` instruction allows you to specify the legal license under which the model used with this Modelfile is
|
||||
shared or distributed.
|
||||
|
||||
```modelfile
|
||||
LICENSE """
|
||||
<license text>
|
||||
"""
|
||||
```
|
||||
|
||||
## Notes
|
||||
|
||||
- the **`Modelfile` is not case sensitive**. In the examples, uppercase instructions are used to make it easier to
|
||||
distinguish it from arguments.
|
||||
- Instructions can be in any order. In the examples, the `FROM` instruction is first to keep it easily readable.
|
||||
|
||||
Read more about Modelfile: https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md
|
||||
26
docs/docs/apis-model-management/delete-model.md
Normal file
26
docs/docs/apis-model-management/delete-model.md
Normal file
@@ -0,0 +1,26 @@
|
||||
---
|
||||
sidebar_position: 5
|
||||
---
|
||||
|
||||
# Delete Model
|
||||
|
||||
This API lets you delete a model from the Ollama server.
|
||||
|
||||
```java title="DeleteModel.java"
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
ollamaAPI.setVerbose(false);
|
||||
|
||||
ollamaAPI.deleteModel("mycustommodel", true);
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
Once deleted, you can verify it using [list models](./list-models) API.
|
||||
34
docs/docs/apis-model-management/get-model-details.md
Normal file
34
docs/docs/apis-model-management/get-model-details.md
Normal file
File diff suppressed because one or more lines are too long
30
docs/docs/apis-model-management/list-models.md
Normal file
30
docs/docs/apis-model-management/list-models.md
Normal file
@@ -0,0 +1,30 @@
|
||||
---
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
# List Models
|
||||
|
||||
This API lets you list available models on the Ollama server.
|
||||
|
||||
```java title="ListModels.java"
|
||||
public class ListModels {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
List<Model> models = ollamaAPI.listModels();
|
||||
|
||||
models.forEach(model -> System.out.println(model.getName()));
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
If you have any models already downloaded on Ollama server, you would have them listed as follows:
|
||||
|
||||
```bash
|
||||
llama2:latest
|
||||
sqlcoder:latest
|
||||
```
|
||||
23
docs/docs/apis-model-management/pull-model.md
Normal file
23
docs/docs/apis-model-management/pull-model.md
Normal file
@@ -0,0 +1,23 @@
|
||||
---
|
||||
sidebar_position: 2
|
||||
---
|
||||
|
||||
# Pull Model
|
||||
|
||||
This API lets you pull a model on the Ollama server.
|
||||
|
||||
```java title="PullModel.java"
|
||||
public class Main {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
ollamaAPI.pullModel(OllamaModelType.LLAMA2);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Once downloaded, you can see them when you use [list models](./list-models) API.
|
||||
@@ -2,46 +2,132 @@
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
# Tutorial Intro
|
||||
# Introduction
|
||||
|
||||
Let's discover **Docusaurus in less than 5 minutes**.
|
||||
Let's get started with **Ollama4j**.
|
||||
|
||||
## 🦙 What is Ollama?
|
||||
|
||||
[Ollama](https://ollama.ai/) is an advanced AI tool that allows users to easily set up and run large language models
|
||||
locally (in CPU and GPU
|
||||
modes). With Ollama, users can leverage powerful language models such as Llama 2 and even customize and create their own
|
||||
models.
|
||||
|
||||
## 👨💻 Why Ollama4j?
|
||||
|
||||
Ollama4j was built for the simple purpose of integrating Ollama with Java applications.
|
||||
|
||||
```mermaid
|
||||
flowchart LR
|
||||
o4j[Ollama4j]
|
||||
o[Ollama Server]
|
||||
o4j -->|Communicates with| o;
|
||||
m[Models]
|
||||
p[Your Java Project]
|
||||
subgraph Your Java Environment
|
||||
direction TB
|
||||
p -->|Uses| o4j
|
||||
end
|
||||
subgraph Ollama Setup
|
||||
direction TB
|
||||
o -->|Manages| m
|
||||
end
|
||||
```
|
||||
|
||||
## Getting Started
|
||||
|
||||
Get started by **creating a new site**.
|
||||
|
||||
Or **try Docusaurus immediately** with **[docusaurus.new](https://docusaurus.new)**.
|
||||
|
||||
### What you'll need
|
||||
|
||||
- [Node.js](https://nodejs.org/en/download/) version 18.0 or above:
|
||||
- When installing Node.js, you are recommended to check all checkboxes related to dependencies.
|
||||
- **[Ollama](https://ollama.ai/download)**
|
||||
- **[Oracle JDK](https://www.oracle.com/java/technologies/javase/jdk11-archive-downloads.html)** or
|
||||
**[Open JDK](https://jdk.java.net/archive/)** 11.0 or above.
|
||||
- **[Maven](https://maven.apache.org/download.cgi)**
|
||||
|
||||
## Generate a new site
|
||||
### Start Ollama server
|
||||
|
||||
Generate a new Docusaurus site using the **classic template**.
|
||||
The easiest way of getting started with Ollama server is with [Docker](https://docs.docker.com/get-started/overview/).
|
||||
But if you choose to run the
|
||||
Ollama server directly, **[download](https://ollama.ai/download)** the distribution of your choice
|
||||
and follow the installation process.
|
||||
|
||||
The classic template will automatically be added to your project after you run the command:
|
||||
#### With Docker
|
||||
|
||||
##### Run in CPU mode:
|
||||
|
||||
```bash
|
||||
npm init docusaurus@latest my-website classic
|
||||
docker run -it -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama
|
||||
```
|
||||
|
||||
You can type this command into Command Prompt, Powershell, Terminal, or any other integrated terminal of your code editor.
|
||||
|
||||
The command also installs all necessary dependencies you need to run Docusaurus.
|
||||
|
||||
## Start your site
|
||||
|
||||
Run the development server:
|
||||
##### Run in GPU mode:
|
||||
|
||||
```bash
|
||||
cd my-website
|
||||
npm run start
|
||||
docker run -it --gpus=all -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama
|
||||
```
|
||||
|
||||
The `cd` command changes the directory you're working with. In order to work with your newly created Docusaurus site, you'll need to navigate the terminal there.
|
||||
You can type this command into Command Prompt, Powershell, Terminal, or any other integrated
|
||||
terminal of your code editor.
|
||||
|
||||
The `npm run start` command builds your website locally and serves it through a development server, ready for you to view at http://localhost:3000/.
|
||||
The command runs the Ollama server locally at **http://localhost:11434/**.
|
||||
|
||||
Open `docs/intro.md` (this page) and edit some lines: the site **reloads automatically** and displays your changes.
|
||||
### Setup your project
|
||||
|
||||
Get started by **creating a new Maven project** on your favorite IDE.
|
||||
|
||||
Add the dependency to your project's `pom.xml`.
|
||||
|
||||
```xml
|
||||
|
||||
<dependency>
|
||||
<groupId>io.github.amithkoujalgi</groupId>
|
||||
<artifactId>ollama4j</artifactId>
|
||||
<version>1.0.27</version>
|
||||
</dependency>
|
||||
```
|
||||
|
||||
Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j).
|
||||
|
||||
You might want to include an implementation of [SLF4J](https://www.slf4j.org/) logger in your `pom.xml` file. For
|
||||
example,
|
||||
|
||||
Use `slf4j-jdk14` implementation:
|
||||
|
||||
```xml
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-jdk14</artifactId>
|
||||
<version>2.0.9</version> <!--Replace with appropriate version-->
|
||||
</dependency>
|
||||
```
|
||||
|
||||
or use `logback-classic` implementation:
|
||||
|
||||
```xml
|
||||
|
||||
<dependency>
|
||||
<groupId>ch.qos.logback</groupId>
|
||||
<artifactId>logback-classic</artifactId>
|
||||
<version>1.3.11</version> <!--Replace with appropriate version-->
|
||||
</dependency>
|
||||
```
|
||||
|
||||
or use other suitable implementations.
|
||||
|
||||
Create a new Java class in your project and add this code.
|
||||
|
||||
```java
|
||||
public class OllamaAPITest {
|
||||
|
||||
public static void main(String[] args) {
|
||||
String host = "http://localhost:11434/";
|
||||
|
||||
OllamaAPI ollamaAPI = new OllamaAPI(host);
|
||||
|
||||
ollamaAPI.setVerbose(true);
|
||||
|
||||
boolean isOllamaServerReachable = ollamaAPI.ping();
|
||||
|
||||
System.out.println("Is Ollama server alive: " + isOllamaServerReachable);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"label": "Tutorial - Basics",
|
||||
"position": 2,
|
||||
"link": {
|
||||
"type": "generated-index",
|
||||
"description": "5 minutes to learn the most important Docusaurus concepts."
|
||||
}
|
||||
}
|
||||
@@ -1,23 +0,0 @@
|
||||
---
|
||||
sidebar_position: 6
|
||||
---
|
||||
|
||||
# Congratulations!
|
||||
|
||||
You have just learned the **basics of Docusaurus** and made some changes to the **initial template**.
|
||||
|
||||
Docusaurus has **much more to offer**!
|
||||
|
||||
Have **5 more minutes**? Take a look at **[versioning](../tutorial-extras/manage-docs-versions.md)** and **[i18n](../tutorial-extras/translate-your-site.md)**.
|
||||
|
||||
Anything **unclear** or **buggy** in this tutorial? [Please report it!](https://github.com/facebook/docusaurus/discussions/4610)
|
||||
|
||||
## What's next?
|
||||
|
||||
- Read the [official documentation](https://docusaurus.io/)
|
||||
- Modify your site configuration with [`docusaurus.config.js`](https://docusaurus.io/docs/api/docusaurus-config)
|
||||
- Add navbar and footer items with [`themeConfig`](https://docusaurus.io/docs/api/themes/configuration)
|
||||
- Add a custom [Design and Layout](https://docusaurus.io/docs/styling-layout)
|
||||
- Add a [search bar](https://docusaurus.io/docs/search)
|
||||
- Find inspirations in the [Docusaurus showcase](https://docusaurus.io/showcase)
|
||||
- Get involved in the [Docusaurus Community](https://docusaurus.io/community/support)
|
||||
@@ -1,34 +0,0 @@
|
||||
---
|
||||
sidebar_position: 3
|
||||
---
|
||||
|
||||
# Create a Blog Post
|
||||
|
||||
Docusaurus creates a **page for each blog post**, but also a **blog index page**, a **tag system**, an **RSS** feed...
|
||||
|
||||
## Create your first Post
|
||||
|
||||
Create a file at `blog/2021-02-28-greetings.md`:
|
||||
|
||||
```md title="blog/2021-02-28-greetings.md"
|
||||
---
|
||||
slug: greetings
|
||||
title: Greetings!
|
||||
authors:
|
||||
- name: Joel Marcey
|
||||
title: Co-creator of Docusaurus 1
|
||||
url: https://github.com/JoelMarcey
|
||||
image_url: https://github.com/JoelMarcey.png
|
||||
- name: Sébastien Lorber
|
||||
title: Docusaurus maintainer
|
||||
url: https://sebastienlorber.com
|
||||
image_url: https://github.com/slorber.png
|
||||
tags: [greetings]
|
||||
---
|
||||
|
||||
Congratulations, you have made your first post!
|
||||
|
||||
Feel free to play around and edit this post as much as you like.
|
||||
```
|
||||
|
||||
A new blog post is now available at [http://localhost:3000/blog/greetings](http://localhost:3000/blog/greetings).
|
||||
@@ -1,57 +0,0 @@
|
||||
---
|
||||
sidebar_position: 2
|
||||
---
|
||||
|
||||
# Create a Document
|
||||
|
||||
Documents are **groups of pages** connected through:
|
||||
|
||||
- a **sidebar**
|
||||
- **previous/next navigation**
|
||||
- **versioning**
|
||||
|
||||
## Create your first Doc
|
||||
|
||||
Create a Markdown file at `docs/hello.md`:
|
||||
|
||||
```md title="docs/hello.md"
|
||||
# Hello
|
||||
|
||||
This is my **first Docusaurus document**!
|
||||
```
|
||||
|
||||
A new document is now available at [http://localhost:3000/docs/hello](http://localhost:3000/docs/hello).
|
||||
|
||||
## Configure the Sidebar
|
||||
|
||||
Docusaurus automatically **creates a sidebar** from the `docs` folder.
|
||||
|
||||
Add metadata to customize the sidebar label and position:
|
||||
|
||||
```md title="docs/hello.md" {1-4}
|
||||
---
|
||||
sidebar_label: 'Hi!'
|
||||
sidebar_position: 3
|
||||
---
|
||||
|
||||
# Hello
|
||||
|
||||
This is my **first Docusaurus document**!
|
||||
```
|
||||
|
||||
It is also possible to create your sidebar explicitly in `sidebars.js`:
|
||||
|
||||
```js title="sidebars.js"
|
||||
export default {
|
||||
tutorialSidebar: [
|
||||
'intro',
|
||||
// highlight-next-line
|
||||
'hello',
|
||||
{
|
||||
type: 'category',
|
||||
label: 'Tutorial',
|
||||
items: ['tutorial-basics/create-a-document'],
|
||||
},
|
||||
],
|
||||
};
|
||||
```
|
||||
@@ -1,43 +0,0 @@
|
||||
---
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
# Create a Page
|
||||
|
||||
Add **Markdown or React** files to `src/pages` to create a **standalone page**:
|
||||
|
||||
- `src/pages/index.js` → `localhost:3000/`
|
||||
- `src/pages/foo.md` → `localhost:3000/foo`
|
||||
- `src/pages/foo/bar.js` → `localhost:3000/foo/bar`
|
||||
|
||||
## Create your first React Page
|
||||
|
||||
Create a file at `src/pages/my-react-page.js`:
|
||||
|
||||
```jsx title="src/pages/my-react-page.js"
|
||||
import React from 'react';
|
||||
import Layout from '@theme/Layout';
|
||||
|
||||
export default function MyReactPage() {
|
||||
return (
|
||||
<Layout>
|
||||
<h1>My React page</h1>
|
||||
<p>This is a React page</p>
|
||||
</Layout>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
A new page is now available at [http://localhost:3000/my-react-page](http://localhost:3000/my-react-page).
|
||||
|
||||
## Create your first Markdown Page
|
||||
|
||||
Create a file at `src/pages/my-markdown-page.md`:
|
||||
|
||||
```mdx title="src/pages/my-markdown-page.md"
|
||||
# My Markdown page
|
||||
|
||||
This is a Markdown page
|
||||
```
|
||||
|
||||
A new page is now available at [http://localhost:3000/my-markdown-page](http://localhost:3000/my-markdown-page).
|
||||
@@ -1,31 +0,0 @@
|
||||
---
|
||||
sidebar_position: 5
|
||||
---
|
||||
|
||||
# Deploy your site
|
||||
|
||||
Docusaurus is a **static-site-generator** (also called **[Jamstack](https://jamstack.org/)**).
|
||||
|
||||
It builds your site as simple **static HTML, JavaScript and CSS files**.
|
||||
|
||||
## Build your site
|
||||
|
||||
Build your site **for production**:
|
||||
|
||||
```bash
|
||||
npm run build
|
||||
```
|
||||
|
||||
The static files are generated in the `build` folder.
|
||||
|
||||
## Deploy your site
|
||||
|
||||
Test your production build locally:
|
||||
|
||||
```bash
|
||||
npm run serve
|
||||
```
|
||||
|
||||
The `build` folder is now served at [http://localhost:3000/](http://localhost:3000/).
|
||||
|
||||
You can now deploy the `build` folder **almost anywhere** easily, **for free** or very small cost (read the **[Deployment Guide](https://docusaurus.io/docs/deployment)**).
|
||||
@@ -1,150 +0,0 @@
|
||||
---
|
||||
sidebar_position: 4
|
||||
---
|
||||
|
||||
# Markdown Features
|
||||
|
||||
Docusaurus supports **[Markdown](https://daringfireball.net/projects/markdown/syntax)** and a few **additional features**.
|
||||
|
||||
## Front Matter
|
||||
|
||||
Markdown documents have metadata at the top called [Front Matter](https://jekyllrb.com/docs/front-matter/):
|
||||
|
||||
```text title="my-doc.md"
|
||||
// highlight-start
|
||||
---
|
||||
id: my-doc-id
|
||||
title: My document title
|
||||
description: My document description
|
||||
slug: /my-custom-url
|
||||
---
|
||||
// highlight-end
|
||||
|
||||
## Markdown heading
|
||||
|
||||
Markdown text with [links](./hello.md)
|
||||
```
|
||||
|
||||
## Links
|
||||
|
||||
Regular Markdown links are supported, using url paths or relative file paths.
|
||||
|
||||
```md
|
||||
Let's see how to [Create a page](/create-a-page).
|
||||
```
|
||||
|
||||
```md
|
||||
Let's see how to [Create a page](./create-a-page.md).
|
||||
```
|
||||
|
||||
**Result:** Let's see how to [Create a page](./create-a-page.md).
|
||||
|
||||
## Images
|
||||
|
||||
Regular Markdown images are supported.
|
||||
|
||||
You can use absolute paths to reference images in the static directory (`static/img/docusaurus.png`):
|
||||
|
||||
```md
|
||||

|
||||
```
|
||||
|
||||

|
||||
|
||||
You can reference images relative to the current file as well. This is particularly useful to colocate images close to the Markdown files using them:
|
||||
|
||||
```md
|
||||

|
||||
```
|
||||
|
||||
## Code Blocks
|
||||
|
||||
Markdown code blocks are supported with Syntax highlighting.
|
||||
|
||||
```jsx title="src/components/HelloDocusaurus.js"
|
||||
function HelloDocusaurus() {
|
||||
return (
|
||||
<h1>Hello, Docusaurus!</h1>
|
||||
)
|
||||
}
|
||||
```
|
||||
|
||||
```jsx title="src/components/HelloDocusaurus.js"
|
||||
function HelloDocusaurus() {
|
||||
return <h1>Hello, Docusaurus!</h1>;
|
||||
}
|
||||
```
|
||||
|
||||
## Admonitions
|
||||
|
||||
Docusaurus has a special syntax to create admonitions and callouts:
|
||||
|
||||
:::tip My tip
|
||||
|
||||
Use this awesome feature option
|
||||
|
||||
:::
|
||||
|
||||
:::danger Take care
|
||||
|
||||
This action is dangerous
|
||||
|
||||
:::
|
||||
|
||||
:::tip My tip
|
||||
|
||||
Use this awesome feature option
|
||||
|
||||
:::
|
||||
|
||||
:::danger Take care
|
||||
|
||||
This action is dangerous
|
||||
|
||||
:::
|
||||
|
||||
## MDX and React Components
|
||||
|
||||
[MDX](https://mdxjs.com/) can make your documentation more **interactive** and allows using any **React components inside Markdown**:
|
||||
|
||||
```jsx
|
||||
export const Highlight = ({children, color}) => (
|
||||
<span
|
||||
style={{
|
||||
backgroundColor: color,
|
||||
borderRadius: '20px',
|
||||
color: '#fff',
|
||||
padding: '10px',
|
||||
cursor: 'pointer',
|
||||
}}
|
||||
onClick={() => {
|
||||
alert(`You clicked the color ${color} with label ${children}`)
|
||||
}}>
|
||||
{children}
|
||||
</span>
|
||||
);
|
||||
|
||||
This is <Highlight color="#25c2a0">Docusaurus green</Highlight> !
|
||||
|
||||
This is <Highlight color="#1877F2">Facebook blue</Highlight> !
|
||||
```
|
||||
|
||||
export const Highlight = ({children, color}) => (
|
||||
<span
|
||||
style={{
|
||||
backgroundColor: color,
|
||||
borderRadius: '20px',
|
||||
color: '#fff',
|
||||
padding: '10px',
|
||||
cursor: 'pointer',
|
||||
}}
|
||||
onClick={() => {
|
||||
alert(`You clicked the color ${color} with label ${children}`);
|
||||
}}>
|
||||
{children}
|
||||
</span>
|
||||
);
|
||||
|
||||
This is <Highlight color="#25c2a0">Docusaurus green</Highlight> !
|
||||
|
||||
This is <Highlight color="#1877F2">Facebook blue</Highlight> !
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"label": "Tutorial - Extras",
|
||||
"position": 3,
|
||||
"link": {
|
||||
"type": "generated-index"
|
||||
}
|
||||
}
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 25 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 27 KiB |
@@ -1,55 +0,0 @@
|
||||
---
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
# Manage Docs Versions
|
||||
|
||||
Docusaurus can manage multiple versions of your docs.
|
||||
|
||||
## Create a docs version
|
||||
|
||||
Release a version 1.0 of your project:
|
||||
|
||||
```bash
|
||||
npm run docusaurus docs:version 1.0
|
||||
```
|
||||
|
||||
The `docs` folder is copied into `versioned_docs/version-1.0` and `versions.json` is created.
|
||||
|
||||
Your docs now have 2 versions:
|
||||
|
||||
- `1.0` at `http://localhost:3000/docs/` for the version 1.0 docs
|
||||
- `current` at `http://localhost:3000/docs/next/` for the **upcoming, unreleased docs**
|
||||
|
||||
## Add a Version Dropdown
|
||||
|
||||
To navigate seamlessly across versions, add a version dropdown.
|
||||
|
||||
Modify the `docusaurus.config.js` file:
|
||||
|
||||
```js title="docusaurus.config.js"
|
||||
export default {
|
||||
themeConfig: {
|
||||
navbar: {
|
||||
items: [
|
||||
// highlight-start
|
||||
{
|
||||
type: 'docsVersionDropdown',
|
||||
},
|
||||
// highlight-end
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
The docs version dropdown appears in your navbar:
|
||||
|
||||

|
||||
|
||||
## Update an existing version
|
||||
|
||||
It is possible to edit versioned docs in their respective folder:
|
||||
|
||||
- `versioned_docs/version-1.0/hello.md` updates `http://localhost:3000/docs/hello`
|
||||
- `docs/hello.md` updates `http://localhost:3000/docs/next/hello`
|
||||
@@ -1,88 +0,0 @@
|
||||
---
|
||||
sidebar_position: 2
|
||||
---
|
||||
|
||||
# Translate your site
|
||||
|
||||
Let's translate `docs/intro.md` to French.
|
||||
|
||||
## Configure i18n
|
||||
|
||||
Modify `docusaurus.config.js` to add support for the `fr` locale:
|
||||
|
||||
```js title="docusaurus.config.js"
|
||||
export default {
|
||||
i18n: {
|
||||
defaultLocale: 'en',
|
||||
locales: ['en', 'fr'],
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
## Translate a doc
|
||||
|
||||
Copy the `docs/intro.md` file to the `i18n/fr` folder:
|
||||
|
||||
```bash
|
||||
mkdir -p i18n/fr/docusaurus-plugin-content-docs/current/
|
||||
|
||||
cp docs/intro.md i18n/fr/docusaurus-plugin-content-docs/current/intro.md
|
||||
```
|
||||
|
||||
Translate `i18n/fr/docusaurus-plugin-content-docs/current/intro.md` in French.
|
||||
|
||||
## Start your localized site
|
||||
|
||||
Start your site on the French locale:
|
||||
|
||||
```bash
|
||||
npm run start -- --locale fr
|
||||
```
|
||||
|
||||
Your localized site is accessible at [http://localhost:3000/fr/](http://localhost:3000/fr/) and the `Getting Started` page is translated.
|
||||
|
||||
:::caution
|
||||
|
||||
In development, you can only use one locale at a time.
|
||||
|
||||
:::
|
||||
|
||||
## Add a Locale Dropdown
|
||||
|
||||
To navigate seamlessly across languages, add a locale dropdown.
|
||||
|
||||
Modify the `docusaurus.config.js` file:
|
||||
|
||||
```js title="docusaurus.config.js"
|
||||
export default {
|
||||
themeConfig: {
|
||||
navbar: {
|
||||
items: [
|
||||
// highlight-start
|
||||
{
|
||||
type: 'localeDropdown',
|
||||
},
|
||||
// highlight-end
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
The locale dropdown now appears in your navbar:
|
||||
|
||||

|
||||
|
||||
## Build your localized site
|
||||
|
||||
Build your site for a specific locale:
|
||||
|
||||
```bash
|
||||
npm run build -- --locale fr
|
||||
```
|
||||
|
||||
Or build your site to include all the locales at once:
|
||||
|
||||
```bash
|
||||
npm run build
|
||||
```
|
||||
@@ -8,131 +8,139 @@ import {themes as prismThemes} from 'prism-react-renderer';
|
||||
|
||||
/** @type {import('@docusaurus/types').Config} */
|
||||
const config = {
|
||||
title: 'Ollama4J',
|
||||
tagline: 'Java library for interacting with Ollama API.',
|
||||
favicon: 'img/favicon.ico',
|
||||
title: 'Ollama4j',
|
||||
tagline: 'Java library for interacting with Ollama.',
|
||||
favicon: 'img/favicon.ico',
|
||||
|
||||
// Set the production url of your site here
|
||||
url: 'https://your-docusaurus-site.example.com',
|
||||
// Set the /<baseUrl>/ pathname under which your site is served
|
||||
// For GitHub pages deployment, it is often '/<projectName>/'
|
||||
baseUrl: '/ollama4j/',
|
||||
// Set the production url of your site here
|
||||
url: 'https://your-docusaurus-site.example.com',
|
||||
// Set the /<baseUrl>/ pathname under which your site is served
|
||||
// For GitHub pages deployment, it is often '/<projectName>/'
|
||||
baseUrl: '/ollama4j/',
|
||||
|
||||
// GitHub pages deployment config.
|
||||
// If you aren't using GitHub pages, you don't need these.
|
||||
organizationName: 'amithkoujalgi', // Usually your GitHub org/user name.
|
||||
projectName: 'ollama4j', // Usually your repo name.
|
||||
// GitHub pages deployment config.
|
||||
// If you aren't using GitHub pages, you don't need these.
|
||||
organizationName: 'amithkoujalgi', // Usually your GitHub org/user name.
|
||||
projectName: 'ollama4j', // Usually your repo name.
|
||||
|
||||
onBrokenLinks: 'throw',
|
||||
onBrokenMarkdownLinks: 'warn',
|
||||
onBrokenLinks: 'throw',
|
||||
onBrokenMarkdownLinks: 'warn',
|
||||
|
||||
// Even if you don't use internationalization, you can use this field to set
|
||||
// useful metadata like html lang. For example, if your site is Chinese, you
|
||||
// may want to replace "en" with "zh-Hans".
|
||||
i18n: {
|
||||
defaultLocale: 'en',
|
||||
locales: ['en'],
|
||||
},
|
||||
// Even if you don't use internationalization, you can use this field to set
|
||||
// useful metadata like html lang. For example, if your site is Chinese, you
|
||||
// may want to replace "en" with "zh-Hans".
|
||||
i18n: {
|
||||
defaultLocale: 'en',
|
||||
locales: ['en'],
|
||||
},
|
||||
|
||||
presets: [
|
||||
[
|
||||
'classic',
|
||||
/** @type {import('@docusaurus/preset-classic').Options} */
|
||||
({
|
||||
docs: {
|
||||
sidebarPath: './sidebars.js',
|
||||
// Please change this to your repo.
|
||||
// Remove this to remove the "edit this page" links.
|
||||
editUrl:
|
||||
'https://github.com/facebook/docusaurus/tree/main/packages/create-docusaurus/templates/shared/',
|
||||
},
|
||||
blog: {
|
||||
showReadingTime: true,
|
||||
// Please change this to your repo.
|
||||
// Remove this to remove the "edit this page" links.
|
||||
editUrl:
|
||||
'https://github.com/facebook/docusaurus/tree/main/packages/create-docusaurus/templates/shared/',
|
||||
},
|
||||
theme: {
|
||||
customCss: './src/css/custom.css',
|
||||
},
|
||||
}),
|
||||
presets: [
|
||||
[
|
||||
'classic',
|
||||
/** @type {import('@docusaurus/preset-classic').Options} */
|
||||
({
|
||||
docs: {
|
||||
path: 'docs',
|
||||
routeBasePath: '', // change this to any URL route you'd want. For example: `home` - if you want /home/intro.
|
||||
sidebarPath: './sidebars.js',
|
||||
// Please change this to your repo.
|
||||
// Remove this to remove the "edit this page" links.
|
||||
editUrl:
|
||||
'https://github.com/amithkoujalgi/ollama4j/blob/main/docs',
|
||||
},
|
||||
blog: {
|
||||
showReadingTime: true,
|
||||
// Please change this to your repo.
|
||||
// Remove this to remove the "edit this page" links.
|
||||
editUrl:
|
||||
'https://github.com/amithkoujalgi/ollama4j/blob/main/docs',
|
||||
},
|
||||
theme: {
|
||||
customCss: './src/css/custom.css',
|
||||
},
|
||||
}),
|
||||
],
|
||||
],
|
||||
],
|
||||
|
||||
themeConfig:
|
||||
themeConfig:
|
||||
/** @type {import('@docusaurus/preset-classic').ThemeConfig} */
|
||||
({
|
||||
// Replace with your project's social card
|
||||
image: 'img/docusaurus-social-card.jpg',
|
||||
navbar: {
|
||||
title: 'Ollama4J',
|
||||
logo: {
|
||||
alt: 'Ollama4J Logo',
|
||||
src: 'img/logo.svg',
|
||||
},
|
||||
items: [
|
||||
{
|
||||
type: 'docSidebar',
|
||||
sidebarId: 'tutorialSidebar',
|
||||
position: 'left',
|
||||
label: 'Tutorial',
|
||||
},
|
||||
{to: '/blog', label: 'Blog', position: 'left'},
|
||||
{to: 'https://github.com/amithkoujalgi/ollama4j', label: 'Javadoc', position: 'left'},
|
||||
{
|
||||
href: 'https://github.com/amithkoujalgi/ollama4j',
|
||||
label: 'GitHub',
|
||||
position: 'right',
|
||||
},
|
||||
],
|
||||
},
|
||||
footer: {
|
||||
style: 'dark',
|
||||
links: [
|
||||
{
|
||||
title: 'Docs',
|
||||
items: [
|
||||
{
|
||||
label: 'Tutorial',
|
||||
to: '/docs/intro',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'Community',
|
||||
items: [
|
||||
{
|
||||
label: 'Stack Overflow',
|
||||
href: 'https://stackoverflow.com/questions/tagged/ollama4j',
|
||||
},
|
||||
{
|
||||
label: 'Twitter',
|
||||
href: 'https://twitter.com/ollama4j',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'More',
|
||||
items: [
|
||||
{
|
||||
label: 'Blog',
|
||||
to: '/blog',
|
||||
},
|
||||
{
|
||||
label: 'GitHub',
|
||||
href: 'https://github.com/amithkoujalgi/ollama4j',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
copyright: `Ollama4J Documentation ${new Date().getFullYear()}. Built with Docusaurus.`,
|
||||
},
|
||||
prism: {
|
||||
theme: prismThemes.github,
|
||||
darkTheme: prismThemes.dracula,
|
||||
},
|
||||
}),
|
||||
({
|
||||
// Replace with your project's social card
|
||||
image: 'img/docusaurus-social-card.jpg',
|
||||
navbar: {
|
||||
title: 'Ollama4j',
|
||||
logo: {
|
||||
alt: 'Ollama4j Logo',
|
||||
src: 'img/logo.svg',
|
||||
},
|
||||
items: [
|
||||
{
|
||||
type: 'docSidebar',
|
||||
sidebarId: 'tutorialSidebar',
|
||||
position: 'left',
|
||||
label: 'Docs',
|
||||
},
|
||||
{to: 'https://amithkoujalgi.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
|
||||
{to: 'https://amithkoujalgi.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'},
|
||||
{to: '/blog', label: 'Blog', position: 'left'},
|
||||
{
|
||||
href: 'https://github.com/amithkoujalgi/ollama4j',
|
||||
label: 'GitHub',
|
||||
position: 'right',
|
||||
},
|
||||
],
|
||||
},
|
||||
footer: {
|
||||
style: 'dark',
|
||||
links: [
|
||||
{
|
||||
title: 'Docs',
|
||||
items: [
|
||||
{
|
||||
label: 'Tutorial',
|
||||
to: '/intro',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'Community',
|
||||
items: [
|
||||
{
|
||||
label: 'Stack Overflow',
|
||||
href: 'https://stackoverflow.com/questions/tagged/ollama4j',
|
||||
},
|
||||
{
|
||||
label: 'Twitter',
|
||||
href: 'https://twitter.com/ollama4j',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'More',
|
||||
items: [
|
||||
{
|
||||
label: 'Blog',
|
||||
to: '/blog',
|
||||
},
|
||||
{
|
||||
label: 'GitHub',
|
||||
href: 'https://github.com/amithkoujalgi/ollama4j',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
copyright: `Ollama4j Documentation ${new Date().getFullYear()}. Built with Docusaurus.`,
|
||||
},
|
||||
prism: {
|
||||
theme: prismThemes.github,
|
||||
darkTheme: prismThemes.dracula,
|
||||
additionalLanguages: ['java'],
|
||||
},
|
||||
}),
|
||||
markdown: {
|
||||
mermaid: true,
|
||||
},
|
||||
themes: ['@docusaurus/theme-mermaid']
|
||||
};
|
||||
|
||||
export default config;
|
||||
|
||||
3067
docs/package-lock.json
generated
3067
docs/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -14,8 +14,9 @@
|
||||
"write-heading-ids": "docusaurus write-heading-ids"
|
||||
},
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.0.1",
|
||||
"@docusaurus/preset-classic": "3.0.1",
|
||||
"@docusaurus/core": "^3.4.0",
|
||||
"@docusaurus/preset-classic": "^3.4.0",
|
||||
"@docusaurus/theme-mermaid": "^3.4.0",
|
||||
"@mdx-js/react": "^3.0.0",
|
||||
"clsx": "^2.0.0",
|
||||
"prism-react-renderer": "^2.3.0",
|
||||
@@ -23,8 +24,8 @@
|
||||
"react-dom": "^18.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@docusaurus/module-type-aliases": "3.0.1",
|
||||
"@docusaurus/types": "3.0.1"
|
||||
"@docusaurus/module-type-aliases": "^3.4.0",
|
||||
"@docusaurus/types": "^3.4.0"
|
||||
},
|
||||
"browserslist": {
|
||||
"production": [
|
||||
|
||||
0
docs/run.sh
Normal file → Executable file
0
docs/run.sh
Normal file → Executable file
@@ -6,25 +6,35 @@
|
||||
|
||||
/* You can override the default Infima variables here. */
|
||||
:root {
|
||||
--ifm-color-primary: #2e8555;
|
||||
--ifm-color-primary-dark: #29784c;
|
||||
--ifm-color-primary-darker: #277148;
|
||||
--ifm-color-primary-darkest: #205d3b;
|
||||
--ifm-color-primary-light: #33925d;
|
||||
--ifm-color-primary-lighter: #359962;
|
||||
--ifm-color-primary-lightest: #3cad6e;
|
||||
--ifm-code-font-size: 95%;
|
||||
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1);
|
||||
--ifm-color-primary: #2e8555;
|
||||
--ifm-color-primary-dark: #29784c;
|
||||
--ifm-color-primary-darker: #277148;
|
||||
--ifm-color-primary-darkest: #205d3b;
|
||||
--ifm-color-primary-light: #33925d;
|
||||
--ifm-color-primary-lighter: #359962;
|
||||
--ifm-color-primary-lightest: #3cad6e;
|
||||
--ifm-code-font-size: 95%;
|
||||
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1);
|
||||
}
|
||||
|
||||
/* For readability concerns, you should choose a lighter palette in dark mode. */
|
||||
[data-theme='dark'] {
|
||||
--ifm-color-primary: #25c2a0;
|
||||
--ifm-color-primary-dark: #21af90;
|
||||
--ifm-color-primary-darker: #1fa588;
|
||||
--ifm-color-primary-darkest: #1a8870;
|
||||
--ifm-color-primary-light: #29d5b0;
|
||||
--ifm-color-primary-lighter: #32d8b4;
|
||||
--ifm-color-primary-lightest: #4fddbf;
|
||||
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3);
|
||||
--ifm-color-primary: #25c2a0;
|
||||
--ifm-color-primary-dark: #21af90;
|
||||
--ifm-color-primary-darker: #1fa588;
|
||||
--ifm-color-primary-darkest: #1a8870;
|
||||
--ifm-color-primary-light: #29d5b0;
|
||||
--ifm-color-primary-lighter: #32d8b4;
|
||||
--ifm-color-primary-lightest: #4fddbf;
|
||||
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3);
|
||||
}
|
||||
|
||||
article > header > h1 {
|
||||
font-size: 2rem !important;
|
||||
}
|
||||
|
||||
div > h1,
|
||||
header > h1,
|
||||
h2 > a {
|
||||
font-size: 2rem !important;
|
||||
}
|
||||
@@ -8,36 +8,33 @@ import Heading from '@theme/Heading';
|
||||
import styles from './index.module.css';
|
||||
|
||||
function HomepageHeader() {
|
||||
const {siteConfig} = useDocusaurusContext();
|
||||
return (
|
||||
<header className={clsx('hero hero--primary', styles.heroBanner)}>
|
||||
<div className="container">
|
||||
<Heading as="h1" className="hero__title">
|
||||
{siteConfig.title}
|
||||
</Heading>
|
||||
<p className="hero__subtitle">{siteConfig.tagline}</p>
|
||||
<div className={styles.buttons}>
|
||||
<Link
|
||||
className="button button--secondary button--lg"
|
||||
to="/docs/intro">
|
||||
Getting Started
|
||||
</Link>
|
||||
const {siteConfig} = useDocusaurusContext();
|
||||
return (<header className={clsx('hero hero--primary', styles.heroBanner)}>
|
||||
<div className="container">
|
||||
<Heading as="h1" className="hero__title">
|
||||
{siteConfig.title}
|
||||
</Heading>
|
||||
<img src="img/logo.svg" alt="Ollama4j Logo" className={styles.logo} style={{maxWidth: '20vh'}}/>
|
||||
<p className="hero__subtitle">{siteConfig.tagline}</p>
|
||||
<div className={styles.buttons}>
|
||||
<Link
|
||||
className="button button--secondary button--lg"
|
||||
to="/intro">
|
||||
Getting Started
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
);
|
||||
</header>);
|
||||
}
|
||||
|
||||
export default function Home() {
|
||||
const {siteConfig} = useDocusaurusContext();
|
||||
return (
|
||||
<Layout
|
||||
title={`Hello from ${siteConfig.title}`}
|
||||
description="Description will go into a meta tag in <head />">
|
||||
<HomepageHeader />
|
||||
<main>
|
||||
<HomepageFeatures />
|
||||
</main>
|
||||
</Layout>
|
||||
);
|
||||
const {siteConfig} = useDocusaurusContext();
|
||||
return (<Layout
|
||||
title={`Hello from ${siteConfig.title}`}
|
||||
description="Description will go into a meta tag in <head />">
|
||||
<HomepageHeader/>
|
||||
<main>
|
||||
<HomepageFeatures/>
|
||||
</main>
|
||||
</Layout>);
|
||||
}
|
||||
|
||||
BIN
logo-small.png
Normal file
BIN
logo-small.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 5.0 KiB |
155
pom.xml
155
pom.xml
@@ -1,14 +1,16 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<groupId>io.github.amithkoujalgi</groupId>
|
||||
<artifactId>ollama4j</artifactId>
|
||||
<version>1.0.28-SNAPSHOT</version>
|
||||
<version>ollama4j-revision</version>
|
||||
|
||||
<name>Ollama4j</name>
|
||||
<description>Java library for interacting with Ollama API.</description>
|
||||
<url>https://github.com/amithkoujalgi/ollama4j</url>
|
||||
<packaging>jar</packaging>
|
||||
|
||||
<properties>
|
||||
<maven.compiler.source>11</maven.compiler.source>
|
||||
@@ -99,7 +101,7 @@
|
||||
<configuration>
|
||||
<skipTests>${skipUnitTests}</skipTests>
|
||||
<includes>
|
||||
<include>**/unittests/*.java</include>
|
||||
<include>**/unittests/**/*.java</include>
|
||||
</includes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
@@ -127,15 +129,15 @@
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-release-plugin</artifactId>
|
||||
<version>3.0.1</version>
|
||||
<configuration>
|
||||
<!-- <goals>install</goals>-->
|
||||
<tagNameFormat>v@{project.version}</tagNameFormat>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<!-- <plugin>-->
|
||||
<!-- <groupId>org.apache.maven.plugins</groupId>-->
|
||||
<!-- <artifactId>maven-release-plugin</artifactId>-->
|
||||
<!-- <version>3.0.1</version>-->
|
||||
<!-- <configuration>-->
|
||||
<!-- <!– <goals>install</goals>–>-->
|
||||
<!-- <tagNameFormat>v@{project.version}</tagNameFormat>-->
|
||||
<!-- </configuration>-->
|
||||
<!-- </plugin>-->
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
@@ -149,12 +151,17 @@
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<version>2.15.3</version>
|
||||
<version>2.17.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||
<artifactId>jackson-datatype-jsr310</artifactId>
|
||||
<version>2.17.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ch.qos.logback</groupId>
|
||||
<artifactId>logback-classic</artifactId>
|
||||
<version>1.3.11</version>
|
||||
<version>1.5.6</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
@@ -174,16 +181,31 @@
|
||||
<version>4.1.0</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.json</groupId>
|
||||
<artifactId>json</artifactId>
|
||||
<version>20240205</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<!-- <distributionManagement>-->
|
||||
<!-- <snapshotRepository>-->
|
||||
<!-- <id>ossrh</id>-->
|
||||
<!-- <url>https://s01.oss.sonatype.org/content/repositories/snapshots</url>-->
|
||||
<!-- </snapshotRepository>-->
|
||||
<!-- <repository>-->
|
||||
<!-- <id>ossrh</id>-->
|
||||
<!-- <url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2</url>-->
|
||||
<!-- </repository>-->
|
||||
<!-- </distributionManagement>-->
|
||||
|
||||
<!-- Replaced publishing packages to GitHub Packages instead of Maven central -->
|
||||
<distributionManagement>
|
||||
<snapshotRepository>
|
||||
<id>ossrh</id>
|
||||
<url>https://s01.oss.sonatype.org/content/repositories/snapshots</url>
|
||||
</snapshotRepository>
|
||||
<repository>
|
||||
<id>ossrh</id>
|
||||
<url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2</url>
|
||||
<id>github</id>
|
||||
<name>GitHub Packages</name>
|
||||
<url>https://maven.pkg.github.com/amithkoujalgi/ollama4j</url>
|
||||
</repository>
|
||||
</distributionManagement>
|
||||
|
||||
@@ -198,6 +220,29 @@
|
||||
<activation>
|
||||
<activeByDefault>true</activeByDefault>
|
||||
</activation>
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.jacoco</groupId>
|
||||
<artifactId>jacoco-maven-plugin</artifactId>
|
||||
<version>0.8.11</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<goals>
|
||||
<goal>prepare-agent</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>report</id>
|
||||
<phase>test</phase>
|
||||
<goals>
|
||||
<goal>report</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</profile>
|
||||
<profile>
|
||||
<id>integration-tests</id>
|
||||
@@ -216,39 +261,59 @@
|
||||
</properties>
|
||||
<build>
|
||||
<plugins>
|
||||
<!-- <plugin>-->
|
||||
<!-- <groupId>org.apache.maven.plugins</groupId>-->
|
||||
<!-- <artifactId>maven-gpg-plugin</artifactId>-->
|
||||
<!-- <version>3.1.0</version>-->
|
||||
<!-- <executions>-->
|
||||
<!-- <execution>-->
|
||||
<!-- <id>sign-artifacts</id>-->
|
||||
<!-- <phase>verify</phase>-->
|
||||
<!-- <goals>-->
|
||||
<!-- <goal>sign</goal>-->
|
||||
<!-- </goals>-->
|
||||
<!-- <configuration>-->
|
||||
<!-- <!– Prevent gpg from using pinentry programs. Fixes:-->
|
||||
<!-- gpg: signing failed: Inappropriate ioctl for device –>-->
|
||||
<!-- <gpgArguments>-->
|
||||
<!-- <arg>--pinentry-mode</arg>-->
|
||||
<!-- <arg>loopback</arg>-->
|
||||
<!-- </gpgArguments>-->
|
||||
<!-- </configuration>-->
|
||||
<!-- </execution>-->
|
||||
<!-- </executions>-->
|
||||
<!-- </plugin>-->
|
||||
<!-- <plugin>-->
|
||||
<!-- <groupId>org.sonatype.plugins</groupId>-->
|
||||
<!-- <artifactId>nexus-staging-maven-plugin</artifactId>-->
|
||||
<!-- <version>1.6.13</version>-->
|
||||
<!-- <extensions>true</extensions>-->
|
||||
<!-- <configuration>-->
|
||||
<!-- <serverId>ossrh</serverId>-->
|
||||
<!-- <nexusUrl>https://s01.oss.sonatype.org/</nexusUrl>-->
|
||||
<!-- <autoReleaseAfterClose>true</autoReleaseAfterClose>-->
|
||||
<!-- </configuration>-->
|
||||
<!-- </plugin>-->
|
||||
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-gpg-plugin</artifactId>
|
||||
<version>3.1.0</version>
|
||||
<groupId>org.jacoco</groupId>
|
||||
<artifactId>jacoco-maven-plugin</artifactId>
|
||||
<version>0.8.7</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>sign-artifacts</id>
|
||||
<phase>verify</phase>
|
||||
<goals>
|
||||
<goal>sign</goal>
|
||||
<goal>prepare-agent</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>report</id>
|
||||
<phase>test</phase>
|
||||
<goals>
|
||||
<goal>report</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<!-- Prevent gpg from using pinentry programs. Fixes:
|
||||
gpg: signing failed: Inappropriate ioctl for device -->
|
||||
<gpgArguments>
|
||||
<arg>--pinentry-mode</arg>
|
||||
<arg>loopback</arg>
|
||||
</gpgArguments>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.sonatype.plugins</groupId>
|
||||
<artifactId>nexus-staging-maven-plugin</artifactId>
|
||||
<version>1.6.13</version>
|
||||
<extensions>true</extensions>
|
||||
<configuration>
|
||||
<serverId>ossrh</serverId>
|
||||
<nexusUrl>https://s01.oss.sonatype.org/</nexusUrl>
|
||||
<autoReleaseAfterClose>true</autoReleaseAfterClose>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</profile>
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,18 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedList;
|
||||
import java.util.Queue;
|
||||
|
||||
public class OllamaResultStream extends LinkedList<String> implements Queue<String> {
|
||||
@Override
|
||||
public String poll() {
|
||||
StringBuilder tokens = new StringBuilder();
|
||||
Iterator<String> iterator = this.listIterator();
|
||||
while (iterator.hasNext()) {
|
||||
tokens.append(iterator.next());
|
||||
iterator.remove();
|
||||
}
|
||||
return tokens.toString();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.exceptions;
|
||||
|
||||
public class ToolInvocationException extends Exception {
|
||||
|
||||
public ToolInvocationException(String s, Exception e) {
|
||||
super(s, e);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.exceptions;
|
||||
|
||||
public class ToolNotFoundException extends Exception {
|
||||
|
||||
public ToolNotFoundException(String s) {
|
||||
super(s);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,14 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.impl;
|
||||
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler;
|
||||
|
||||
public class ConsoleOutputStreamHandler implements OllamaStreamHandler {
|
||||
private final StringBuffer response = new StringBuffer();
|
||||
|
||||
@Override
|
||||
public void accept(String message) {
|
||||
String substr = message.substring(response.length());
|
||||
response.append(substr);
|
||||
System.out.print(substr);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,13 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
@Data
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class BasicAuth {
|
||||
private String username;
|
||||
private String password;
|
||||
}
|
||||
@@ -1,14 +1,22 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.OffsetDateTime;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
||||
import lombok.Data;
|
||||
|
||||
@Data
|
||||
public class Model {
|
||||
|
||||
private String name;
|
||||
private String model;
|
||||
@JsonProperty("modified_at")
|
||||
private String modifiedAt;
|
||||
private OffsetDateTime modifiedAt;
|
||||
@JsonProperty("expires_at")
|
||||
private OffsetDateTime expiresAt;
|
||||
private String digest;
|
||||
private long size;
|
||||
@JsonProperty("details")
|
||||
@@ -33,4 +41,13 @@ public class Model {
|
||||
return name.split(":")[1];
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
try {
|
||||
return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -2,7 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import java.util.Map;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
||||
import lombok.Data;
|
||||
|
||||
@Data
|
||||
@@ -16,5 +17,14 @@ public class ModelDetail {
|
||||
private String parameters;
|
||||
private String template;
|
||||
private String system;
|
||||
private Map<String, String> details;
|
||||
private ModelMeta details;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
try {
|
||||
return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
||||
import lombok.Data;
|
||||
|
||||
@Data
|
||||
@@ -21,4 +23,13 @@ public class ModelMeta {
|
||||
|
||||
@JsonProperty("quantization_level")
|
||||
private String quantizationLevel;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
try {
|
||||
return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,155 +0,0 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
||||
|
||||
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.URI;
|
||||
import java.net.http.HttpClient;
|
||||
import java.net.http.HttpRequest;
|
||||
import java.net.http.HttpResponse;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.time.Duration;
|
||||
import java.util.LinkedList;
|
||||
import java.util.Queue;
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public class OllamaAsyncResultCallback extends Thread {
|
||||
private final HttpClient client;
|
||||
private final URI uri;
|
||||
private final OllamaRequestModel ollamaRequestModel;
|
||||
private final Queue<String> queue = new LinkedList<>();
|
||||
private String result;
|
||||
private boolean isDone;
|
||||
private boolean succeeded;
|
||||
|
||||
private long requestTimeoutSeconds;
|
||||
|
||||
private int httpStatusCode;
|
||||
private long responseTime = 0;
|
||||
|
||||
public OllamaAsyncResultCallback(
|
||||
HttpClient client,
|
||||
URI uri,
|
||||
OllamaRequestModel ollamaRequestModel,
|
||||
long requestTimeoutSeconds) {
|
||||
this.client = client;
|
||||
this.ollamaRequestModel = ollamaRequestModel;
|
||||
this.uri = uri;
|
||||
this.isDone = false;
|
||||
this.result = "";
|
||||
this.queue.add("");
|
||||
this.requestTimeoutSeconds = requestTimeoutSeconds;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
long startTime = System.currentTimeMillis();
|
||||
HttpRequest request =
|
||||
HttpRequest.newBuilder(uri)
|
||||
.POST(
|
||||
HttpRequest.BodyPublishers.ofString(
|
||||
Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)))
|
||||
.header("Content-Type", "application/json")
|
||||
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
|
||||
.build();
|
||||
HttpResponse<InputStream> response =
|
||||
client.send(request, HttpResponse.BodyHandlers.ofInputStream());
|
||||
int statusCode = response.statusCode();
|
||||
this.httpStatusCode = statusCode;
|
||||
|
||||
InputStream responseBodyStream = response.body();
|
||||
try (BufferedReader reader =
|
||||
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
|
||||
String line;
|
||||
StringBuilder responseBuffer = new StringBuilder();
|
||||
while ((line = reader.readLine()) != null) {
|
||||
if (statusCode == 404) {
|
||||
OllamaErrorResponseModel ollamaResponseModel =
|
||||
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
|
||||
queue.add(ollamaResponseModel.getError());
|
||||
responseBuffer.append(ollamaResponseModel.getError());
|
||||
} else {
|
||||
OllamaResponseModel ollamaResponseModel =
|
||||
Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
|
||||
queue.add(ollamaResponseModel.getResponse());
|
||||
if (!ollamaResponseModel.isDone()) {
|
||||
responseBuffer.append(ollamaResponseModel.getResponse());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.isDone = true;
|
||||
this.succeeded = true;
|
||||
this.result = responseBuffer.toString();
|
||||
long endTime = System.currentTimeMillis();
|
||||
responseTime = endTime - startTime;
|
||||
}
|
||||
if (statusCode != 200) {
|
||||
throw new OllamaBaseException(this.result);
|
||||
}
|
||||
} catch (IOException | InterruptedException | OllamaBaseException e) {
|
||||
this.isDone = true;
|
||||
this.succeeded = false;
|
||||
this.result = "[FAILED] " + e.getMessage();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the status of the thread. This does not indicate that the request was successful or a
|
||||
* failure, rather it is just a status flag to indicate if the thread is active or ended.
|
||||
*
|
||||
* @return boolean - status
|
||||
*/
|
||||
public boolean isComplete() {
|
||||
return isDone;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the HTTP response status code for the request that was made to Ollama server.
|
||||
*
|
||||
* @return int - the status code for the request
|
||||
*/
|
||||
public int getHttpStatusCode() {
|
||||
return httpStatusCode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the status of the request. Indicates if the request was successful or a failure. If the
|
||||
* request was a failure, the `getResponse()` method will return the error message.
|
||||
*
|
||||
* @return boolean - status
|
||||
*/
|
||||
public boolean isSucceeded() {
|
||||
return succeeded;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the final response when the execution completes. Does not return intermediate results.
|
||||
*
|
||||
* @return String - response text
|
||||
*/
|
||||
public String getResponse() {
|
||||
return result;
|
||||
}
|
||||
|
||||
public Queue<String> getStream() {
|
||||
return queue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the response time in milliseconds.
|
||||
*
|
||||
* @return long - response time in milliseconds.
|
||||
*/
|
||||
public long getResponseTime() {
|
||||
return responseTime;
|
||||
}
|
||||
|
||||
public void setRequestTimeoutSeconds(long requestTimeoutSeconds) {
|
||||
this.requestTimeoutSeconds = requestTimeoutSeconds;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,124 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
||||
|
||||
import io.github.amithkoujalgi.ollama4j.core.OllamaResultStream;
|
||||
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
||||
import lombok.Data;
|
||||
import lombok.EqualsAndHashCode;
|
||||
import lombok.Getter;
|
||||
import lombok.Setter;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.http.HttpClient;
|
||||
import java.net.http.HttpRequest;
|
||||
import java.net.http.HttpResponse;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.time.Duration;
|
||||
|
||||
@Data
|
||||
@EqualsAndHashCode(callSuper = true)
|
||||
@SuppressWarnings("unused")
|
||||
public class OllamaAsyncResultStreamer extends Thread {
|
||||
private final HttpRequest.Builder requestBuilder;
|
||||
private final OllamaGenerateRequestModel ollamaRequestModel;
|
||||
private final OllamaResultStream stream = new OllamaResultStream();
|
||||
private String completeResponse;
|
||||
|
||||
|
||||
/**
|
||||
* -- GETTER -- Returns the status of the request. Indicates if the request was successful or a
|
||||
* failure. If the request was a failure, the `getResponse()` method will return the error
|
||||
* message.
|
||||
*/
|
||||
@Getter
|
||||
private boolean succeeded;
|
||||
|
||||
@Setter
|
||||
private long requestTimeoutSeconds;
|
||||
|
||||
/**
|
||||
* -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama
|
||||
* server.
|
||||
*/
|
||||
@Getter
|
||||
private int httpStatusCode;
|
||||
|
||||
/**
|
||||
* -- GETTER -- Returns the response time in milliseconds.
|
||||
*/
|
||||
@Getter
|
||||
private long responseTime = 0;
|
||||
|
||||
public OllamaAsyncResultStreamer(
|
||||
HttpRequest.Builder requestBuilder,
|
||||
OllamaGenerateRequestModel ollamaRequestModel,
|
||||
long requestTimeoutSeconds) {
|
||||
this.requestBuilder = requestBuilder;
|
||||
this.ollamaRequestModel = ollamaRequestModel;
|
||||
this.completeResponse = "";
|
||||
this.stream.add("");
|
||||
this.requestTimeoutSeconds = requestTimeoutSeconds;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
ollamaRequestModel.setStream(true);
|
||||
HttpClient httpClient = HttpClient.newHttpClient();
|
||||
try {
|
||||
long startTime = System.currentTimeMillis();
|
||||
HttpRequest request =
|
||||
requestBuilder
|
||||
.POST(
|
||||
HttpRequest.BodyPublishers.ofString(
|
||||
Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)))
|
||||
.header("Content-Type", "application/json")
|
||||
.timeout(Duration.ofSeconds(requestTimeoutSeconds))
|
||||
.build();
|
||||
HttpResponse<InputStream> response =
|
||||
httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
|
||||
int statusCode = response.statusCode();
|
||||
this.httpStatusCode = statusCode;
|
||||
|
||||
InputStream responseBodyStream = response.body();
|
||||
try (BufferedReader reader =
|
||||
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
|
||||
String line;
|
||||
StringBuilder responseBuffer = new StringBuilder();
|
||||
while ((line = reader.readLine()) != null) {
|
||||
if (statusCode == 404) {
|
||||
OllamaErrorResponseModel ollamaResponseModel =
|
||||
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
|
||||
stream.add(ollamaResponseModel.getError());
|
||||
responseBuffer.append(ollamaResponseModel.getError());
|
||||
} else {
|
||||
OllamaGenerateResponseModel ollamaResponseModel =
|
||||
Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
|
||||
String res = ollamaResponseModel.getResponse();
|
||||
stream.add(res);
|
||||
if (!ollamaResponseModel.isDone()) {
|
||||
responseBuffer.append(res);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.succeeded = true;
|
||||
this.completeResponse = responseBuffer.toString();
|
||||
long endTime = System.currentTimeMillis();
|
||||
responseTime = endTime - startTime;
|
||||
}
|
||||
if (statusCode != 200) {
|
||||
throw new OllamaBaseException(this.completeResponse);
|
||||
}
|
||||
} catch (IOException | InterruptedException | OllamaBaseException e) {
|
||||
this.succeeded = false;
|
||||
this.completeResponse = "[FAILED] " + e.getMessage();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,35 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
||||
|
||||
import java.util.Map;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
|
||||
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.BooleanToJsonFormatFlagSerializer;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
||||
import lombok.Data;
|
||||
|
||||
@Data
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
public abstract class OllamaCommonRequestModel {
|
||||
|
||||
protected String model;
|
||||
@JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)
|
||||
@JsonProperty(value = "format")
|
||||
protected Boolean returnFormatJson;
|
||||
protected Map<String, Object> options;
|
||||
protected String template;
|
||||
protected boolean stream;
|
||||
@JsonProperty(value = "keep_alive")
|
||||
protected String keepAlive;
|
||||
|
||||
|
||||
public String toString() {
|
||||
try {
|
||||
return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,8 +1,6 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import java.util.List;
|
||||
import lombok.Data;
|
||||
|
||||
@Data
|
||||
|
||||
@@ -1,35 +0,0 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
||||
|
||||
|
||||
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import java.util.List;
|
||||
import lombok.Data;
|
||||
|
||||
@Data
|
||||
public class OllamaRequestModel {
|
||||
|
||||
private String model;
|
||||
private String prompt;
|
||||
private List<String> images;
|
||||
|
||||
public OllamaRequestModel(String model, String prompt) {
|
||||
this.model = model;
|
||||
this.prompt = prompt;
|
||||
}
|
||||
|
||||
public OllamaRequestModel(String model, String prompt, List<String> images) {
|
||||
this.model = model;
|
||||
this.prompt = prompt;
|
||||
this.images = images;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
try {
|
||||
return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -13,9 +13,9 @@ import lombok.Getter;
|
||||
public class OllamaResult {
|
||||
/**
|
||||
* -- GETTER --
|
||||
* Get the response text
|
||||
* Get the completion/response text
|
||||
*
|
||||
* @return String - response text
|
||||
* @return String completion/response text
|
||||
*/
|
||||
private final String response;
|
||||
|
||||
|
||||
@@ -0,0 +1,45 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.chat;
|
||||
|
||||
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
|
||||
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.FileToBase64Serializer;
|
||||
|
||||
import java.util.List;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
/**
|
||||
* Defines a single Message to be used inside a chat request against the ollama /api/chat endpoint.
|
||||
*
|
||||
* @see <a href="https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate chat completion</a>
|
||||
*/
|
||||
@Data
|
||||
@AllArgsConstructor
|
||||
@RequiredArgsConstructor
|
||||
@NoArgsConstructor
|
||||
public class OllamaChatMessage {
|
||||
|
||||
@NonNull
|
||||
private OllamaChatMessageRole role;
|
||||
|
||||
@NonNull
|
||||
private String content;
|
||||
|
||||
@JsonSerialize(using = FileToBase64Serializer.class)
|
||||
private List<byte[]> images;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
try {
|
||||
return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,19 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.chat;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonValue;
|
||||
|
||||
/**
|
||||
* Defines the possible Chat Message roles.
|
||||
*/
|
||||
public enum OllamaChatMessageRole {
|
||||
SYSTEM("system"),
|
||||
USER("user"),
|
||||
ASSISTANT("assistant");
|
||||
|
||||
@JsonValue
|
||||
private String roleName;
|
||||
|
||||
private OllamaChatMessageRole(String roleName){
|
||||
this.roleName = roleName;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,110 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.chat;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.net.URISyntaxException;
|
||||
import java.nio.file.Files;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
||||
|
||||
/**
|
||||
* Helper class for creating {@link OllamaChatRequestModel} objects using the builder-pattern.
|
||||
*/
|
||||
public class OllamaChatRequestBuilder {
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class);
|
||||
|
||||
private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages){
|
||||
request = new OllamaChatRequestModel(model, messages);
|
||||
}
|
||||
|
||||
private OllamaChatRequestModel request;
|
||||
|
||||
public static OllamaChatRequestBuilder getInstance(String model){
|
||||
return new OllamaChatRequestBuilder(model, new ArrayList<>());
|
||||
}
|
||||
|
||||
public OllamaChatRequestModel build(){
|
||||
return request;
|
||||
}
|
||||
|
||||
public void reset(){
|
||||
request = new OllamaChatRequestModel(request.getModel(), new ArrayList<>());
|
||||
}
|
||||
|
||||
public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images){
|
||||
List<OllamaChatMessage> messages = this.request.getMessages();
|
||||
|
||||
List<byte[]> binaryImages = images.stream().map(file -> {
|
||||
try {
|
||||
return Files.readAllBytes(file.toPath());
|
||||
} catch (IOException e) {
|
||||
LOG.warn(String.format("File '%s' could not be accessed, will not add to message!",file.toPath()), e);
|
||||
return new byte[0];
|
||||
}
|
||||
}).collect(Collectors.toList());
|
||||
|
||||
messages.add(new OllamaChatMessage(role,content,binaryImages));
|
||||
return this;
|
||||
}
|
||||
|
||||
public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, String... imageUrls){
|
||||
List<OllamaChatMessage> messages = this.request.getMessages();
|
||||
List<byte[]> binaryImages = null;
|
||||
if(imageUrls.length>0){
|
||||
binaryImages = new ArrayList<>();
|
||||
for (String imageUrl : imageUrls) {
|
||||
try{
|
||||
binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl));
|
||||
}
|
||||
catch (URISyntaxException e){
|
||||
LOG.warn(String.format("URL '%s' could not be accessed, will not add to message!",imageUrl), e);
|
||||
}
|
||||
catch (IOException e){
|
||||
LOG.warn(String.format("Content of URL '%s' could not be read, will not add to message!",imageUrl), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
messages.add(new OllamaChatMessage(role,content,binaryImages));
|
||||
return this;
|
||||
}
|
||||
|
||||
public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages){
|
||||
this.request.getMessages().addAll(messages);
|
||||
return this;
|
||||
}
|
||||
|
||||
public OllamaChatRequestBuilder withOptions(Options options){
|
||||
this.request.setOptions(options.getOptionsMap());
|
||||
return this;
|
||||
}
|
||||
|
||||
public OllamaChatRequestBuilder withGetJsonResponse(){
|
||||
this.request.setReturnFormatJson(true);
|
||||
return this;
|
||||
}
|
||||
|
||||
public OllamaChatRequestBuilder withTemplate(String template){
|
||||
this.request.setTemplate(template);
|
||||
return this;
|
||||
}
|
||||
|
||||
public OllamaChatRequestBuilder withStreaming(){
|
||||
this.request.setStream(true);
|
||||
return this;
|
||||
}
|
||||
|
||||
public OllamaChatRequestBuilder withKeepAlive(String keepAlive){
|
||||
this.request.setKeepAlive(keepAlive);
|
||||
return this;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,39 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.chat;
|
||||
|
||||
import java.util.List;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
|
||||
|
||||
import lombok.Getter;
|
||||
import lombok.Setter;
|
||||
|
||||
/**
|
||||
* Defines a Request to use against the ollama /api/chat endpoint.
|
||||
*
|
||||
* @see <a href=
|
||||
* "https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate
|
||||
* Chat Completion</a>
|
||||
*/
|
||||
@Getter
|
||||
@Setter
|
||||
public class OllamaChatRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody {
|
||||
|
||||
private List<OllamaChatMessage> messages;
|
||||
|
||||
public OllamaChatRequestModel() {}
|
||||
|
||||
public OllamaChatRequestModel(String model, List<OllamaChatMessage> messages) {
|
||||
this.model = model;
|
||||
this.messages = messages;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (!(o instanceof OllamaChatRequestModel)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return this.toString().equals(o.toString());
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.chat;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
@Data
|
||||
public class OllamaChatResponseModel {
|
||||
private String model;
|
||||
private @JsonProperty("created_at") String createdAt;
|
||||
private @JsonProperty("done_reason") String doneReason;
|
||||
private OllamaChatMessage message;
|
||||
private boolean done;
|
||||
private String error;
|
||||
private List<Integer> context;
|
||||
private @JsonProperty("total_duration") Long totalDuration;
|
||||
private @JsonProperty("load_duration") Long loadDuration;
|
||||
private @JsonProperty("prompt_eval_duration") Long promptEvalDuration;
|
||||
private @JsonProperty("eval_duration") Long evalDuration;
|
||||
private @JsonProperty("prompt_eval_count") Integer promptEvalCount;
|
||||
private @JsonProperty("eval_count") Integer evalCount;
|
||||
}
|
||||
@@ -0,0 +1,32 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.chat;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
|
||||
|
||||
/**
|
||||
* Specific chat-API result that contains the chat history sent to the model and appends the answer as {@link OllamaChatResult} given by the
|
||||
* {@link OllamaChatMessageRole#ASSISTANT} role.
|
||||
*/
|
||||
public class OllamaChatResult extends OllamaResult{
|
||||
|
||||
private List<OllamaChatMessage> chatHistory;
|
||||
|
||||
public OllamaChatResult(String response, long responseTime, int httpStatusCode,
|
||||
List<OllamaChatMessage> chatHistory) {
|
||||
super(response, responseTime, httpStatusCode);
|
||||
this.chatHistory = chatHistory;
|
||||
appendAnswerToChatHistory(response);
|
||||
}
|
||||
|
||||
public List<OllamaChatMessage> getChatHistory() {
|
||||
return chatHistory;
|
||||
}
|
||||
|
||||
private void appendAnswerToChatHistory(String answer){
|
||||
OllamaChatMessage assistantMessage = new OllamaChatMessage(OllamaChatMessageRole.ASSISTANT, answer);
|
||||
this.chatHistory.add(assistantMessage);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
@@ -0,0 +1,31 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.chat;
|
||||
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class OllamaChatStreamObserver {
|
||||
|
||||
private OllamaStreamHandler streamHandler;
|
||||
|
||||
private List<OllamaChatResponseModel> responseParts = new ArrayList<>();
|
||||
|
||||
private String message = "";
|
||||
|
||||
public OllamaChatStreamObserver(OllamaStreamHandler streamHandler) {
|
||||
this.streamHandler = streamHandler;
|
||||
}
|
||||
|
||||
public void notify(OllamaChatResponseModel currentResponsePart) {
|
||||
responseParts.add(currentResponsePart);
|
||||
handleCurrentResponsePart(currentResponsePart);
|
||||
}
|
||||
|
||||
protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart) {
|
||||
message = message + currentResponsePart.getMessage().getContent();
|
||||
streamHandler.accept(message);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
@@ -7,7 +7,7 @@ import lombok.Data;
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
@Data
|
||||
public class EmbeddingResponse {
|
||||
public class OllamaEmbeddingResponseModel {
|
||||
@JsonProperty("embedding")
|
||||
private List<Double> embedding;
|
||||
}
|
||||
@@ -0,0 +1,31 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
|
||||
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
|
||||
|
||||
public class OllamaEmbeddingsRequestBuilder {
|
||||
|
||||
private OllamaEmbeddingsRequestBuilder(String model, String prompt){
|
||||
request = new OllamaEmbeddingsRequestModel(model, prompt);
|
||||
}
|
||||
|
||||
private OllamaEmbeddingsRequestModel request;
|
||||
|
||||
public static OllamaEmbeddingsRequestBuilder getInstance(String model, String prompt){
|
||||
return new OllamaEmbeddingsRequestBuilder(model, prompt);
|
||||
}
|
||||
|
||||
public OllamaEmbeddingsRequestModel build(){
|
||||
return request;
|
||||
}
|
||||
|
||||
public OllamaEmbeddingsRequestBuilder withOptions(Options options){
|
||||
this.request.setOptions(options.getOptionsMap());
|
||||
return this;
|
||||
}
|
||||
|
||||
public OllamaEmbeddingsRequestBuilder withKeepAlive(String keepAlive){
|
||||
this.request.setKeepAlive(keepAlive);
|
||||
return this;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,33 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
|
||||
|
||||
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
|
||||
import java.util.Map;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
@Data
|
||||
@RequiredArgsConstructor
|
||||
@NoArgsConstructor
|
||||
public class OllamaEmbeddingsRequestModel {
|
||||
@NonNull
|
||||
private String model;
|
||||
@NonNull
|
||||
private String prompt;
|
||||
|
||||
protected Map<String, Object> options;
|
||||
@JsonProperty(value = "keep_alive")
|
||||
private String keepAlive;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
try {
|
||||
return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,55 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.generate;
|
||||
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
|
||||
|
||||
/**
|
||||
* Helper class for creating {@link io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel}
|
||||
* objects using the builder-pattern.
|
||||
*/
|
||||
public class OllamaGenerateRequestBuilder {
|
||||
|
||||
private OllamaGenerateRequestBuilder(String model, String prompt){
|
||||
request = new OllamaGenerateRequestModel(model, prompt);
|
||||
}
|
||||
|
||||
private OllamaGenerateRequestModel request;
|
||||
|
||||
public static OllamaGenerateRequestBuilder getInstance(String model){
|
||||
return new OllamaGenerateRequestBuilder(model,"");
|
||||
}
|
||||
|
||||
public OllamaGenerateRequestModel build(){
|
||||
return request;
|
||||
}
|
||||
|
||||
public OllamaGenerateRequestBuilder withPrompt(String prompt){
|
||||
request.setPrompt(prompt);
|
||||
return this;
|
||||
}
|
||||
|
||||
public OllamaGenerateRequestBuilder withGetJsonResponse(){
|
||||
this.request.setReturnFormatJson(true);
|
||||
return this;
|
||||
}
|
||||
|
||||
public OllamaGenerateRequestBuilder withOptions(Options options){
|
||||
this.request.setOptions(options.getOptionsMap());
|
||||
return this;
|
||||
}
|
||||
|
||||
public OllamaGenerateRequestBuilder withTemplate(String template){
|
||||
this.request.setTemplate(template);
|
||||
return this;
|
||||
}
|
||||
|
||||
public OllamaGenerateRequestBuilder withStreaming(){
|
||||
this.request.setStream(true);
|
||||
return this;
|
||||
}
|
||||
|
||||
public OllamaGenerateRequestBuilder withKeepAlive(String keepAlive){
|
||||
this.request.setKeepAlive(keepAlive);
|
||||
return this;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,46 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.generate;
|
||||
|
||||
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import lombok.Getter;
|
||||
import lombok.Setter;
|
||||
|
||||
@Getter
|
||||
@Setter
|
||||
public class OllamaGenerateRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody{
|
||||
|
||||
private String prompt;
|
||||
private List<String> images;
|
||||
|
||||
private String system;
|
||||
private String context;
|
||||
private boolean raw;
|
||||
|
||||
public OllamaGenerateRequestModel() {
|
||||
}
|
||||
|
||||
public OllamaGenerateRequestModel(String model, String prompt) {
|
||||
this.model = model;
|
||||
this.prompt = prompt;
|
||||
}
|
||||
|
||||
public OllamaGenerateRequestModel(String model, String prompt, List<String> images) {
|
||||
this.model = model;
|
||||
this.prompt = prompt;
|
||||
this.images = images;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (!(o instanceof OllamaGenerateRequestModel)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return this.toString().equals(o.toString());
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models;
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.generate;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
@@ -8,7 +8,7 @@ import lombok.Data;
|
||||
|
||||
@Data
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
public class OllamaResponseModel {
|
||||
public class OllamaGenerateResponseModel {
|
||||
private String model;
|
||||
private @JsonProperty("created_at") String createdAt;
|
||||
private String response;
|
||||
@@ -0,0 +1,29 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.generate;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class OllamaGenerateStreamObserver {
|
||||
|
||||
private OllamaStreamHandler streamHandler;
|
||||
|
||||
private List<OllamaGenerateResponseModel> responseParts = new ArrayList<>();
|
||||
|
||||
private String message = "";
|
||||
|
||||
public OllamaGenerateStreamObserver(OllamaStreamHandler streamHandler) {
|
||||
this.streamHandler = streamHandler;
|
||||
}
|
||||
|
||||
public void notify(OllamaGenerateResponseModel currentResponsePart) {
|
||||
responseParts.add(currentResponsePart);
|
||||
handleCurrentResponsePart(currentResponsePart);
|
||||
}
|
||||
|
||||
protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart) {
|
||||
message = message + currentResponsePart.getResponse();
|
||||
streamHandler.accept(message);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
@@ -0,0 +1,7 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.generate;
|
||||
|
||||
import java.util.function.Consumer;
|
||||
|
||||
/**
 * Callback invoked with the cumulative response text each time a new streamed
 * chunk arrives. Marked {@code @FunctionalInterface} so the single-method
 * contract is compiler-enforced and lambdas are clearly supported.
 */
@FunctionalInterface
public interface OllamaStreamHandler extends Consumer<String> {
    void accept(String message);
}
|
||||
@@ -1,23 +0,0 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.request;
|
||||
|
||||
import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Data;
|
||||
|
||||
@Data
|
||||
@AllArgsConstructor
|
||||
public class ModelEmbeddingsRequest {
|
||||
private String model;
|
||||
private String prompt;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
try {
|
||||
return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,55 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.request;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* Specialization class for requests
|
||||
*/
|
||||
public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class);
|
||||
|
||||
private OllamaChatStreamObserver streamObserver;
|
||||
|
||||
public OllamaChatEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
|
||||
super(host, basicAuth, requestTimeoutSeconds, verbose);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getEndpointSuffix() {
|
||||
return "/api/chat";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
|
||||
try {
|
||||
OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
|
||||
responseBuffer.append(ollamaResponseModel.getMessage().getContent());
|
||||
if (streamObserver != null) {
|
||||
streamObserver.notify(ollamaResponseModel);
|
||||
}
|
||||
return ollamaResponseModel.isDone();
|
||||
} catch (JsonProcessingException e) {
|
||||
LOG.error("Error parsing the Ollama chat response!", e);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
|
||||
throws OllamaBaseException, IOException, InterruptedException {
|
||||
streamObserver = new OllamaChatStreamObserver(streamHandler);
|
||||
return super.callSync(body);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,152 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.request;
|
||||
|
||||
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
|
||||
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.URI;
|
||||
import java.net.http.HttpClient;
|
||||
import java.net.http.HttpRequest;
|
||||
import java.net.http.HttpResponse;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.time.Duration;
|
||||
import java.util.Base64;
|
||||
|
||||
/**
|
||||
* Abstract helperclass to call the ollama api server.
|
||||
*/
|
||||
public abstract class OllamaEndpointCaller {
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(OllamaAPI.class);
|
||||
|
||||
private String host;
|
||||
private BasicAuth basicAuth;
|
||||
private long requestTimeoutSeconds;
|
||||
private boolean verbose;
|
||||
|
||||
public OllamaEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
|
||||
this.host = host;
|
||||
this.basicAuth = basicAuth;
|
||||
this.requestTimeoutSeconds = requestTimeoutSeconds;
|
||||
this.verbose = verbose;
|
||||
}
|
||||
|
||||
protected abstract String getEndpointSuffix();
|
||||
|
||||
protected abstract boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer);
|
||||
|
||||
|
||||
/**
|
||||
* Calls the api server on the given host and endpoint suffix asynchronously, aka waiting for the response.
|
||||
*
|
||||
* @param body POST body payload
|
||||
* @return result answer given by the assistant
|
||||
* @throws OllamaBaseException any response code than 200 has been returned
|
||||
* @throws IOException in case the responseStream can not be read
|
||||
* @throws InterruptedException in case the server is not reachable or network issues happen
|
||||
*/
|
||||
public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException {
|
||||
// Create Request
|
||||
long startTime = System.currentTimeMillis();
|
||||
HttpClient httpClient = HttpClient.newHttpClient();
|
||||
URI uri = URI.create(this.host + getEndpointSuffix());
|
||||
HttpRequest.Builder requestBuilder =
|
||||
getRequestBuilderDefault(uri)
|
||||
.POST(
|
||||
body.getBodyPublisher());
|
||||
HttpRequest request = requestBuilder.build();
|
||||
if (this.verbose) LOG.info("Asking model: " + body.toString());
|
||||
HttpResponse<InputStream> response =
|
||||
httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
|
||||
|
||||
int statusCode = response.statusCode();
|
||||
InputStream responseBodyStream = response.body();
|
||||
StringBuilder responseBuffer = new StringBuilder();
|
||||
try (BufferedReader reader =
|
||||
new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
|
||||
String line;
|
||||
while ((line = reader.readLine()) != null) {
|
||||
if (statusCode == 404) {
|
||||
LOG.warn("Status code: 404 (Not Found)");
|
||||
OllamaErrorResponseModel ollamaResponseModel =
|
||||
Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
|
||||
responseBuffer.append(ollamaResponseModel.getError());
|
||||
} else if (statusCode == 401) {
|
||||
LOG.warn("Status code: 401 (Unauthorized)");
|
||||
OllamaErrorResponseModel ollamaResponseModel =
|
||||
Utils.getObjectMapper()
|
||||
.readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class);
|
||||
responseBuffer.append(ollamaResponseModel.getError());
|
||||
} else if (statusCode == 400) {
|
||||
LOG.warn("Status code: 400 (Bad Request)");
|
||||
OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line,
|
||||
OllamaErrorResponseModel.class);
|
||||
responseBuffer.append(ollamaResponseModel.getError());
|
||||
} else {
|
||||
boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);
|
||||
if (finished) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (statusCode != 200) {
|
||||
LOG.error("Status code " + statusCode);
|
||||
throw new OllamaBaseException(responseBuffer.toString());
|
||||
} else {
|
||||
long endTime = System.currentTimeMillis();
|
||||
OllamaResult ollamaResult =
|
||||
new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode);
|
||||
if (verbose) LOG.info("Model response: " + ollamaResult);
|
||||
return ollamaResult;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get default request builder.
|
||||
*
|
||||
* @param uri URI to get a HttpRequest.Builder
|
||||
* @return HttpRequest.Builder
|
||||
*/
|
||||
private HttpRequest.Builder getRequestBuilderDefault(URI uri) {
|
||||
HttpRequest.Builder requestBuilder =
|
||||
HttpRequest.newBuilder(uri)
|
||||
.header("Content-Type", "application/json")
|
||||
.timeout(Duration.ofSeconds(this.requestTimeoutSeconds));
|
||||
if (isBasicAuthCredentialsSet()) {
|
||||
requestBuilder.header("Authorization", getBasicAuthHeaderValue());
|
||||
}
|
||||
return requestBuilder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get basic authentication header value.
|
||||
*
|
||||
* @return basic authentication header value (encoded credentials)
|
||||
*/
|
||||
private String getBasicAuthHeaderValue() {
|
||||
String credentialsToEncode = this.basicAuth.getUsername() + ":" + this.basicAuth.getPassword();
|
||||
return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes());
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if Basic Auth credentials set.
|
||||
*
|
||||
* @return true when Basic Auth credentials set
|
||||
*/
|
||||
private boolean isBasicAuthCredentialsSet() {
|
||||
return this.basicAuth != null;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,52 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.models.request;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver;
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class);
|
||||
|
||||
private OllamaGenerateStreamObserver streamObserver;
|
||||
|
||||
public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
|
||||
super(host, basicAuth, requestTimeoutSeconds, verbose);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getEndpointSuffix() {
|
||||
return "/api/generate";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
|
||||
try {
|
||||
OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
|
||||
responseBuffer.append(ollamaResponseModel.getResponse());
|
||||
if (streamObserver != null) {
|
||||
streamObserver.notify(ollamaResponseModel);
|
||||
}
|
||||
return ollamaResponseModel.isDone();
|
||||
} catch (JsonProcessingException e) {
|
||||
LOG.error("Error parsing the Ollama chat response!", e);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
|
||||
throws OllamaBaseException, IOException, InterruptedException {
|
||||
streamObserver = new OllamaGenerateStreamObserver(streamHandler);
|
||||
return super.callSync(body);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,35 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.tools;
|
||||
|
||||
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@Data
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class OllamaToolsResult {
|
||||
private OllamaResult modelResult;
|
||||
private Map<ToolFunctionCallSpec, Object> toolResults;
|
||||
|
||||
public List<ToolResult> getToolResults() {
|
||||
List<ToolResult> results = new ArrayList<>();
|
||||
for (Map.Entry<ToolFunctionCallSpec, Object> r : this.toolResults.entrySet()) {
|
||||
results.add(new ToolResult(r.getKey().getName(), r.getKey().getArguments(), r.getValue()));
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
@Data
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public static class ToolResult {
|
||||
private String functionName;
|
||||
private Map<String, Object> functionArguments;
|
||||
private Object result;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.tools;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * A tool implementation the model can invoke: receives the named arguments the
 * model supplied and returns an arbitrary result object.
 */
@FunctionalInterface
public interface ToolFunction {
    Object apply(Map<String, Object> arguments);
}
|
||||
@@ -0,0 +1,16 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.tools;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
@Data
|
||||
@AllArgsConstructor
|
||||
@NoArgsConstructor
|
||||
public class ToolFunctionCallSpec {
|
||||
private String name;
|
||||
private Map<String, Object> arguments;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.tools;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
public class ToolRegistry {
|
||||
private final Map<String, ToolFunction> functionMap = new HashMap<>();
|
||||
|
||||
public ToolFunction getFunction(String name) {
|
||||
return functionMap.get(name);
|
||||
}
|
||||
|
||||
public void addFunction(String name, ToolFunction function) {
|
||||
functionMap.put(name, function);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,113 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.tools;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Container for the tool-calling data model and builders: tool specifications,
 * their JSON prompt representation, and helpers to assemble the final prompt.
 */
public class Tools {
    /**
     * Registration-side description of a tool: prompt-facing metadata plus the
     * Java {@link ToolFunction} to invoke when the model calls it.
     */
    @Data
    @Builder
    public static class ToolSpecification {
        private String functionName;
        private String functionDescription;
        private Map<String, PromptFuncDefinition.Property> properties;
        private ToolFunction toolDefinition;
    }

    /** JSON shape of a single tool definition as embedded in the prompt. */
    @Data
    @JsonIgnoreProperties(ignoreUnknown = true)
    public static class PromptFuncDefinition {
        private String type;
        private PromptFuncSpec function;

        @Data
        public static class PromptFuncSpec {
            private String name;
            private String description;
            private Parameters parameters;
        }

        @Data
        public static class Parameters {
            private String type;
            private Map<String, Property> properties;
            private List<String> required;
        }

        @Data
        @Builder
        public static class Property {
            private String type;
            private String description;
            // serialized under the JSON key "enum"; omitted entirely when null
            @JsonProperty("enum")
            @JsonInclude(JsonInclude.Include.NON_NULL)
            private List<String> enumValues;
            // bookkeeping flag only (not serialized); collected into Parameters.required
            // by PromptBuilder.withToolSpecification
            @JsonIgnore
            private boolean required;
        }
    }

    /** Fluent helper for building the property map of a {@link ToolSpecification}. */
    public static class PropsBuilder {
        private final Map<String, PromptFuncDefinition.Property> props = new HashMap<>();

        public PropsBuilder withProperty(String key, PromptFuncDefinition.Property property) {
            props.put(key, property);
            return this;
        }

        public Map<String, PromptFuncDefinition.Property> build() {
            return props;
        }
    }

    /**
     * Builds the final prompt string: the tool definitions serialized as JSON inside
     * [AVAILABLE_TOOLS] tags, followed by the instruction text inside [INST] tags.
     */
    public static class PromptBuilder {
        private final List<PromptFuncDefinition> tools = new ArrayList<>();

        private String promptText;

        public String build() throws JsonProcessingException {
            return "[AVAILABLE_TOOLS] " + Utils.getObjectMapper().writeValueAsString(tools) + "[/AVAILABLE_TOOLS][INST] " + promptText + " [/INST]";
        }

        public PromptBuilder withPrompt(String prompt) throws JsonProcessingException {
            promptText = prompt;
            return this;
        }

        /** Converts a {@link ToolSpecification} into its prompt JSON form and appends it. */
        public PromptBuilder withToolSpecification(ToolSpecification spec) {
            PromptFuncDefinition def = new PromptFuncDefinition();
            def.setType("function");

            PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec();
            functionDetail.setName(spec.getFunctionName());
            functionDetail.setDescription(spec.getFunctionDescription());

            PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters();
            parameters.setType("object");
            parameters.setProperties(spec.getProperties());

            // the "required" list is derived from each property's required flag
            List<String> requiredValues = new ArrayList<>();
            for (Map.Entry<String, PromptFuncDefinition.Property> p : spec.getProperties().entrySet()) {
                if (p.getValue().isRequired()) {
                    requiredValues.add(p.getKey());
                }
            }
            parameters.setRequired(requiredValues);
            functionDetail.setParameters(parameters);
            def.setFunction(functionDetail);

            tools.add(def);
            return this;
        }
    }
}
|
||||
@@ -8,56 +8,81 @@ package io.github.amithkoujalgi.ollama4j.core.types;
|
||||
*/
|
||||
/**
 * Well-known Ollama model name constants, for use when pulling or invoking
 * models by name.
 *
 * <p>NOTE(review): this span was a concatenated before/after diff of the class,
 * which declared many constants twice (duplicate-field compile errors). The list
 * below is the deduplicated union: the updated constant set, plus the constants
 * that only appeared in the older listing, retained for backward compatibility.
 */
@SuppressWarnings("ALL")
public class OllamaModelType {
    public static final String LLAMA2 = "llama2";
    public static final String LLAMA3 = "llama3";
    public static final String MISTRAL = "mistral";
    public static final String MIXTRAL = "mixtral";
    public static final String LLAVA = "llava";
    public static final String LLAVA_PHI3 = "llava-phi3";
    public static final String NEURAL_CHAT = "neural-chat";
    public static final String CODELLAMA = "codellama";
    public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral";
    public static final String MISTRAL_OPENORCA = "mistral-openorca";
    public static final String LLAMA2_UNCENSORED = "llama2-uncensored";
    public static final String PHI = "phi";
    public static final String PHI3 = "phi3";
    public static final String ORCA_MINI = "orca-mini";
    public static final String DEEPSEEK_CODER = "deepseek-coder";
    public static final String DOLPHIN_MISTRAL = "dolphin-mistral";
    public static final String VICUNA = "vicuna";
    public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored";
    public static final String ZEPHYR = "zephyr";
    public static final String OPENHERMES = "openhermes";
    public static final String QWEN = "qwen";
    public static final String QWEN2 = "qwen2";
    public static final String WIZARDCODER = "wizardcoder";
    public static final String LLAMA2_CHINESE = "llama2-chinese";
    public static final String TINYLLAMA = "tinyllama";
    public static final String PHIND_CODELLAMA = "phind-codellama";
    public static final String OPENCHAT = "openchat";
    public static final String ORCA2 = "orca2";
    public static final String FALCON = "falcon";
    public static final String WIZARD_MATH = "wizard-math";
    public static final String TINYDOLPHIN = "tinydolphin";
    public static final String NOUS_HERMES = "nous-hermes";
    public static final String YI = "yi";
    public static final String DOLPHIN_PHI = "dolphin-phi";
    public static final String STARLING_LM = "starling-lm";
    public static final String STARCODER = "starcoder";
    public static final String CODEUP = "codeup";
    public static final String MEDLLAMA2 = "medllama2";
    public static final String STABLE_CODE = "stable-code";
    public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored";
    public static final String BAKLLAVA = "bakllava";
    public static final String EVERYTHINGLM = "everythinglm";
    public static final String SOLAR = "solar";
    public static final String STABLE_BELUGA = "stable-beluga";
    public static final String SQLCODER = "sqlcoder";
    public static final String YARN_MISTRAL = "yarn-mistral";
    public static final String NOUS_HERMES2_MIXTRAL = "nous-hermes2-mixtral";
    public static final String SAMANTHA_MISTRAL = "samantha-mistral";
    public static final String STABLELM_ZEPHYR = "stablelm-zephyr";
    public static final String MEDITRON = "meditron";
    public static final String WIZARD_VICUNA = "wizard-vicuna";
    public static final String STABLELM2 = "stablelm2";
    public static final String MAGICODER = "magicoder";
    public static final String YARN_LLAMA2 = "yarn-llama2";
    public static final String NOUS_HERMES2 = "nous-hermes2";
    public static final String DEEPSEEK_LLM = "deepseek-llm";
    public static final String LLAMA_PRO = "llama-pro";
    public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2";
    public static final String CODEBOOGA = "codebooga";
    public static final String MISTRALLITE = "mistrallite";
    public static final String NEXUSRAVEN = "nexusraven";
    public static final String GOLIATH = "goliath";
    public static final String NOMIC_EMBED_TEXT = "nomic-embed-text";
    public static final String NOTUX = "notux";
    public static final String ALFRED = "alfred";
    public static final String MEGADOLPHIN = "megadolphin";
    public static final String WIZARDLM = "wizardlm";
    public static final String XWINLM = "xwinlm";
    public static final String NOTUS = "notus";
    public static final String DUCKDB_NSQL = "duckdb-nsql";
    public static final String ALL_MINILM = "all-minilm";
    public static final String CODESTRAL = "codestral";

    // Constants only present in the earlier listing, kept so existing callers
    // referencing them continue to compile.
    public static final String GEMMA = "gemma";
    public static final String GEMMA2 = "gemma2";
    public static final String DOLPHIN22_MISTRAL = "dolphin2.2-mistral";
    public static final String OPENHERMES25_MISTRAL = "openhermes2.5-mistral";
    public static final String OPENHERMES2_MISTRAL = "openhermes2-mistral";
    public static final String DOLPHIN21_MISTRAL = "dolphin2.1-mistral";
}
|
||||
|
||||
@@ -0,0 +1,21 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.utils;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonGenerator;
|
||||
import com.fasterxml.jackson.databind.JsonSerializer;
|
||||
import com.fasterxml.jackson.databind.SerializerProvider;
|
||||
|
||||
public class BooleanToJsonFormatFlagSerializer extends JsonSerializer<Boolean>{
|
||||
|
||||
@Override
|
||||
public void serialize(Boolean value, JsonGenerator gen, SerializerProvider serializers) throws IOException {
|
||||
gen.writeString("json");
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isEmpty(SerializerProvider provider,Boolean value){
|
||||
return !value;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,21 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.utils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Base64;
|
||||
import java.util.Collection;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonGenerator;
|
||||
import com.fasterxml.jackson.databind.JsonSerializer;
|
||||
import com.fasterxml.jackson.databind.SerializerProvider;
|
||||
|
||||
public class FileToBase64Serializer extends JsonSerializer<Collection<byte[]>> {
|
||||
|
||||
@Override
|
||||
public void serialize(Collection<byte[]> value, JsonGenerator jsonGenerator, SerializerProvider serializers) throws IOException {
|
||||
jsonGenerator.writeStartArray();
|
||||
for (byte[] file : value) {
|
||||
jsonGenerator.writeString(Base64.getEncoder().encodeToString(file));
|
||||
}
|
||||
jsonGenerator.writeEndArray();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,28 @@
|
||||
package io.github.amithkoujalgi.ollama4j.core.utils;
|
||||
|
||||
import java.net.http.HttpRequest.BodyPublisher;
|
||||
import java.net.http.HttpRequest.BodyPublishers;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
|
||||
/**
|
||||
* Interface to represent a OllamaRequest as HTTP-Request Body via {@link BodyPublishers}.
|
||||
*/
|
||||
public interface OllamaRequestBody {
|
||||
|
||||
/**
|
||||
* Transforms the OllamaRequest Object to a JSON Object via Jackson.
|
||||
*
|
||||
* @return JSON representation of a OllamaRequest
|
||||
*/
|
||||
@JsonIgnore
|
||||
default BodyPublisher getBodyPublisher(){
|
||||
try {
|
||||
return BodyPublishers.ofString(
|
||||
Utils.getObjectMapper().writeValueAsString(this));
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new IllegalArgumentException("Request not Body convertible.",e);
|
||||
}
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user