forked from Mirror/ollama4j
Compare commits
52 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 43f43c9f81 | |
| | 65f00defcf | |
| | d716b81342 | |
| | 272ba445f6 | |
| | d9816d8869 | |
| | 874736eb16 | |
| | 9c16ccbf81 | |
| | 40a3aa31dc | |
| | 90669b611b | |
| | f10c7ac725 | |
| | 38dca3cd0d | |
| | 44bb35b168 | |
| | 9832caf503 | |
| | 0c4e8e306e | |
| | 075416eb9c | |
| | 4260fbbc32 | |
| | 0bec697a86 | |
| | 4ca6eef8fd | |
| | a635dd9be2 | |
| | 14982011d9 | |
| | 65d852fdc9 | |
| | d483c23c81 | |
| | 273b1e47ca | |
| | 5c5cdba4cd | |
| | 24674ea483 | |
| | 5d3a975e4c | |
| | ad670c3c62 | |
| | f9063484f3 | |
| | 5e2a07ad41 | |
| | 00a3e51a93 | |
| | bc20468f28 | |
| | c7ac50a805 | |
| | f8cd7bc013 | |
| | 3469bf314b | |
| | 9636807819 | |
| | 455251d1d4 | |
| | ec00ffae7f | |
| | d969c7ad46 | |
| | 02bf769188 | |
| | 1c8a6b4f2a | |
| | 60fe5d6ffb | |
| | 327ae7437f | |
| | 795b9f2b9b | |
| | 54da069e68 | |
| | bfc5cebac1 | |
| | d46b1d48d8 | |
| | 96320e7761 | |
| | e6472f0a81 | |
| | 816bbd9bbf | |
| | da1123271d | |
| | 12f099260f | |
| | 35728ae208 | |
.github/workflows/publish-docs.yml (vendored, 6 added lines)

```diff
@@ -50,6 +50,12 @@ jobs:
       - name: Build with Maven
         run: mvn --file pom.xml -U clean package && cp -r ./target/apidocs/. ./docs/build/apidocs
+
+      - name: Doxygen Action
+        uses: mattnotmitt/doxygen-action@v1.1.0
+        with:
+          doxyfile-path: "./Doxyfile"
+          working-directory: "."
       - name: Setup Pages
         uses: actions/configure-pages@v3
       - name: Upload artifact
```
Doxyfile (new file, 413 lines)

```
# Doxyfile 1.10.0

# Project related configuration options
DOXYFILE_ENCODING = UTF-8
PROJECT_NAME = "Ollama4j"
PROJECT_NUMBER =
PROJECT_BRIEF = "A Java library (wrapper/binding) for Ollama server."
PROJECT_LOGO = ./logo-small.png
PROJECT_ICON = ./logo-small.png
OUTPUT_DIRECTORY = ./docs/build/doxygen
CREATE_SUBDIRS = NO
CREATE_SUBDIRS_LEVEL = 8
ALLOW_UNICODE_NAMES = NO
OUTPUT_LANGUAGE = English
BRIEF_MEMBER_DESC = YES
REPEAT_BRIEF = YES
ABBREVIATE_BRIEF = "The $name class" "The $name widget" "The $name file" \
                   is provides specifies contains represents a an the
ALWAYS_DETAILED_SEC = NO
INLINE_INHERITED_MEMB = NO
FULL_PATH_NAMES = YES
STRIP_FROM_PATH =
STRIP_FROM_INC_PATH =
SHORT_NAMES = NO
JAVADOC_AUTOBRIEF = NO
JAVADOC_BANNER = NO
QT_AUTOBRIEF = NO
MULTILINE_CPP_IS_BRIEF = NO
PYTHON_DOCSTRING = YES
INHERIT_DOCS = YES
SEPARATE_MEMBER_PAGES = NO
TAB_SIZE = 4
ALIASES =
OPTIMIZE_OUTPUT_FOR_C = NO
OPTIMIZE_OUTPUT_JAVA = YES
OPTIMIZE_FOR_FORTRAN = NO
OPTIMIZE_OUTPUT_VHDL = NO
OPTIMIZE_OUTPUT_SLICE = NO
EXTENSION_MAPPING =
MARKDOWN_SUPPORT = YES
TOC_INCLUDE_HEADINGS = 5
MARKDOWN_ID_STYLE = DOXYGEN
AUTOLINK_SUPPORT = YES
BUILTIN_STL_SUPPORT = NO
CPP_CLI_SUPPORT = NO
SIP_SUPPORT = NO
IDL_PROPERTY_SUPPORT = YES
DISTRIBUTE_GROUP_DOC = NO
GROUP_NESTED_COMPOUNDS = NO
SUBGROUPING = YES
INLINE_GROUPED_CLASSES = NO
INLINE_SIMPLE_STRUCTS = NO
TYPEDEF_HIDES_STRUCT = NO
LOOKUP_CACHE_SIZE = 0
NUM_PROC_THREADS = 1
TIMESTAMP = NO

# Build related configuration options
EXTRACT_ALL = YES
EXTRACT_PRIVATE = NO
EXTRACT_PRIV_VIRTUAL = NO
EXTRACT_PACKAGE = NO
EXTRACT_STATIC = NO
EXTRACT_LOCAL_CLASSES = YES
EXTRACT_LOCAL_METHODS = NO
EXTRACT_ANON_NSPACES = NO
RESOLVE_UNNAMED_PARAMS = YES
HIDE_UNDOC_MEMBERS = NO
HIDE_UNDOC_CLASSES = NO
HIDE_FRIEND_COMPOUNDS = NO
HIDE_IN_BODY_DOCS = NO
INTERNAL_DOCS = NO
CASE_SENSE_NAMES = SYSTEM
HIDE_SCOPE_NAMES = NO
HIDE_COMPOUND_REFERENCE= NO
SHOW_HEADERFILE = YES
SHOW_INCLUDE_FILES = YES
SHOW_GROUPED_MEMB_INC = NO
FORCE_LOCAL_INCLUDES = NO
INLINE_INFO = YES
SORT_MEMBER_DOCS = YES
SORT_BRIEF_DOCS = NO
SORT_MEMBERS_CTORS_1ST = NO
SORT_GROUP_NAMES = NO
SORT_BY_SCOPE_NAME = NO
STRICT_PROTO_MATCHING = NO
GENERATE_TODOLIST = YES
GENERATE_TESTLIST = YES
GENERATE_BUGLIST = YES
GENERATE_DEPRECATEDLIST= YES
ENABLED_SECTIONS =
MAX_INITIALIZER_LINES = 30
SHOW_USED_FILES = YES
SHOW_FILES = YES
SHOW_NAMESPACES = YES
FILE_VERSION_FILTER =
LAYOUT_FILE =
CITE_BIB_FILES =

# Configuration options related to warning and progress messages
QUIET = NO
WARNINGS = YES
WARN_IF_UNDOCUMENTED = YES
WARN_IF_DOC_ERROR = YES
WARN_IF_INCOMPLETE_DOC = YES
WARN_NO_PARAMDOC = NO
WARN_IF_UNDOC_ENUM_VAL = NO
WARN_AS_ERROR = NO
WARN_FORMAT = "$file:$line: $text"
WARN_LINE_FORMAT = "at line $line of file $file"
WARN_LOGFILE =

# Configuration options related to the input files
INPUT = ./src/main
INPUT_ENCODING = UTF-8
INPUT_FILE_ENCODING =
FILE_PATTERNS = *.c *.cc *.cxx *.cxxm *.cpp *.cppm *.ccm *.c++ *.c++m *.java \
                *.ii *.ixx *.ipp *.i++ *.inl *.idl *.ddl *.odl *.h *.hh *.hxx \
                *.hpp *.h++ *.ixx *.l *.cs *.d *.php *.php4 *.php5 *.phtml \
                *.inc *.m *.markdown *.md *.mm *.dox *.py *.pyw *.f90 *.f95 \
                *.f03 *.f08 *.f18 *.f *.for *.vhd *.vhdl *.ucf *.qsf *.ice
RECURSIVE = YES
EXCLUDE =
EXCLUDE_SYMLINKS = NO
EXCLUDE_PATTERNS =
EXCLUDE_SYMBOLS =
EXAMPLE_PATH =
EXAMPLE_PATTERNS = *
EXAMPLE_RECURSIVE = NO
IMAGE_PATH =
INPUT_FILTER =
FILTER_PATTERNS =
FILTER_SOURCE_FILES = NO
FILTER_SOURCE_PATTERNS =
USE_MDFILE_AS_MAINPAGE =
FORTRAN_COMMENT_AFTER = 72

# Configuration options related to source browsing
SOURCE_BROWSER = YES
INLINE_SOURCES = NO
STRIP_CODE_COMMENTS = YES
REFERENCED_BY_RELATION = NO
REFERENCES_RELATION = NO
REFERENCES_LINK_SOURCE = YES
SOURCE_TOOLTIPS = YES
USE_HTAGS = NO
VERBATIM_HEADERS = YES
CLANG_ASSISTED_PARSING = NO
CLANG_ADD_INC_PATHS = YES
CLANG_OPTIONS =
CLANG_DATABASE_PATH =

# Configuration options related to the alphabetical class index
ALPHABETICAL_INDEX = YES
IGNORE_PREFIX =

# Configuration options related to the HTML output
GENERATE_HTML = YES
HTML_OUTPUT = html
HTML_FILE_EXTENSION = .html
HTML_HEADER =
HTML_FOOTER =
HTML_STYLESHEET =
HTML_EXTRA_STYLESHEET =
HTML_EXTRA_FILES =
HTML_COLORSTYLE = LIGHT
HTML_COLORSTYLE_HUE = 220
HTML_COLORSTYLE_SAT = 100
HTML_COLORSTYLE_GAMMA = 80
HTML_DYNAMIC_MENUS = YES
HTML_DYNAMIC_SECTIONS = NO
HTML_CODE_FOLDING = YES
HTML_COPY_CLIPBOARD = YES
HTML_PROJECT_COOKIE =
HTML_INDEX_NUM_ENTRIES = 100
GENERATE_DOCSET = NO
DOCSET_FEEDNAME = "Doxygen generated docs"
DOCSET_FEEDURL =
DOCSET_BUNDLE_ID = org.doxygen.Project
DOCSET_PUBLISHER_ID = org.doxygen.Publisher
DOCSET_PUBLISHER_NAME = Publisher
GENERATE_HTMLHELP = NO
CHM_FILE =
HHC_LOCATION =
GENERATE_CHI = NO
CHM_INDEX_ENCODING =
BINARY_TOC = NO
TOC_EXPAND = NO
SITEMAP_URL =
GENERATE_QHP = NO
QCH_FILE =
QHP_NAMESPACE = org.doxygen.Project
QHP_VIRTUAL_FOLDER = doc
QHP_CUST_FILTER_NAME =
QHP_CUST_FILTER_ATTRS =
QHP_SECT_FILTER_ATTRS =
QHG_LOCATION =
GENERATE_ECLIPSEHELP = NO
ECLIPSE_DOC_ID = org.doxygen.Project
DISABLE_INDEX = NO
GENERATE_TREEVIEW = YES
FULL_SIDEBAR = NO
ENUM_VALUES_PER_LINE = 4
TREEVIEW_WIDTH = 250
EXT_LINKS_IN_WINDOW = NO
OBFUSCATE_EMAILS = YES
HTML_FORMULA_FORMAT = png
FORMULA_FONTSIZE = 10
FORMULA_MACROFILE =
USE_MATHJAX = NO
MATHJAX_VERSION = MathJax_2
MATHJAX_FORMAT = HTML-CSS
MATHJAX_RELPATH =
MATHJAX_EXTENSIONS =
MATHJAX_CODEFILE =
SEARCHENGINE = YES
SERVER_BASED_SEARCH = NO
EXTERNAL_SEARCH = NO
SEARCHENGINE_URL =
SEARCHDATA_FILE = searchdata.xml
EXTERNAL_SEARCH_ID =
EXTRA_SEARCH_MAPPINGS =

# Configuration options related to the LaTeX output
GENERATE_LATEX = YES
LATEX_OUTPUT = latex
LATEX_CMD_NAME =
MAKEINDEX_CMD_NAME = makeindex
LATEX_MAKEINDEX_CMD = makeindex
COMPACT_LATEX = NO
PAPER_TYPE = a4
EXTRA_PACKAGES =
LATEX_HEADER =
LATEX_FOOTER =
LATEX_EXTRA_STYLESHEET =
LATEX_EXTRA_FILES =
PDF_HYPERLINKS = YES
USE_PDFLATEX = YES
LATEX_BATCHMODE = NO
LATEX_HIDE_INDICES = NO
LATEX_BIB_STYLE = plain
LATEX_EMOJI_DIRECTORY =

# Configuration options related to the RTF output
GENERATE_RTF = NO
RTF_OUTPUT = rtf
COMPACT_RTF = NO
RTF_HYPERLINKS = NO
RTF_STYLESHEET_FILE =
RTF_EXTENSIONS_FILE =

# Configuration options related to the man page output
GENERATE_MAN = NO
MAN_OUTPUT = man
MAN_EXTENSION = .3
MAN_SUBDIR =
MAN_LINKS = NO

# Configuration options related to the XML output
GENERATE_XML = NO
XML_OUTPUT = xml
XML_PROGRAMLISTING = YES
XML_NS_MEMB_FILE_SCOPE = NO

# Configuration options related to the DOCBOOK output
GENERATE_DOCBOOK = NO
DOCBOOK_OUTPUT = docbook

# Configuration options for the AutoGen Definitions output
GENERATE_AUTOGEN_DEF = NO

# Configuration options related to Sqlite3 output
GENERATE_SQLITE3 = NO
SQLITE3_OUTPUT = sqlite3
SQLITE3_RECREATE_DB = YES

# Configuration options related to the Perl module output
GENERATE_PERLMOD = NO
PERLMOD_LATEX = NO
PERLMOD_PRETTY = YES
PERLMOD_MAKEVAR_PREFIX =

# Configuration options related to the preprocessor
ENABLE_PREPROCESSING = YES
MACRO_EXPANSION = NO
EXPAND_ONLY_PREDEF = NO
SEARCH_INCLUDES = YES
INCLUDE_PATH =
INCLUDE_FILE_PATTERNS =
PREDEFINED =
EXPAND_AS_DEFINED =
SKIP_FUNCTION_MACROS = YES

# Configuration options related to external references
TAGFILES =
GENERATE_TAGFILE =
ALLEXTERNALS = NO
EXTERNAL_GROUPS = YES
EXTERNAL_PAGES = YES

# Configuration options related to diagram generator tools
HIDE_UNDOC_RELATIONS = YES
HAVE_DOT = NO
DOT_NUM_THREADS = 0
DOT_COMMON_ATTR = "fontname=Helvetica,fontsize=10"
DOT_EDGE_ATTR = "labelfontname=Helvetica,labelfontsize=10"
DOT_NODE_ATTR = "shape=box,height=0.2,width=0.4"
DOT_FONTPATH =
CLASS_GRAPH = YES
COLLABORATION_GRAPH = YES
GROUP_GRAPHS = YES
UML_LOOK = NO
UML_LIMIT_NUM_FIELDS = 10
DOT_UML_DETAILS = NO
DOT_WRAP_THRESHOLD = 17
TEMPLATE_RELATIONS = NO
INCLUDE_GRAPH = YES
INCLUDED_BY_GRAPH = YES
CALL_GRAPH = NO
CALLER_GRAPH = NO
GRAPHICAL_HIERARCHY = YES
DIRECTORY_GRAPH = YES
DIR_GRAPH_MAX_DEPTH = 1
DOT_IMAGE_FORMAT = png
INTERACTIVE_SVG = NO
DOT_PATH =
DOTFILE_DIRS =
DIA_PATH =
DIAFILE_DIRS =
PLANTUML_JAR_PATH =
PLANTUML_CFG_FILE =
PLANTUML_INCLUDE_PATH =
DOT_GRAPH_MAX_NODES = 50
MAX_DOT_GRAPH_DEPTH = 0
DOT_MULTI_TARGETS = NO
GENERATE_LEGEND = YES
DOT_CLEANUP = YES
MSCGEN_TOOL =
MSCFILE_DIRS =
```
Makefile (3 added lines)

```diff
@@ -7,6 +7,9 @@ ut:
 it:
 	mvn clean verify -Pintegration-tests
+
+doxygen:
+	doxygen Doxyfile
 
 list-releases:
 	curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=asc&page=0&size=12&filter=namespace%3Aio.github.amithkoujalgi%2Cname%3Aollama4j' \
 	--compressed \
```
````diff
@@ -67,7 +67,7 @@ In your Maven project, add this dependency:
 <dependency>
     <groupId>io.github.amithkoujalgi</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.29</version>
+    <version>1.0.47</version>
 </dependency>
 ```
````
```diff
@@ -2,7 +2,7 @@
 sidebar_position: 2
 ---
 
-# Ask - Async
+# Generate - Async
 
 This API lets you ask questions to the LLMs in a asynchronous way.
 These APIs correlate to
@@ -19,13 +19,13 @@ public class Main {
 
     String prompt = "Who are you?";
 
-    OllamaAsyncResultCallback callback = ollamaAPI.askAsync(OllamaModelType.LLAMA2, prompt);
+    OllamaAsyncResultCallback callback = ollamaAPI.generateAsync(OllamaModelType.LLAMA2, prompt);
 
     while (!callback.isComplete() || !callback.getStream().isEmpty()) {
       // poll for data from the response stream
       String result = callback.getStream().poll();
-      if (response != null) {
-        System.out.print(result.getResponse());
+      if (result != null) {
+        System.out.print(result);
       }
       Thread.sleep(100);
     }
```
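Taken together, the corrected snippet corresponds to a complete program along the lines of the sketch below. It uses only the calls visible in this diff (`OllamaAPI`, `generateAsync`, the callback's `isComplete()`/`getStream()`); the host URL is a placeholder, and the import locations for `OllamaAsyncResultCallback` and `OllamaModelType` are assumptions, since those files are not part of this comparison.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultCallback;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;

public class GenerateAsyncExample {
    public static void main(String[] args) throws Exception {
        // Host and model are placeholders; point this at your own Ollama server.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        OllamaAsyncResultCallback callback =
                ollamaAPI.generateAsync(OllamaModelType.LLAMA2, "Who are you?");

        // Poll the stream until the call completes and the queue is drained.
        while (!callback.isComplete() || !callback.getStream().isEmpty()) {
            String result = callback.getStream().poll();
            if (result != null) {
                System.out.print(result);
            }
            Thread.sleep(100);
        }
    }
}
```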
```diff
@@ -2,7 +2,7 @@
 sidebar_position: 3
 ---
 
-# Ask - With Image Files
+# Generate - With Image Files
 
 This API lets you ask questions along with the image files to the LLMs.
 These APIs correlate to
@@ -15,7 +15,7 @@ recommended.
 
 :::
 
-## Ask (Sync)
+## Synchronous mode
 
 If you have this image downloaded and you pass the path to the downloaded image to the following code:
 
@@ -29,7 +29,7 @@ public class Main {
     OllamaAPI ollamaAPI = new OllamaAPI(host);
     ollamaAPI.setRequestTimeoutSeconds(10);
 
-    OllamaResult result = ollamaAPI.askWithImageFiles(OllamaModelType.LLAVA,
+    OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA,
         "What's in this image?",
         List.of(
             new File("/path/to/image")));
```
```diff
@@ -2,7 +2,7 @@
 sidebar_position: 4
 ---
 
-# Ask - With Image URLs
+# Generate - With Image URLs
 
 This API lets you ask questions along with the image files to the LLMs.
 These APIs correlate to
@@ -29,7 +29,7 @@ public class Main {
     OllamaAPI ollamaAPI = new OllamaAPI(host);
     ollamaAPI.setRequestTimeoutSeconds(10);
 
-    OllamaResult result = ollamaAPI.askWithImageURLs(OllamaModelType.LLAVA,
+    OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA,
         "What's in this image?",
         List.of(
             "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"));
```
```diff
@@ -2,7 +2,7 @@
 sidebar_position: 1
 ---
 
-# Ask - Sync
+# Generate - Sync
 
 This API lets you ask questions to the LLMs in a synchronous way.
 These APIs correlate to
@@ -25,7 +25,7 @@ public class Main {
     OllamaAPI ollamaAPI = new OllamaAPI(host);
 
     OllamaResult result =
-        ollamaAPI.ask(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());
+        ollamaAPI.generate(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());
 
     System.out.println(result.getResponse());
   }
@@ -55,7 +55,7 @@ public class Main {
     String prompt = "List all cricket world cup teams of 2019.";
 
     OllamaResult result =
-        ollamaAPI.ask(OllamaModelType.LLAMA2, prompt, new OptionsBuilder().build());
+        ollamaAPI.generate(OllamaModelType.LLAMA2, prompt, new OptionsBuilder().build());
 
     System.out.println(result.getResponse());
   }
@@ -97,7 +97,7 @@ public class Main {
         SamplePrompts.getSampleDatabasePromptWithQuestion(
             "List all customer names who have bought one or more products");
     OllamaResult result =
-        ollamaAPI.ask(OllamaModelType.SQLCODER, prompt, new OptionsBuilder().build());
+        ollamaAPI.generate(OllamaModelType.SQLCODER, prompt, new OptionsBuilder().build());
     System.out.println(result.getResponse());
   }
 }
```
docs/docs/apis-ask/chat.md (new file, 98 lines)

````markdown
---
sidebar_position: 7
---

# Chat

This API lets you create a conversation with LLMs. Using this API enables you to ask questions to the model including
information using the history of already asked questions and the respective answers.

## Create a new conversation and use chat history to augment follow up questions

```java
public class Main {

    public static void main(String[] args) {

        String host = "http://localhost:11434/";

        OllamaAPI ollamaAPI = new OllamaAPI(host);
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);

        // create first user question
        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
                .build();

        // start conversation with model
        OllamaChatResult chatResult = ollamaAPI.chat(requestModel);

        System.out.println("First answer: " + chatResult.getResponse());

        // create next userQuestion
        requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?").build();

        // "continue" conversation with model
        chatResult = ollamaAPI.chat(requestModel);

        System.out.println("Second answer: " + chatResult.getResponse());

        System.out.println("Chat History: " + chatResult.getChatHistory());
    }
}
```

You will get a response similar to:

> First answer: Should be Paris!
>
> Second answer: Marseille.
>
> Chat History:

```json
[ {
  "role" : "user",
  "content" : "What is the capital of France?",
  "images" : [ ]
}, {
  "role" : "assistant",
  "content" : "Should be Paris!",
  "images" : [ ]
}, {
  "role" : "user",
  "content" : "And what is the second largest city?",
  "images" : [ ]
}, {
  "role" : "assistant",
  "content" : "Marseille.",
  "images" : [ ]
} ]
```

## Create a new conversation with individual system prompt

```java
public class Main {

    public static void main(String[] args) {

        String host = "http://localhost:11434/";

        OllamaAPI ollamaAPI = new OllamaAPI(host);
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);

        // create request with system-prompt (overriding the model defaults) and user question
        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
                .withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
                .build();

        // start conversation with model
        OllamaChatResult chatResult = ollamaAPI.chat(requestModel);

        System.out.println(chatResult.getResponse());
    }
}
```

You will get a response similar to:

> NI.
````
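The new chat documentation only shows text messages, but the `OllamaChatRequestBuilder` added later in this comparison also accepts image files per message via its `withMessage(role, content, File... images)` overload. A hedged sketch of such a multimodal turn; the host, model choice, and file path are placeholders, and the `OllamaModelType` import location is an assumption.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import java.io.File;

public class ChatWithImageExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // LLAVA is used here only because the image-oriented docs above use it.
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA);

        // Attach an image file to the user message (placeholder path).
        OllamaChatRequestModel requestModel = builder
                .withMessage(OllamaChatMessageRole.USER,
                        "What's in this image?",
                        new File("/path/to/image"))
                .build();

        OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
        System.out.println(chatResult.getResponse());
    }
}
```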
```diff
@@ -42,7 +42,7 @@ public class AskPhi {
             .addSeparator()
             .add("How do I read a file in Go and print its contents to stdout?");
 
-    OllamaResult response = ollamaAPI.ask(model, promptBuilder.build());
+    OllamaResult response = ollamaAPI.generate(model, promptBuilder.build());
     System.out.println(response.getResponse());
   }
 }
```
```diff
@@ -79,6 +79,7 @@ const config = {
           label: 'Docs',
         },
         {to: 'https://amithkoujalgi.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
+        {to: 'https://amithkoujalgi.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'},
         {to: '/blog', label: 'Blog', position: 'left'},
         {
           href: 'https://github.com/amithkoujalgi/ollama4j',
```
logo-small.png (new binary file, 5.0 KiB, not shown)
pom.xml (4 changed lines)

```diff
@@ -4,7 +4,7 @@
 
     <groupId>io.github.amithkoujalgi</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.43</version>
+    <version>1.0.51</version>
 
     <name>Ollama4j</name>
     <description>Java library for interacting with Ollama API.</description>
@@ -39,7 +39,7 @@
         <connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
         <developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
         <url>https://github.com/amithkoujalgi/ollama4j</url>
-        <tag>v1.0.43</tag>
+        <tag>v1.0.51</tag>
     </scm>
 
     <build>
```
OllamaAPI (package io.github.amithkoujalgi.ollama4j.core)

```diff
@@ -2,10 +2,16 @@ package io.github.amithkoujalgi.ollama4j.core;
 
 import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
 import io.github.amithkoujalgi.ollama4j.core.models.*;
+import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage;
+import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
+import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
+import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
 import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest;
 import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest;
 import io.github.amithkoujalgi.ollama4j.core.models.request.ModelEmbeddingsRequest;
 import io.github.amithkoujalgi.ollama4j.core.models.request.ModelRequest;
+import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaChatEndpointCaller;
+import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaGenerateEndpointCaller;
 import io.github.amithkoujalgi.ollama4j.core.utils.Options;
 import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 import java.io.BufferedReader;
@@ -329,7 +335,8 @@ public class OllamaAPI {
   }
 
   /**
-   * Ask a question to a model running on Ollama server. This is a sync/blocking call.
+   * Generate response for a question to a model running on Ollama server. This is a sync/blocking
+   * call.
    *
    * @param model the ollama model to ask the question to
    * @param prompt the prompt/question text
@@ -338,23 +345,23 @@ public class OllamaAPI {
    *     details on the options</a>
    * @return OllamaResult that includes response text and time taken for response
    */
-  public OllamaResult ask(String model, String prompt, Options options)
+  public OllamaResult generate(String model, String prompt, Options options)
       throws OllamaBaseException, IOException, InterruptedException {
     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
     ollamaRequestModel.setOptions(options.getOptionsMap());
-    return askSync(ollamaRequestModel);
+    return generateSyncForOllamaRequestModel(ollamaRequestModel);
   }
 
   /**
-   * Ask a question to a model running on Ollama server and get a callback handle that can be used
-   * to check for status and get the response from the model later. This would be an
-   * async/non-blocking call.
+   * Generate response for a question to a model running on Ollama server and get a callback handle
+   * that can be used to check for status and get the response from the model later. This would be
+   * an async/non-blocking call.
    *
    * @param model the ollama model to ask the question to
    * @param prompt the prompt/question text
    * @return the ollama async result callback handle
    */
-  public OllamaAsyncResultCallback askAsync(String model, String prompt) {
+  public OllamaAsyncResultCallback generateAsync(String model, String prompt) {
     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
 
     URI uri = URI.create(this.host + "/api/generate");
```
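These hunks are a pure rename: existing callers keep their arguments and switch from `ask(...)` to `generate(...)` (and from `askAsync(...)` to `generateAsync(...)`). A minimal migration sketch follows; the import locations for `OllamaModelType`, `OptionsBuilder`, `OllamaResult`, and `OllamaAsyncResultCallback` are assumptions, since those files are not shown in this comparison.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class AskToGenerateMigration {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // 1.0.43 and earlier (per pom.xml in this comparison):
        //   OllamaResult result = ollamaAPI.ask(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());
        // After this change, same arguments, new name:
        OllamaResult result =
                ollamaAPI.generate(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());
        System.out.println(result.getResponse());

        // The async variant is renamed the same way: askAsync(...) becomes generateAsync(...).
    }
}
```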
```diff
@@ -377,7 +384,7 @@ public class OllamaAPI {
    *     details on the options</a>
    * @return OllamaResult that includes response text and time taken for response
    */
-  public OllamaResult askWithImageFiles(
+  public OllamaResult generateWithImageFiles(
       String model, String prompt, List<File> imageFiles, Options options)
       throws OllamaBaseException, IOException, InterruptedException {
     List<String> images = new ArrayList<>();
@@ -386,7 +393,7 @@ public class OllamaAPI {
     }
     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
     ollamaRequestModel.setOptions(options.getOptionsMap());
-    return askSync(ollamaRequestModel);
+    return generateSyncForOllamaRequestModel(ollamaRequestModel);
   }
 
   /**
@@ -401,7 +408,7 @@ public class OllamaAPI {
    *     details on the options</a>
    * @return OllamaResult that includes response text and time taken for response
    */
-  public OllamaResult askWithImageURLs(
+  public OllamaResult generateWithImageURLs(
       String model, String prompt, List<String> imageURLs, Options options)
       throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
     List<String> images = new ArrayList<>();
@@ -410,9 +417,50 @@ public class OllamaAPI {
     }
     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
     ollamaRequestModel.setOptions(options.getOptionsMap());
-    return askSync(ollamaRequestModel);
+    return generateSyncForOllamaRequestModel(ollamaRequestModel);
   }
 
+  /**
+   * Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the api
+   * 'api/chat'.
+   *
+   * @param model the ollama model to ask the question to
+   * @param messages chat history / message stack to send to the model
+   * @return {@link OllamaChatResult} containing the api response and the message history including the newly aqcuired assistant response.
+   * @throws OllamaBaseException any response code than 200 has been returned
+   * @throws IOException in case the responseStream can not be read
+   * @throws InterruptedException in case the server is not reachable or network issues happen
+   */
+  public OllamaChatResult chat(String model, List<OllamaChatMessage> messages) throws OllamaBaseException, IOException, InterruptedException{
+    OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model);
+    return chat(builder.withMessages(messages).build());
+  }
+
+  /**
+   * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
+   *
+   * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
+   *
+   * @param request request object to be sent to the server
+   * @return
+   * @throws OllamaBaseException any response code than 200 has been returned
+   * @throws IOException in case the responseStream can not be read
+   * @throws InterruptedException in case the server is not reachable or network issues happen
+   */
+  public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException{
+    OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
+    //TODO: implement async way
+    if(request.isStream()){
+      throw new UnsupportedOperationException("Streamed chat responses are not implemented yet");
+    }
+    OllamaResult result = requestCaller.generateSync(request);
+    return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages());
+  }
+
+  // technical private methods //
+
   private static String encodeFileToBase64(File file) throws IOException {
     return Base64.getEncoder().encodeToString(Files.readAllBytes(file.toPath()));
   }
```
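The first new overload simply wraps a caller-supplied message list in a builder, so code that keeps its own history can pass it straight in. A hedged fragment of that usage, assuming an already configured `OllamaAPI` instance as in the docs above and the assumed `OllamaModelType` constants:

```java
List<OllamaChatMessage> history = new ArrayList<>();
history.add(new OllamaChatMessage(OllamaChatMessageRole.SYSTEM, "Answer in one short sentence."));
history.add(new OllamaChatMessage(OllamaChatMessageRole.USER, "What is the capital of France?"));

// The overload builds the OllamaChatRequestModel internally and delegates to chat(request).
OllamaChatResult chatResult = ollamaAPI.chat(OllamaModelType.LLAMA2, history);
System.out.println(chatResult.getResponse());

// getChatHistory() already includes the assistant's reply, ready for the next turn.
history = chatResult.getChatHistory();
```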
```diff
@@ -435,57 +483,10 @@ public class OllamaAPI {
     }
   }
 
-  private OllamaResult askSync(OllamaRequestModel ollamaRequestModel)
+  private OllamaResult generateSyncForOllamaRequestModel(OllamaRequestModel ollamaRequestModel)
       throws OllamaBaseException, IOException, InterruptedException {
-    long startTime = System.currentTimeMillis();
-    HttpClient httpClient = HttpClient.newHttpClient();
-    URI uri = URI.create(this.host + "/api/generate");
-    HttpRequest.Builder requestBuilder =
-        getRequestBuilderDefault(uri)
-            .POST(
-                HttpRequest.BodyPublishers.ofString(
-                    Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)));
-    HttpRequest request = requestBuilder.build();
-    if (verbose) logger.info("Asking model: " + ollamaRequestModel);
-    HttpResponse<InputStream> response =
-        httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
-    int statusCode = response.statusCode();
-    InputStream responseBodyStream = response.body();
-    StringBuilder responseBuffer = new StringBuilder();
-    try (BufferedReader reader =
-        new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
-      String line;
-      while ((line = reader.readLine()) != null) {
-        if (statusCode == 404) {
-          logger.warn("Status code: 404 (Not Found)");
-          OllamaErrorResponseModel ollamaResponseModel =
-              Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
-          responseBuffer.append(ollamaResponseModel.getError());
-        } else if (statusCode == 401) {
-          logger.warn("Status code: 401 (Unauthorized)");
-          OllamaErrorResponseModel ollamaResponseModel =
-              Utils.getObjectMapper()
-                  .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class);
-          responseBuffer.append(ollamaResponseModel.getError());
-        } else {
-          OllamaResponseModel ollamaResponseModel =
-              Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
-          if (!ollamaResponseModel.isDone()) {
-            responseBuffer.append(ollamaResponseModel.getResponse());
-          }
-        }
-      }
-    }
-    if (statusCode != 200) {
-      logger.error("Status code " + statusCode);
-      throw new OllamaBaseException(responseBuffer.toString());
-    } else {
-      long endTime = System.currentTimeMillis();
-      OllamaResult ollamaResult =
-          new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode);
-      if (verbose) logger.info("Model response: " + ollamaResult);
-      return ollamaResult;
-    }
+    OllamaGenerateEndpointCaller requestCaller = new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
+    return requestCaller.generateSync(ollamaRequestModel);
   }
 
   /**
```
```diff
@@ -7,6 +7,7 @@ import lombok.Data;
 public class Model {
 
   private String name;
+  private String model;
   @JsonProperty("modified_at")
   private String modifiedAt;
   private String digest;
```
```diff
@@ -1,8 +1,6 @@
 package io.github.amithkoujalgi.ollama4j.core.models;
 
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.List;
 import lombok.Data;
 
 @Data
```
OllamaRequestModel (package io.github.amithkoujalgi.ollama4j.core.models)

```diff
@@ -3,12 +3,15 @@ package io.github.amithkoujalgi.ollama4j.core.models;
 import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
+import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
 import java.util.List;
 import java.util.Map;
 import lombok.Data;
 
 @Data
-public class OllamaRequestModel {
+public class OllamaRequestModel implements OllamaRequestBody{
 
   private String model;
   private String prompt;
```
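The `OllamaRequestBody` interface that `OllamaRequestModel` now implements is not included in this comparison. Purely as an assumption, inferred from the later call to `body.getBodyPublisher()` in `OllamaEndpointCaller.generateSync`, it presumably looks something like this Jackson-backed sketch:

```java
// Hypothetical sketch only; the real interface lives in
// io.github.amithkoujalgi.ollama4j.core.utils and is not shown in this diff.
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonProcessingException;
import java.net.http.HttpRequest;

public interface OllamaRequestBody {

    @JsonIgnore
    default HttpRequest.BodyPublisher getBodyPublisher() {
        try {
            // Serialize the implementing request model (generate or chat) to JSON.
            return HttpRequest.BodyPublishers.ofString(
                    Utils.getObjectMapper().writeValueAsString(this));
        } catch (JsonProcessingException e) {
            throw new IllegalArgumentException("Request not serializable", e);
        }
    }
}
```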
New file: OllamaChatMessage (io.github.amithkoujalgi.ollama4j.core.models.chat), 41 lines

```java
package io.github.amithkoujalgi.ollama4j.core.models.chat;

import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;

import com.fasterxml.jackson.core.JsonProcessingException;
import java.io.File;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;

/**
 * Defines a single Message to be used inside a chat request against the ollama /api/chat endpoint.
 *
 * @see <a href="https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate chat completion</a>
 */
@Data
@AllArgsConstructor
@RequiredArgsConstructor
@NoArgsConstructor
public class OllamaChatMessage {

    @NonNull
    private OllamaChatMessageRole role;

    @NonNull
    private String content;

    private List<File> images;

    @Override
    public String toString() {
        try {
            return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
}
```
New file: OllamaChatMessageRole (io.github.amithkoujalgi.ollama4j.core.models.chat), 19 lines

```java
package io.github.amithkoujalgi.ollama4j.core.models.chat;

import com.fasterxml.jackson.annotation.JsonValue;

/**
 * Defines the possible Chat Message roles.
 */
public enum OllamaChatMessageRole {
    SYSTEM("system"),
    USER("user"),
    ASSISTANT("assistant");

    @JsonValue
    private String roleName;

    private OllamaChatMessageRole(String roleName) {
        this.roleName = roleName;
    }
}
```
New file: OllamaChatRequestBuilder (io.github.amithkoujalgi.ollama4j.core.models.chat), 68 lines

```java
package io.github.amithkoujalgi.ollama4j.core.models.chat;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

import io.github.amithkoujalgi.ollama4j.core.utils.Options;

/**
 * Helper class for creating {@link OllamaChatRequestModel} objects using the builder-pattern.
 */
public class OllamaChatRequestBuilder {

    private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages) {
        request = new OllamaChatRequestModel(model, messages);
    }

    private OllamaChatRequestModel request;

    public static OllamaChatRequestBuilder getInstance(String model) {
        return new OllamaChatRequestBuilder(model, new ArrayList<>());
    }

    public OllamaChatRequestModel build() {
        return request;
    }

    public void reset() {
        request = new OllamaChatRequestModel(request.getModel(), new ArrayList<>());
    }

    public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, File... images) {
        List<OllamaChatMessage> messages = this.request.getMessages();
        messages.add(new OllamaChatMessage(role, content, List.of(images)));
        return this;
    }

    public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages) {
        this.request.getMessages().addAll(messages);
        return this;
    }

    public OllamaChatRequestBuilder withOptions(Options options) {
        this.request.setOptions(options);
        return this;
    }

    public OllamaChatRequestBuilder withFormat(String format) {
        this.request.setFormat(format);
        return this;
    }

    public OllamaChatRequestBuilder withTemplate(String template) {
        this.request.setTemplate(template);
        return this;
    }

    public OllamaChatRequestBuilder withStreaming() {
        this.request.setStream(true);
        return this;
    }

    public OllamaChatRequestBuilder withKeepAlive(String keepAlive) {
        this.request.setKeepAlive(keepAlive);
        return this;
    }
}
```
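Besides the message methods used in the chat docs, the builder carries request-level settings (format, template, keep-alive, options, streaming) and a `reset()` for reuse. A hedged fragment showing a few of those knobs; the values are illustrative only and an existing `OllamaAPI` instance is assumed:

```java
OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);

OllamaChatRequestModel request = builder
        .withMessage(OllamaChatMessageRole.USER, "Reply with a JSON object listing three colors.")
        .withFormat("json")      // ask Ollama for JSON-formatted output
        .withKeepAlive("5m")     // keep the model loaded between requests
        .build();
// withOptions(...), withTemplate(...) and withStreaming() can be chained the same way,
// though streamed chat responses are not implemented yet per OllamaAPI.chat above.

OllamaChatResult result = ollamaAPI.chat(request);
System.out.println(result.getResponse());

// reset() discards the accumulated messages but keeps the model,
// so the same builder can be reused for an unrelated conversation.
builder.reset();
```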
New file: OllamaChatRequestModel (io.github.amithkoujalgi.ollama4j.core.models.chat), 47 lines

```java
package io.github.amithkoujalgi.ollama4j.core.models.chat;

import java.util.List;

import com.fasterxml.jackson.core.JsonProcessingException;

import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Options;

import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;

/**
 * Defines a Request to use against the ollama /api/chat endpoint.
 *
 * @see <a
 *     href="https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate
 *     Chat Completion</a>
 */
@Data
@AllArgsConstructor
@RequiredArgsConstructor
public class OllamaChatRequestModel implements OllamaRequestBody {

    @NonNull private String model;

    @NonNull private List<OllamaChatMessage> messages;

    private String format;
    private Options options;
    private String template;
    private boolean stream;
    private String keepAlive;

    @Override
    public String toString() {
        try {
            return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
}
```
New file: OllamaChatResponseModel (io.github.amithkoujalgi.ollama4j.core.models.chat), 21 lines

```java
package io.github.amithkoujalgi.ollama4j.core.models.chat;

import com.fasterxml.jackson.annotation.JsonProperty;

import java.util.List;
import lombok.Data;

@Data
public class OllamaChatResponseModel {
    private String model;
    private @JsonProperty("created_at") String createdAt;
    private OllamaChatMessage message;
    private boolean done;
    private List<Integer> context;
    private @JsonProperty("total_duration") Long totalDuration;
    private @JsonProperty("load_duration") Long loadDuration;
    private @JsonProperty("prompt_eval_duration") Long promptEvalDuration;
    private @JsonProperty("eval_duration") Long evalDuration;
    private @JsonProperty("prompt_eval_count") Integer promptEvalCount;
    private @JsonProperty("eval_count") Integer evalCount;
}
```
New file: OllamaChatResult (io.github.amithkoujalgi.ollama4j.core.models.chat), 32 lines

```java
package io.github.amithkoujalgi.ollama4j.core.models.chat;

import java.util.List;

import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;

/**
 * Specific chat-API result that contains the chat history sent to the model and appends the answer as {@link OllamaChatResult} given by the
 * {@link OllamaChatMessageRole#ASSISTANT} role.
 */
public class OllamaChatResult extends OllamaResult {

    private List<OllamaChatMessage> chatHistory;

    public OllamaChatResult(String response, long responseTime, int httpStatusCode,
            List<OllamaChatMessage> chatHistory) {
        super(response, responseTime, httpStatusCode);
        this.chatHistory = chatHistory;
        appendAnswerToChatHistory(response);
    }

    public List<OllamaChatMessage> getChatHistory() {
        return chatHistory;
    }

    private void appendAnswerToChatHistory(String answer) {
        OllamaChatMessage assistantMessage = new OllamaChatMessage(OllamaChatMessageRole.ASSISTANT, answer);
        this.chatHistory.add(assistantMessage);
    }
}
```
New file: OllamaChatEndpointCaller (io.github.amithkoujalgi.ollama4j.core.models.request), 44 lines

```java
package io.github.amithkoujalgi.ollama4j.core.models.request;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.core.JsonProcessingException;

import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

/**
 * Specialization class for requests
 */
public class OllamaChatEndpointCaller extends OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class);

    public OllamaChatEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        super(host, basicAuth, requestTimeoutSeconds, verbose);
    }

    @Override
    protected String getEndpointSuffix() {
        return "/api/chat";
    }

    @Override
    protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
        try {
            OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
            responseBuffer.append(ollamaResponseModel.getMessage().getContent());
            return ollamaResponseModel.isDone();
        } catch (JsonProcessingException e) {
            LOG.error("Error parsing the Ollama chat response!", e);
            return true;
        }
    }
}
```
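This class and the abstract `OllamaEndpointCaller` below form a template method: the base class owns the HTTP plumbing, while each subclass only names its endpoint and decides how one streamed JSON line is folded into the response buffer. The `OllamaGenerateEndpointCaller` referenced earlier is not shown in this comparison, so the subclass below is a hypothetical illustration of that extension point, reusing `OllamaResponseModel` exactly as the code removed from `OllamaAPI` did:

```java
// Hypothetical sketch of a subclass; OllamaGenerateEndpointCaller is referenced in this
// comparison but its body is not shown, so the details here are assumptions.
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResponseModel;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

public class ExampleGenerateEndpointCaller extends OllamaEndpointCaller {

    public ExampleGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        super(host, basicAuth, requestTimeoutSeconds, verbose);
    }

    @Override
    protected String getEndpointSuffix() {
        // Each subclass only knows which endpoint it talks to.
        return "/api/generate";
    }

    @Override
    protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
        try {
            // Fold one streamed JSON line into the aggregated response text.
            OllamaResponseModel responseModel = Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
            if (!responseModel.isDone()) {
                responseBuffer.append(responseModel.getResponse());
            }
            return responseModel.isDone();
        } catch (JsonProcessingException e) {
            return true; // stop reading on unparsable input
        }
    }
}
```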
@@ -0,0 +1,150 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Base64;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

/**
 * Abstract helper class to call the Ollama API server.
 */
public abstract class OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaAPI.class);

    private String host;
    private BasicAuth basicAuth;
    private long requestTimeoutSeconds;
    private boolean verbose;

    public OllamaEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        this.host = host;
        this.basicAuth = basicAuth;
        this.requestTimeoutSeconds = requestTimeoutSeconds;
        this.verbose = verbose;
    }

    protected abstract String getEndpointSuffix();

    protected abstract boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer);

    /**
     * Calls the API server on the given host and endpoint suffix synchronously, i.e. waiting for the response.
     *
     * @param body POST body payload
     * @return result answer given by the assistant
     * @throws OllamaBaseException  if a response code other than 200 is returned
     * @throws IOException          in case the response stream cannot be read
     * @throws InterruptedException in case the server is not reachable or network issues happen
     */
    public OllamaResult generateSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException {

        // Create Request
        long startTime = System.currentTimeMillis();
        HttpClient httpClient = HttpClient.newHttpClient();
        URI uri = URI.create(this.host + getEndpointSuffix());
        HttpRequest.Builder requestBuilder =
            getRequestBuilderDefault(uri)
                .POST(
                    body.getBodyPublisher());
        HttpRequest request = requestBuilder.build();
        if (this.verbose) LOG.info("Asking model: " + body.toString());
        HttpResponse<InputStream> response =
            httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());

        int statusCode = response.statusCode();
        InputStream responseBodyStream = response.body();
        StringBuilder responseBuffer = new StringBuilder();
        try (BufferedReader reader =
            new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                if (statusCode == 404) {
                    LOG.warn("Status code: 404 (Not Found)");
                    OllamaErrorResponseModel ollamaResponseModel =
                        Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else if (statusCode == 401) {
                    LOG.warn("Status code: 401 (Unauthorized)");
                    OllamaErrorResponseModel ollamaResponseModel =
                        Utils.getObjectMapper()
                            .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class);
                    responseBuffer.append(ollamaResponseModel.getError());
                } else {
                    boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);
                    if (finished) {
                        break;
                    }
                }
            }
        }

        if (statusCode != 200) {
            LOG.error("Status code " + statusCode);
            throw new OllamaBaseException(responseBuffer.toString());
        } else {
            long endTime = System.currentTimeMillis();
            OllamaResult ollamaResult =
                new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode);
            if (verbose) LOG.info("Model response: " + ollamaResult);
            return ollamaResult;
        }
    }

    /**
     * Get default request builder.
     *
     * @param uri URI to get a HttpRequest.Builder
     * @return HttpRequest.Builder
     */
    private HttpRequest.Builder getRequestBuilderDefault(URI uri) {
        HttpRequest.Builder requestBuilder =
            HttpRequest.newBuilder(uri)
                .header("Content-Type", "application/json")
                .timeout(Duration.ofSeconds(this.requestTimeoutSeconds));
        if (isBasicAuthCredentialsSet()) {
            requestBuilder.header("Authorization", getBasicAuthHeaderValue());
        }
        return requestBuilder;
    }

    /**
     * Get basic authentication header value.
     *
     * @return basic authentication header value (encoded credentials)
     */
    private String getBasicAuthHeaderValue() {
        String credentialsToEncode = this.basicAuth.getUsername() + ":" + this.basicAuth.getPassword();
        return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes());
    }

    /**
     * Check if Basic Auth credentials are set.
     *
     * @return true when Basic Auth credentials are set
     */
    private boolean isBasicAuthCredentialsSet() {
        return this.basicAuth != null;
    }

}
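As a worked example (illustrative values, not part of the diff), the Authorization header produced by getBasicAuthHeaderValue() for the username "admin" and password "secret" would be:

// Mirrors getBasicAuthHeaderValue() above, with made-up credentials.
String credentialsToEncode = "admin" + ":" + "secret";
String header = "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes());
// header == "Basic YWRtaW46c2VjcmV0"; it is attached via requestBuilder.header("Authorization", header).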
@@ -0,0 +1,40 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.core.JsonProcessingException;

import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResponseModel;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class);

    public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        super(host, basicAuth, requestTimeoutSeconds, verbose);
    }

    @Override
    protected String getEndpointSuffix() {
        return "/api/generate";
    }

    @Override
    protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
        try {
            OllamaResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
            responseBuffer.append(ollamaResponseModel.getResponse());
            return ollamaResponseModel.isDone();
        } catch (JsonProcessingException e) {
            LOG.error("Error parsing the Ollama generate response!", e);
            return true;
        }
    }

}
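For context, a rough sketch (not part of the diff) of what this caller consumes: /api/generate streams one JSON object per line, and each line is parsed into an OllamaResponseModel whose response fragment is appended until a chunk reports done. The sample line below is hand-written and simplified; real chunks carry more fields.

// Sketch; exception handling omitted. Assumes the classes from this change set are on the classpath.
String sampleLine = "{\"response\":\"Paris\",\"done\":false}";
OllamaResponseModel chunk = Utils.getObjectMapper().readValue(sampleLine, OllamaResponseModel.class);
StringBuilder responseBuffer = new StringBuilder();
responseBuffer.append(chunk.getResponse()); // accumulates the partial answer ("Paris")
boolean finished = chunk.isDone();          // false here; a chunk with done=true ends the read loop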
@@ -0,0 +1,28 @@
package io.github.amithkoujalgi.ollama4j.core.utils;

import java.net.http.HttpRequest.BodyPublisher;
import java.net.http.HttpRequest.BodyPublishers;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonProcessingException;

/**
 * Interface to represent an OllamaRequest as an HTTP request body via {@link BodyPublishers}.
 */
public interface OllamaRequestBody {

    /**
     * Transforms the OllamaRequest object into a JSON string via Jackson.
     *
     * @return JSON representation of an OllamaRequest
     */
    @JsonIgnore
    default BodyPublisher getBodyPublisher() {
        try {
            return BodyPublishers.ofString(
                Utils.getObjectMapper().writeValueAsString(this));
        } catch (JsonProcessingException e) {
            throw new IllegalArgumentException("Request not Body convertible.", e);
        }
    }
}
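A minimal sketch (hypothetical class, not part of the diff) showing how a Jackson-serializable request model opts into the default publisher just by implementing the interface:

// Hypothetical request class; Jackson serializes its public fields.
class PingRequest implements OllamaRequestBody {
    public String model = "qwen:0.5b";
    public String prompt = "ping";
}

// Produces a BodyPublisher over {"model":"qwen:0.5b","prompt":"ping"},
// ready to be handed to HttpRequest.Builder.POST(...).
java.net.http.HttpRequest.BodyPublisher publisher = new PingRequest().getBodyPublisher();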
@@ -5,7 +5,10 @@ import static org.junit.jupiter.api.Assertions.*;
 import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
 import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
 import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
-import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
+import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
+import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
+import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
+import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
 import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
 import java.io.File;
 import java.io.IOException;
@@ -16,26 +19,14 @@ import java.net.http.HttpConnectTimeoutException;
 import java.util.List;
 import java.util.Objects;
 import java.util.Properties;
+import lombok.Data;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Order;
 import org.junit.jupiter.api.Test;

 class TestRealAPIs {
   OllamaAPI ollamaAPI;
+  Config config;

-  private Properties loadProperties() {
-    Properties properties = new Properties();
-    try (InputStream input =
-        getClass().getClassLoader().getResourceAsStream("test-config.properties")) {
-      if (input == null) {
-        throw new RuntimeException("Sorry, unable to find test-config.properties");
-      }
-      properties.load(input);
-      return properties;
-    } catch (IOException e) {
-      throw new RuntimeException("Error loading properties", e);
-    }
-  }
-
   private File getImageFileFromClasspath(String fileName) {
     ClassLoader classLoader = getClass().getClassLoader();
@@ -44,9 +35,9 @@ class TestRealAPIs {

   @BeforeEach
   void setUp() {
-    Properties properties = loadProperties();
-    ollamaAPI = new OllamaAPI(properties.getProperty("ollama.api.url"));
-    ollamaAPI.setRequestTimeoutSeconds(20);
+    config = new Config();
+    ollamaAPI = new OllamaAPI(config.getOllamaURL());
+    ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds());
   }

   @Test
@@ -85,10 +76,10 @@ class TestRealAPIs {
   void testPullModel() {
     testEndpointReachability();
     try {
-      ollamaAPI.pullModel(OllamaModelType.LLAMA2);
+      ollamaAPI.pullModel(config.getModel());
       boolean found =
           ollamaAPI.listModels().stream()
-              .anyMatch(model -> model.getModelName().equals(OllamaModelType.LLAMA2));
+              .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel()));
       assertTrue(found);
     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
       throw new RuntimeException(e);
@@ -101,8 +92,8 @@ class TestRealAPIs {
     testEndpointReachability();
     try {
       OllamaResult result =
-          ollamaAPI.ask(
-              OllamaModelType.LLAMA2,
+          ollamaAPI.generate(
+              config.getModel(),
               "What is the capital of France? And what's France's connection with Mona Lisa?",
               new OptionsBuilder().build());
       assertNotNull(result);
@@ -119,8 +110,8 @@ class TestRealAPIs {
     testEndpointReachability();
     try {
       OllamaResult result =
-          ollamaAPI.ask(
-              OllamaModelType.LLAMA2,
+          ollamaAPI.generate(
+              config.getModel(),
               "What is the capital of France? And what's France's connection with Mona Lisa?",
               new OptionsBuilder().setTemperature(0.9f).build());
       assertNotNull(result);
@@ -131,6 +122,46 @@ class TestRealAPIs {
     }
   }

+  @Test
+  @Order(3)
+  void testChat() {
+    testEndpointReachability();
+    try {
+      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
+      OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
+          .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!")
+          .withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?")
+          .build();
+
+      OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
+      assertNotNull(chatResult);
+      assertFalse(chatResult.getResponse().isBlank());
+      assertEquals(4, chatResult.getChatHistory().size());
+    } catch (IOException | OllamaBaseException | InterruptedException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Test
+  @Order(3)
+  void testChatWithSystemPrompt() {
+    testEndpointReachability();
+    try {
+      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
+      OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
+          .withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
+          .build();
+
+      OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
+      assertNotNull(chatResult);
+      assertFalse(chatResult.getResponse().isBlank());
+      assertTrue(chatResult.getResponse().startsWith("NI"));
+      assertEquals(3, chatResult.getChatHistory().size());
+    } catch (IOException | OllamaBaseException | InterruptedException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
   @Test
   @Order(3)
   void testAskModelWithOptionsAndImageFiles() {
@@ -138,8 +169,8 @@ class TestRealAPIs {
     File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
     try {
       OllamaResult result =
-          ollamaAPI.askWithImageFiles(
-              OllamaModelType.LLAVA,
+          ollamaAPI.generateWithImageFiles(
+              config.getImageModel(),
               "What is in this image?",
               List.of(imageFile),
               new OptionsBuilder().build());
@@ -157,8 +188,8 @@ class TestRealAPIs {
     testEndpointReachability();
     try {
       OllamaResult result =
-          ollamaAPI.askWithImageURLs(
-              OllamaModelType.LLAVA,
+          ollamaAPI.generateWithImageURLs(
+              config.getImageModel(),
               "What is in this image?",
               List.of(
                   "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
@@ -171,3 +202,29 @@ class TestRealAPIs {
       }
     }
 }
+
+@Data
+class Config {
+  private String ollamaURL;
+  private String model;
+  private String imageModel;
+  private int requestTimeoutSeconds;
+
+  public Config() {
+    Properties properties = new Properties();
+    try (InputStream input =
+        getClass().getClassLoader().getResourceAsStream("test-config.properties")) {
+      if (input == null) {
+        throw new RuntimeException("Sorry, unable to find test-config.properties");
+      }
+      properties.load(input);
+      this.ollamaURL = properties.getProperty("ollama.url");
+      this.model = properties.getProperty("ollama.model");
+      this.imageModel = properties.getProperty("ollama.model.image");
+      this.requestTimeoutSeconds =
+          Integer.parseInt(properties.getProperty("ollama.request-timeout-seconds"));
+    } catch (IOException e) {
+      throw new RuntimeException("Error loading properties", e);
+    }
+  }
+}
@@ -103,10 +103,10 @@ class TestMockedAPIs {
     String prompt = "some prompt text";
     OptionsBuilder optionsBuilder = new OptionsBuilder();
     try {
-      when(ollamaAPI.ask(model, prompt, optionsBuilder.build()))
+      when(ollamaAPI.generate(model, prompt, optionsBuilder.build()))
           .thenReturn(new OllamaResult("", 0, 200));
-      ollamaAPI.ask(model, prompt, optionsBuilder.build());
-      verify(ollamaAPI, times(1)).ask(model, prompt, optionsBuilder.build());
+      ollamaAPI.generate(model, prompt, optionsBuilder.build());
+      verify(ollamaAPI, times(1)).generate(model, prompt, optionsBuilder.build());
     } catch (IOException | OllamaBaseException | InterruptedException e) {
       throw new RuntimeException(e);
     }
@@ -118,13 +118,14 @@ class TestMockedAPIs {
     String model = OllamaModelType.LLAMA2;
     String prompt = "some prompt text";
     try {
-      when(ollamaAPI.askWithImageFiles(
+      when(ollamaAPI.generateWithImageFiles(
               model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
           .thenReturn(new OllamaResult("", 0, 200));
-      ollamaAPI.askWithImageFiles(
+      ollamaAPI.generateWithImageFiles(
           model, prompt, Collections.emptyList(), new OptionsBuilder().build());
       verify(ollamaAPI, times(1))
-          .askWithImageFiles(model, prompt, Collections.emptyList(), new OptionsBuilder().build());
+          .generateWithImageFiles(
+              model, prompt, Collections.emptyList(), new OptionsBuilder().build());
     } catch (IOException | OllamaBaseException | InterruptedException e) {
       throw new RuntimeException(e);
     }
@@ -136,13 +137,14 @@ class TestMockedAPIs {
     String model = OllamaModelType.LLAMA2;
     String prompt = "some prompt text";
     try {
-      when(ollamaAPI.askWithImageURLs(
+      when(ollamaAPI.generateWithImageURLs(
              model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
          .thenReturn(new OllamaResult("", 0, 200));
-      ollamaAPI.askWithImageURLs(
+      ollamaAPI.generateWithImageURLs(
          model, prompt, Collections.emptyList(), new OptionsBuilder().build());
      verify(ollamaAPI, times(1))
-          .askWithImageURLs(model, prompt, Collections.emptyList(), new OptionsBuilder().build());
+          .generateWithImageURLs(
+              model, prompt, Collections.emptyList(), new OptionsBuilder().build());
    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
      throw new RuntimeException(e);
    }
@@ -153,9 +155,9 @@ class TestMockedAPIs {
     OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
     String model = OllamaModelType.LLAMA2;
     String prompt = "some prompt text";
-    when(ollamaAPI.askAsync(model, prompt))
+    when(ollamaAPI.generateAsync(model, prompt))
         .thenReturn(new OllamaAsyncResultCallback(null, null, 3));
-    ollamaAPI.askAsync(model, prompt);
-    verify(ollamaAPI, times(1)).askAsync(model, prompt);
+    ollamaAPI.generateAsync(model, prompt);
+    verify(ollamaAPI, times(1)).generateAsync(model, prompt);
   }
 }
@@ -1,2 +1,4 @@
-ollama.api.url=http://192.168.29.223:11434
-ollama.model=llava
+ollama.url=http://localhost:11434
+ollama.model=qwen:0.5b
+ollama.model.image=llava
+ollama.request-timeout-seconds=120