forked from Mirror/ollama4j

Compare commits

163 Commits
SHA1:

e33ad1a1e3, cd60c506cb, b55925df28, 3a9b8c309d, bf07159522, f8ca4d041d, 9c6a55f7b0, 2866d83a2f,
45e5d07581, 3a264cb6bb, e1b9d42771, 1a086c37c0, 54edba144c, 3ed3187ba9, b7cd81a7f5, e750c2d7f9,
62f16131f3, 2cbaf12d7c, e2d555d404, c296b34174, e8f99f28ec, 250b1abc79, 42b15ad93f, 6f7a714bae,
92618e5084, 391a9242c3, e1b6dc3b54, 04124cf978, e4e717b747, 10d2a8f5ff, 899fa38805, 2df878c953,
78a5eedc8f, 364f961ee2, b21aa6add2, ec4abd1c2d, 9900ae92fb, fa20daf6e5, 44949c0559, e88711a017,
32169ded18, 4b2d566fd9, fb4b7a7ce5, 18f27775b0, cb462ad05a, 1eec22ca1a, c1f3c51f88, 7dd556293f,
ee50131ce4, 2cd47dbfaa, e5296c1067, 0f00f05e3d, 976a3b82e5, ba26d620c4, e45246a767, 7336668f0c,
11701fb222, b1ec12c4e9, d0b0a0fc97, 20774fca6b, 9c46b510d8, 9d887b60a8, 63d4de4e24, a10692e2f1,
b0c152a42e, f44767e023, aadef0a57c, 777ee7ffe0, dcf1d0bdbc, 13b7111a42, 09442d37a3, 1e66bdb07f,
b423090db9, a32d94efbf, 31f8302849, 6487756764, abb76ad867, cf4e7a96e8, 0f414f71a3, 2b700fdad8,
06c5daa253, 91aab6cbd1, f38a00ebdc, 0f73ea75ab, 8fe869afdb, 2d274c4f5b, 713a3239a4, a9e7958d44,
f38e84053f, 7eb16b7ba0, 5a3889d8ee, 2c52f4d0bb, 32c4231eb5, e9621f054d, b41b62220c, c89440cbca,
1aeb555a53, 9aff3ec5d9, b4eaf0cfb5, 199cb6082d, 37bfe26a6d, 3769386539, 84a6e57f42, 14d2474ee9,
ca613ed80a, bbcd458849, bc885894f8, bc83df6971, 43f43c9f81, 65f00defcf, d716b81342, 272ba445f6,
d9816d8869, 874736eb16, 9c16ccbf81, 40a3aa31dc, 90669b611b, f10c7ac725, 38dca3cd0d, 44bb35b168,
9832caf503, 0c4e8e306e, 075416eb9c, 4260fbbc32, 0bec697a86, 4ca6eef8fd, a635dd9be2, 14982011d9,
65d852fdc9, d483c23c81, 273b1e47ca, 5c5cdba4cd, 24674ea483, 5d3a975e4c, ad670c3c62, f9063484f3,
5e2a07ad41, 00a3e51a93, bc20468f28, c7ac50a805, f8cd7bc013, 3469bf314b, 9636807819, 455251d1d4,
ec00ffae7f, d969c7ad46, 02bf769188, 1c8a6b4f2a, 60fe5d6ffb, 327ae7437f, 795b9f2b9b, 54da069e68,
bfc5cebac1, d46b1d48d8, 96320e7761, e6472f0a81, 816bbd9bbf, da1123271d, 12f099260f, 35728ae208,
7dba9cc798, bb1c920e22, 770cbd7639
.github/workflows/publish-docs.yml (vendored, 6 changes)

```
@@ -50,6 +50,12 @@ jobs:
      - name: Build with Maven
        run: mvn --file pom.xml -U clean package && cp -r ./target/apidocs/. ./docs/build/apidocs

      - name: Doxygen Action
        uses: mattnotmitt/doxygen-action@v1.1.0
        with:
          doxyfile-path: "./Doxyfile"
          working-directory: "."

      - name: Setup Pages
        uses: actions/configure-pages@v3
      - name: Upload artifact
```

Doxyfile (new file, 413 lines)

```
@@ -0,0 +1,413 @@
# Doxyfile 1.10.0

#---------------------------------------------------------------------------
# Project related configuration options
#---------------------------------------------------------------------------
DOXYFILE_ENCODING      = UTF-8
PROJECT_NAME           = "Ollama4j"
PROJECT_NUMBER         =
PROJECT_BRIEF          = "A Java library (wrapper/binding) for Ollama server."
PROJECT_LOGO           = ./logo-small.png
PROJECT_ICON           = ./logo-small.png
OUTPUT_DIRECTORY       = ./docs/build/doxygen
CREATE_SUBDIRS         = NO
CREATE_SUBDIRS_LEVEL   = 8
ALLOW_UNICODE_NAMES    = NO
OUTPUT_LANGUAGE        = English
BRIEF_MEMBER_DESC      = YES
REPEAT_BRIEF           = YES
ABBREVIATE_BRIEF       = "The $name class" \
                         "The $name widget" \
                         "The $name file" \
                         is \
                         provides \
                         specifies \
                         contains \
                         represents \
                         a \
                         an \
                         the
ALWAYS_DETAILED_SEC    = NO
INLINE_INHERITED_MEMB  = NO
FULL_PATH_NAMES        = YES
STRIP_FROM_PATH        =
STRIP_FROM_INC_PATH    =
SHORT_NAMES            = NO
JAVADOC_AUTOBRIEF      = NO
JAVADOC_BANNER         = NO
QT_AUTOBRIEF           = NO
MULTILINE_CPP_IS_BRIEF = NO
PYTHON_DOCSTRING       = YES
INHERIT_DOCS           = YES
SEPARATE_MEMBER_PAGES  = NO
TAB_SIZE               = 4
ALIASES                =
OPTIMIZE_OUTPUT_FOR_C  = NO
OPTIMIZE_OUTPUT_JAVA   = YES
OPTIMIZE_FOR_FORTRAN   = NO
OPTIMIZE_OUTPUT_VHDL   = NO
OPTIMIZE_OUTPUT_SLICE  = NO
EXTENSION_MAPPING      =
MARKDOWN_SUPPORT       = YES
TOC_INCLUDE_HEADINGS   = 5
MARKDOWN_ID_STYLE      = DOXYGEN
AUTOLINK_SUPPORT       = YES
BUILTIN_STL_SUPPORT    = NO
CPP_CLI_SUPPORT        = NO
SIP_SUPPORT            = NO
IDL_PROPERTY_SUPPORT   = YES
DISTRIBUTE_GROUP_DOC   = NO
GROUP_NESTED_COMPOUNDS = NO
SUBGROUPING            = YES
INLINE_GROUPED_CLASSES = NO
INLINE_SIMPLE_STRUCTS  = NO
TYPEDEF_HIDES_STRUCT   = NO
LOOKUP_CACHE_SIZE      = 0
NUM_PROC_THREADS       = 1
TIMESTAMP              = NO
#---------------------------------------------------------------------------
# Build related configuration options
#---------------------------------------------------------------------------
EXTRACT_ALL            = YES
EXTRACT_PRIVATE        = NO
EXTRACT_PRIV_VIRTUAL   = NO
EXTRACT_PACKAGE        = NO
EXTRACT_STATIC         = NO
EXTRACT_LOCAL_CLASSES  = YES
EXTRACT_LOCAL_METHODS  = NO
EXTRACT_ANON_NSPACES   = NO
RESOLVE_UNNAMED_PARAMS = YES
HIDE_UNDOC_MEMBERS     = NO
HIDE_UNDOC_CLASSES     = NO
HIDE_FRIEND_COMPOUNDS  = NO
HIDE_IN_BODY_DOCS      = NO
INTERNAL_DOCS          = NO
CASE_SENSE_NAMES       = SYSTEM
HIDE_SCOPE_NAMES       = NO
HIDE_COMPOUND_REFERENCE= NO
SHOW_HEADERFILE        = YES
SHOW_INCLUDE_FILES     = YES
SHOW_GROUPED_MEMB_INC  = NO
FORCE_LOCAL_INCLUDES   = NO
INLINE_INFO            = YES
SORT_MEMBER_DOCS       = YES
SORT_BRIEF_DOCS        = NO
SORT_MEMBERS_CTORS_1ST = NO
SORT_GROUP_NAMES       = NO
SORT_BY_SCOPE_NAME     = NO
STRICT_PROTO_MATCHING  = NO
GENERATE_TODOLIST      = YES
GENERATE_TESTLIST      = YES
GENERATE_BUGLIST       = YES
GENERATE_DEPRECATEDLIST= YES
ENABLED_SECTIONS       =
MAX_INITIALIZER_LINES  = 30
SHOW_USED_FILES        = YES
SHOW_FILES             = YES
SHOW_NAMESPACES        = YES
FILE_VERSION_FILTER    =
LAYOUT_FILE            =
CITE_BIB_FILES         =
#---------------------------------------------------------------------------
# Configuration options related to warning and progress messages
#---------------------------------------------------------------------------
QUIET                  = NO
WARNINGS               = YES
WARN_IF_UNDOCUMENTED   = YES
WARN_IF_DOC_ERROR      = YES
WARN_IF_INCOMPLETE_DOC = YES
WARN_NO_PARAMDOC       = NO
WARN_IF_UNDOC_ENUM_VAL = NO
WARN_AS_ERROR          = NO
WARN_FORMAT            = "$file:$line: $text"
WARN_LINE_FORMAT       = "at line $line of file $file"
WARN_LOGFILE           =
#---------------------------------------------------------------------------
# Configuration options related to the input files
#---------------------------------------------------------------------------
INPUT                  = ./src/main
INPUT_ENCODING         = UTF-8
INPUT_FILE_ENCODING    =
FILE_PATTERNS          = *.c \
                         *.cc \
                         *.cxx \
                         *.cxxm \
                         *.cpp \
                         *.cppm \
                         *.ccm \
                         *.c++ \
                         *.c++m \
                         *.java \
                         *.ii \
                         *.ixx \
                         *.ipp \
                         *.i++ \
                         *.inl \
                         *.idl \
                         *.ddl \
                         *.odl \
                         *.h \
                         *.hh \
                         *.hxx \
                         *.hpp \
                         *.h++ \
                         *.ixx \
                         *.l \
                         *.cs \
                         *.d \
                         *.php \
                         *.php4 \
                         *.php5 \
                         *.phtml \
                         *.inc \
                         *.m \
                         *.markdown \
                         *.md \
                         *.mm \
                         *.dox \
                         *.py \
                         *.pyw \
                         *.f90 \
                         *.f95 \
                         *.f03 \
                         *.f08 \
                         *.f18 \
                         *.f \
                         *.for \
                         *.vhd \
                         *.vhdl \
                         *.ucf \
                         *.qsf \
                         *.ice
RECURSIVE              = YES
EXCLUDE                =
EXCLUDE_SYMLINKS       = NO
EXCLUDE_PATTERNS       =
EXCLUDE_SYMBOLS        =
EXAMPLE_PATH           =
EXAMPLE_PATTERNS       = *
EXAMPLE_RECURSIVE      = NO
IMAGE_PATH             =
INPUT_FILTER           =
FILTER_PATTERNS        =
FILTER_SOURCE_FILES    = NO
FILTER_SOURCE_PATTERNS =
USE_MDFILE_AS_MAINPAGE =
FORTRAN_COMMENT_AFTER  = 72
#---------------------------------------------------------------------------
# Configuration options related to source browsing
#---------------------------------------------------------------------------
SOURCE_BROWSER         = YES
INLINE_SOURCES         = NO
STRIP_CODE_COMMENTS    = YES
REFERENCED_BY_RELATION = NO
REFERENCES_RELATION    = NO
REFERENCES_LINK_SOURCE = YES
SOURCE_TOOLTIPS        = YES
USE_HTAGS              = NO
VERBATIM_HEADERS       = YES
CLANG_ASSISTED_PARSING = NO
CLANG_ADD_INC_PATHS    = YES
CLANG_OPTIONS          =
CLANG_DATABASE_PATH    =
#---------------------------------------------------------------------------
# Configuration options related to the alphabetical class index
#---------------------------------------------------------------------------
ALPHABETICAL_INDEX     = YES
IGNORE_PREFIX          =
#---------------------------------------------------------------------------
# Configuration options related to the HTML output
#---------------------------------------------------------------------------
GENERATE_HTML          = YES
HTML_OUTPUT            = html
HTML_FILE_EXTENSION    = .html
HTML_HEADER            =
HTML_FOOTER            =
HTML_STYLESHEET        =
HTML_EXTRA_STYLESHEET  =
HTML_EXTRA_FILES       =
HTML_COLORSTYLE        = LIGHT
HTML_COLORSTYLE_HUE    = 220
HTML_COLORSTYLE_SAT    = 100
HTML_COLORSTYLE_GAMMA  = 80
HTML_DYNAMIC_MENUS     = YES
HTML_DYNAMIC_SECTIONS  = NO
HTML_CODE_FOLDING      = YES
HTML_COPY_CLIPBOARD    = YES
HTML_PROJECT_COOKIE    =
HTML_INDEX_NUM_ENTRIES = 100
GENERATE_DOCSET        = NO
DOCSET_FEEDNAME        = "Doxygen generated docs"
DOCSET_FEEDURL         =
DOCSET_BUNDLE_ID       = org.doxygen.Project
DOCSET_PUBLISHER_ID    = org.doxygen.Publisher
DOCSET_PUBLISHER_NAME  = Publisher
GENERATE_HTMLHELP      = NO
CHM_FILE               =
HHC_LOCATION           =
GENERATE_CHI           = NO
CHM_INDEX_ENCODING     =
BINARY_TOC             = NO
TOC_EXPAND             = NO
SITEMAP_URL            =
GENERATE_QHP           = NO
QCH_FILE               =
QHP_NAMESPACE          = org.doxygen.Project
QHP_VIRTUAL_FOLDER     = doc
QHP_CUST_FILTER_NAME   =
QHP_CUST_FILTER_ATTRS  =
QHP_SECT_FILTER_ATTRS  =
QHG_LOCATION           =
GENERATE_ECLIPSEHELP   = NO
ECLIPSE_DOC_ID         = org.doxygen.Project
DISABLE_INDEX          = NO
GENERATE_TREEVIEW      = YES
FULL_SIDEBAR           = NO
ENUM_VALUES_PER_LINE   = 4
TREEVIEW_WIDTH         = 250
EXT_LINKS_IN_WINDOW    = NO
OBFUSCATE_EMAILS       = YES
HTML_FORMULA_FORMAT    = png
FORMULA_FONTSIZE       = 10
FORMULA_MACROFILE      =
USE_MATHJAX            = NO
MATHJAX_VERSION        = MathJax_2
MATHJAX_FORMAT         = HTML-CSS
MATHJAX_RELPATH        =
MATHJAX_EXTENSIONS     =
MATHJAX_CODEFILE       =
SEARCHENGINE           = YES
SERVER_BASED_SEARCH    = NO
EXTERNAL_SEARCH        = NO
SEARCHENGINE_URL       =
SEARCHDATA_FILE        = searchdata.xml
EXTERNAL_SEARCH_ID     =
EXTRA_SEARCH_MAPPINGS  =
#---------------------------------------------------------------------------
# Configuration options related to the LaTeX output
#---------------------------------------------------------------------------
GENERATE_LATEX         = YES
LATEX_OUTPUT           = latex
LATEX_CMD_NAME         =
MAKEINDEX_CMD_NAME     = makeindex
LATEX_MAKEINDEX_CMD    = makeindex
COMPACT_LATEX          = NO
PAPER_TYPE             = a4
EXTRA_PACKAGES         =
LATEX_HEADER           =
LATEX_FOOTER           =
LATEX_EXTRA_STYLESHEET =
LATEX_EXTRA_FILES      =
PDF_HYPERLINKS         = YES
USE_PDFLATEX           = YES
LATEX_BATCHMODE        = NO
LATEX_HIDE_INDICES     = NO
LATEX_BIB_STYLE        = plain
LATEX_EMOJI_DIRECTORY  =
#---------------------------------------------------------------------------
# Configuration options related to the RTF output
#---------------------------------------------------------------------------
GENERATE_RTF           = NO
RTF_OUTPUT             = rtf
COMPACT_RTF            = NO
RTF_HYPERLINKS         = NO
RTF_STYLESHEET_FILE    =
RTF_EXTENSIONS_FILE    =
#---------------------------------------------------------------------------
# Configuration options related to the man page output
#---------------------------------------------------------------------------
GENERATE_MAN           = NO
MAN_OUTPUT             = man
MAN_EXTENSION          = .3
MAN_SUBDIR             =
MAN_LINKS              = NO
#---------------------------------------------------------------------------
# Configuration options related to the XML output
#---------------------------------------------------------------------------
GENERATE_XML           = NO
XML_OUTPUT             = xml
XML_PROGRAMLISTING     = YES
XML_NS_MEMB_FILE_SCOPE = NO
#---------------------------------------------------------------------------
# Configuration options related to the DOCBOOK output
#---------------------------------------------------------------------------
GENERATE_DOCBOOK       = NO
DOCBOOK_OUTPUT         = docbook
#---------------------------------------------------------------------------
# Configuration options for the AutoGen Definitions output
#---------------------------------------------------------------------------
GENERATE_AUTOGEN_DEF   = NO
#---------------------------------------------------------------------------
# Configuration options related to Sqlite3 output
#---------------------------------------------------------------------------
GENERATE_SQLITE3       = NO
SQLITE3_OUTPUT         = sqlite3
SQLITE3_RECREATE_DB    = YES
#---------------------------------------------------------------------------
# Configuration options related to the Perl module output
#---------------------------------------------------------------------------
GENERATE_PERLMOD       = NO
PERLMOD_LATEX          = NO
PERLMOD_PRETTY         = YES
PERLMOD_MAKEVAR_PREFIX =
#---------------------------------------------------------------------------
# Configuration options related to the preprocessor
#---------------------------------------------------------------------------
ENABLE_PREPROCESSING   = YES
MACRO_EXPANSION        = NO
EXPAND_ONLY_PREDEF     = NO
SEARCH_INCLUDES        = YES
INCLUDE_PATH           =
INCLUDE_FILE_PATTERNS  =
PREDEFINED             =
EXPAND_AS_DEFINED      =
SKIP_FUNCTION_MACROS   = YES
#---------------------------------------------------------------------------
# Configuration options related to external references
#---------------------------------------------------------------------------
TAGFILES               =
GENERATE_TAGFILE       =
ALLEXTERNALS           = NO
EXTERNAL_GROUPS        = YES
EXTERNAL_PAGES         = YES
#---------------------------------------------------------------------------
# Configuration options related to diagram generator tools
#---------------------------------------------------------------------------
HIDE_UNDOC_RELATIONS   = YES
HAVE_DOT               = NO
DOT_NUM_THREADS        = 0
DOT_COMMON_ATTR        = "fontname=Helvetica,fontsize=10"
DOT_EDGE_ATTR          = "labelfontname=Helvetica,labelfontsize=10"
DOT_NODE_ATTR          = "shape=box,height=0.2,width=0.4"
DOT_FONTPATH           =
CLASS_GRAPH            = YES
COLLABORATION_GRAPH    = YES
GROUP_GRAPHS           = YES
UML_LOOK               = NO
UML_LIMIT_NUM_FIELDS   = 10
DOT_UML_DETAILS        = NO
DOT_WRAP_THRESHOLD     = 17
TEMPLATE_RELATIONS     = NO
INCLUDE_GRAPH          = YES
INCLUDED_BY_GRAPH      = YES
CALL_GRAPH             = NO
CALLER_GRAPH           = NO
GRAPHICAL_HIERARCHY    = YES
DIRECTORY_GRAPH        = YES
DIR_GRAPH_MAX_DEPTH    = 1
DOT_IMAGE_FORMAT       = png
INTERACTIVE_SVG        = NO
DOT_PATH               =
DOTFILE_DIRS           =
DIA_PATH               =
DIAFILE_DIRS           =
PLANTUML_JAR_PATH      =
PLANTUML_CFG_FILE      =
PLANTUML_INCLUDE_PATH  =
DOT_GRAPH_MAX_NODES    = 50
MAX_DOT_GRAPH_DEPTH    = 0
DOT_MULTI_TARGETS      = NO
GENERATE_LEGEND        = YES
DOT_CLEANUP            = YES
MSCGEN_TOOL            =
MSCFILE_DIRS           =
```

Makefile (3 changes)

```
@@ -7,6 +7,9 @@ ut:
it:
	mvn clean verify -Pintegration-tests

doxygen:
	doxygen Doxyfile

list-releases:
	curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=asc&page=0&size=12&filter=namespace%3Aio.github.amithkoujalgi%2Cname%3Aollama4j' \
      --compressed \
```

README.md (41 changes)

````
@@ -67,10 +67,29 @@ In your Maven project, add this dependency:
<dependency>
    <groupId>io.github.amithkoujalgi</groupId>
    <artifactId>ollama4j</artifactId>
    <version>1.0.29</version>
    <version>1.0.70</version>
</dependency>
```

or

In your Gradle project, add the dependency using the Kotlin DSL or the Groovy DSL:

```kotlin
dependencies {

    val ollama4jVersion = "1.0.70"

    implementation("io.github.amithkoujalgi:ollama4j:$ollama4jVersion")
}
 ```

```groovy
dependencies {
    implementation("io.github.amithkoujalgi:ollama4j:1.0.70")
}
```

Latest release:


@@ -110,6 +129,16 @@ make it
Releases (newer artifact versions) are done automatically on pushing the code to the `main` branch through GitHub
Actions CI workflow.

#### Who's using Ollama4j?

- `Datafaker`: a library to generate fake data
    - https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api
- `Vaadin Web UI`: UI-Tester for Interactions with Ollama via ollama4j
    - https://github.com/TEAMPB/ollama4j-vaadin-ui
- `ollama-translator`: Minecraft 1.20.6 spigot plugin allows to easily break language barriers by using ollama on the
  server to translate all messages into a specfic target language.
    - https://github.com/liebki/ollama-translator

#### Traction

[](https://star-history.com/#amithkoujalgi/ollama4j&Date)
@@ -125,15 +154,15 @@ Actions CI workflow.
- [x] Update request body creation with Java objects
- [ ] Async APIs for images
- [ ] Add custom headers to requests
- [ ] Add additional params for `ask` APIs such as:
- [x] Add additional params for `ask` APIs such as:
    - [x] `options`: additional model parameters for the Modelfile such as `temperature` -
      Supported [params](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
    - [ ] `system`: system prompt to (overrides what is defined in the Modelfile)
    - [ ] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile)
    - [ ] `context`: the context parameter returned from a previous request, which can be used to keep a
    - [x] `system`: system prompt to (overrides what is defined in the Modelfile)
    - [x] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile)
    - [x] `context`: the context parameter returned from a previous request, which can be used to keep a
      short
      conversational memory
    - [ ] `stream`: Add support for streaming responses from the model
    - [x] `stream`: Add support for streaming responses from the model
- [ ] Add test cases
- [ ] Handle exceptions better (maybe throw more appropriate exceptions)
````

```
@@ -1,6 +1,6 @@
{
  "label": "APIs - Extras",
  "position": 10,
  "position": 4,
  "link": {
    "type": "generated-index",
    "description": "Details of APIs to handle bunch of extra stuff."
```

```
@@ -1,6 +1,6 @@
{
  "label": "APIs - Ask",
  "position": 10,
  "label": "APIs - Generate",
  "position": 3,
  "link": {
    "type": "generated-index",
    "description": "Details of APIs to interact with LLMs."
```

docs/docs/apis-generate/chat.md (new file, 205 lines)

````
@@ -0,0 +1,205 @@
---
sidebar_position: 7
---

# Chat

This API lets you create a conversation with LLMs. Using this API enables you to ask questions to the model including
information using the history of already asked questions and the respective answers.

## Create a new conversation and use chat history to augment follow up questions

```java
public class Main {

    public static void main(String[] args) {

        String host = "http://localhost:11434/";

        OllamaAPI ollamaAPI = new OllamaAPI(host);
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);

        // create first user question
        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
                .build();

        // start conversation with model
        OllamaChatResult chatResult = ollamaAPI.chat(requestModel);

        System.out.println("First answer: " + chatResult.getResponse());

        // create next userQuestion
        requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?").build();

        // "continue" conversation with model
        chatResult = ollamaAPI.chat(requestModel);

        System.out.println("Second answer: " + chatResult.getResponse());

        System.out.println("Chat History: " + chatResult.getChatHistory());
    }
}

```

You will get a response similar to:

> First answer: Should be Paris!
>
> Second answer: Marseille.
>
> Chat History:

```json
[
  {
    "role": "user",
    "content": "What is the capital of France?",
    "images": []
  },
  {
    "role": "assistant",
    "content": "Should be Paris!",
    "images": []
  },
  {
    "role": "user",
    "content": "And what is the second largest city?",
    "images": []
  },
  {
    "role": "assistant",
    "content": "Marseille.",
    "images": []
  }
]
```

## Create a conversation where the answer is streamed

```java
public class Main {

    public static void main(String[] args) {

        String host = "http://localhost:11434/";

        OllamaAPI ollamaAPI = new OllamaAPI(host);
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
                        "What is the capital of France? And what's France's connection with Mona Lisa?")
                .build();

        // define a handler (Consumer<String>)
        OllamaStreamHandler streamHandler = (s) -> {
            System.out.println(s);
        };

        OllamaChatResult chatResult = ollamaAPI.chat(requestModel, streamHandler);
    }
}
```

You will get a response similar to:

> The
> The capital
> The capital of
> The capital of France
> The capital of France is
> The capital of France is Paris
> The capital of France is Paris.

## Use a simple Console Output Stream Handler

```java
import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler;

public class Main {
    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
                .build();
        OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
        ollamaAPI.chat(requestModel, streamHandler);
    }
}
```

## Create a new conversation with individual system prompt

```java
public class Main {

    public static void main(String[] args) {

        String host = "http://localhost:11434/";

        OllamaAPI ollamaAPI = new OllamaAPI(host);
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);

        // create request with system-prompt (overriding the model defaults) and user question
        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
                .withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
                .build();

        // start conversation with model
        OllamaChatResult chatResult = ollamaAPI.chat(requestModel);

        System.out.println(chatResult.getResponse());
    }
}

```

You will get a response similar to:

> NI.

## Create a conversation about an image (requires model with image recognition skills)

```java
public class Main {

    public static void main(String[] args) {

        String host = "http://localhost:11434/";

        OllamaAPI ollamaAPI = new OllamaAPI(host);
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA);

        // Load Image from File and attach to user message (alternatively images could also be added via URL)
        OllamaChatRequestModel requestModel =
                builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
                        List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();

        OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
        System.out.println("First answer: " + chatResult.getResponse());

        builder.reset();

        // Use history to ask further questions about the image or assistant answer
        requestModel =
                builder.withMessages(chatResult.getChatHistory())
                        .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();

        chatResult = ollamaAPI.chat(requestModel);
        System.out.println("Second answer: " + chatResult.getResponse());
    }
}
```

You will get a response similar to:

> First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two
> levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and
> comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early
> evening, given the warm lighting and the low position of the sun in the sky.
>
> Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. However, the dog
> appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever
> or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed
> confidently.
````

```
@@ -2,7 +2,7 @@
sidebar_position: 2
---

# Ask - Async
# Generate - Async

This API lets you ask questions to the LLMs in a asynchronous way.
These APIs correlate to
@@ -19,13 +19,13 @@ public class Main {

        String prompt = "Who are you?";

        OllamaAsyncResultCallback callback = ollamaAPI.askAsync(OllamaModelType.LLAMA2, prompt);
        OllamaAsyncResultCallback callback = ollamaAPI.generateAsync(OllamaModelType.LLAMA2, prompt);

        while (!callback.isComplete() || !callback.getStream().isEmpty()) {
            // poll for data from the response stream
            String result = callback.getStream().poll();
            if (response != null) {
                System.out.print(result.getResponse());
            if (result != null) {
                System.out.print(result);
            }
            Thread.sleep(100);
        }
```

```
@@ -2,7 +2,7 @@
sidebar_position: 3
---

# Ask - With Image Files
# Generate - With Image Files

This API lets you ask questions along with the image files to the LLMs.
These APIs correlate to
@@ -15,7 +15,7 @@ recommended.

:::

## Ask (Sync)
## Synchronous mode

If you have this image downloaded and you pass the path to the downloaded image to the following code:

@@ -29,7 +29,7 @@ public class Main {
        OllamaAPI ollamaAPI = new OllamaAPI(host);
        ollamaAPI.setRequestTimeoutSeconds(10);

        OllamaResult result = ollamaAPI.askWithImageFiles(OllamaModelType.LLAVA,
        OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA,
                "What's in this image?",
                List.of(
                        new File("/path/to/image")));
```

```
@@ -2,7 +2,7 @@
sidebar_position: 4
---

# Ask - With Image URLs
# Generate - With Image URLs

This API lets you ask questions along with the image files to the LLMs.
These APIs correlate to
@@ -29,7 +29,7 @@ public class Main {
        OllamaAPI ollamaAPI = new OllamaAPI(host);
        ollamaAPI.setRequestTimeoutSeconds(10);

        OllamaResult result = ollamaAPI.askWithImageURLs(OllamaModelType.LLAVA,
        OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA,
                "What's in this image?",
                List.of(
                        "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"));
```

````
@@ -2,7 +2,7 @@
sidebar_position: 1
---

# Ask - Sync
# Generate - Sync

This API lets you ask questions to the LLMs in a synchronous way.
These APIs correlate to
@@ -25,7 +25,7 @@ public class Main {
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        OllamaResult result =
                ollamaAPI.ask(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());
                ollamaAPI.generate(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build());

        System.out.println(result.getResponse());
    }
@@ -41,6 +41,41 @@ You will get a response similar to:
> require
> natural language understanding and generation capabilities.

## Try asking a question, receiving the answer streamed

```java
public class Main {

    public static void main(String[] args) {

        String host = "http://localhost:11434/";

        OllamaAPI ollamaAPI = new OllamaAPI(host);
        // define a stream handler (Consumer<String>)
        OllamaStreamHandler streamHandler = (s) -> {
           System.out.println(s);
        };

        // Should be called using seperate thread to gain non blocking streaming effect.
        OllamaResult result = ollamaAPI.generate(config.getModel(),
          "What is the capital of France? And what's France's connection with Mona Lisa?",
          new OptionsBuilder().build(), streamHandler);

        System.out.println("Full response: " +result.getResponse());
    }
}
```
You will get a response similar to:

> The
> The capital
> The capital of
> The capital of France
> The capital of France is
> The capital of France is Paris
> The capital of France is Paris.
> Full response: The capital of France is Paris.

## Try asking a question from general topics.

```java
@@ -55,7 +90,7 @@ public class Main {
        String prompt = "List all cricket world cup teams of 2019.";

        OllamaResult result =
                ollamaAPI.ask(OllamaModelType.LLAMA2, prompt, new OptionsBuilder().build());
                ollamaAPI.generate(OllamaModelType.LLAMA2, prompt, new OptionsBuilder().build());

        System.out.println(result.getResponse());
    }
@@ -97,7 +132,7 @@ public class Main {
                SamplePrompts.getSampleDatabasePromptWithQuestion(
                        "List all customer names who have bought one or more products");
        OllamaResult result =
                ollamaAPI.ask(OllamaModelType.SQLCODER, prompt, new OptionsBuilder().build());
                ollamaAPI.generate(OllamaModelType.SQLCODER, prompt, new OptionsBuilder().build());
        System.out.println(result.getResponse());
    }
}
````

```
@@ -42,7 +42,7 @@ public class AskPhi {
                        .addSeparator()
                        .add("How do I read a file in Go and print its contents to stdout?");

        OllamaResult response = ollamaAPI.ask(model, promptBuilder.build());
        OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build());
        System.out.println(response.getResponse());
    }
}
```

```
@@ -1,6 +1,6 @@
{
  "label": "APIs - Model Management",
  "position": 4,
  "position": 2,
  "link": {
    "type": "generated-index",
    "description": "Details of APIs to manage LLMs."
```

```
@@ -79,6 +79,7 @@ const config = {
                        label: 'Docs',
                    },
                    {to: 'https://amithkoujalgi.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
                    {to: 'https://amithkoujalgi.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'},
                    {to: '/blog', label: 'Blog', position: 'left'},
                    {
                        href: 'https://github.com/amithkoujalgi/ollama4j',
```

```
@@ -28,3 +28,13 @@
    --ifm-color-primary-lightest: #4fddbf;
    --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3);
}

article > header > h1 {
    font-size: 2rem !important;
}

div > h1,
header > h1,
h2 > a {
    font-size: 2rem !important;
}
```

logo-small.png (BIN, new file; binary file not shown. Size: 5.0 KiB)

pom.xml (21 changes)

```
@@ -4,7 +4,7 @@

    <groupId>io.github.amithkoujalgi</groupId>
    <artifactId>ollama4j</artifactId>
    <version>1.0.42</version>
    <version>1.0.72</version>

    <name>Ollama4j</name>
    <description>Java library for interacting with Ollama API.</description>
@@ -39,7 +39,7 @@
        <connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
        <developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
        <url>https://github.com/amithkoujalgi/ollama4j</url>
    <tag>v1.0.42</tag>
        <tag>v1.0.72</tag>
    </scm>

    <build>
@@ -99,7 +99,7 @@
                <configuration>
                    <skipTests>${skipUnitTests}</skipTests>
                    <includes>
            <include>**/unittests/*.java</include>
                        <include>**/unittests/**/*.java</include>
                    </includes>
                </configuration>
            </plugin>
@@ -149,7 +149,12 @@
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
      <version>2.15.3</version>
            <version>2.17.1</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.datatype</groupId>
            <artifactId>jackson-datatype-jsr310</artifactId>
            <version>2.17.1</version>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
@@ -174,6 +179,12 @@
            <version>4.1.0</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.json</groupId>
            <artifactId>json</artifactId>
            <version>20240205</version>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <distributionManagement>
@@ -203,7 +214,7 @@
                    <plugin>
                        <groupId>org.jacoco</groupId>
                        <artifactId>jacoco-maven-plugin</artifactId>
            <version>0.8.7</version>
                        <version>0.8.11</version>
                        <executions>
                            <execution>
                                <goals>
```

@@ -2,21 +2,22 @@ package io.github.amithkoujalgi.ollama4j.core;

import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.*;
import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest;
import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest;
import io.github.amithkoujalgi.ollama4j.core.models.request.ModelEmbeddingsRequest;
import io.github.amithkoujalgi.ollama4j.core.models.request.ModelRequest;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.request.*;
import io.github.amithkoujalgi.ollama4j.core.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.http.HttpClient;
import java.net.http.HttpConnectTimeoutException;
import java.net.http.HttpRequest;
@@ -27,16 +28,16 @@ import java.time.Duration;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** The base Ollama API class. */
/**
 * The base Ollama API class.
 */
@SuppressWarnings("DuplicatedCode")
public class OllamaAPI {

    private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class);
    private final String host;
  private long requestTimeoutSeconds = 3;
    private long requestTimeoutSeconds = 10;
    private boolean verbose = true;
    private BasicAuth basicAuth;

@@ -308,8 +309,18 @@ public class OllamaAPI {
     */
    public List<Double> generateEmbeddings(String model, String prompt)
            throws IOException, InterruptedException, OllamaBaseException {
        return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt));
    }

    /**
     * Generate embeddings using a {@link OllamaEmbeddingsRequestModel}.
     *
     * @param modelRequest request for '/api/embeddings' endpoint
     * @return embeddings
     */
    public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException {
        URI uri = URI.create(this.host + "/api/embeddings");
    String jsonData = new ModelEmbeddingsRequest(model, prompt).toString();
        String jsonData = modelRequest.toString();
        HttpClient httpClient = HttpClient.newHttpClient();
        HttpRequest.Builder requestBuilder =
                getRequestBuilderDefault(uri)
@@ -320,8 +331,8 @@ public class OllamaAPI {
        int statusCode = response.statusCode();
        String responseBody = response.body();
        if (statusCode == 200) {
      EmbeddingResponse embeddingResponse =
          Utils.getObjectMapper().readValue(responseBody, EmbeddingResponse.class);
            OllamaEmbeddingResponseModel embeddingResponse =
                    Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class);
            return embeddingResponse.getEmbedding();
        } else {
            throw new OllamaBaseException(statusCode + " - " + responseBody);
@@ -329,33 +340,45 @@ public class OllamaAPI {
    }

    /**
   * Ask a question to a model running on Ollama server. This is a sync/blocking call.
     * Generate response for a question to a model running on Ollama server. This is a sync/blocking
     * call.
     *
     * @param model         the ollama model to ask the question to
     * @param prompt        the prompt/question text
     * @param options       the Options object - <a
     *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More
     *                      details on the options</a>
     * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
     * @return OllamaResult that includes response text and time taken for response
     */
  public OllamaResult ask(String model, String prompt, Options options)
    public OllamaResult generate(String model, String prompt, Options options, OllamaStreamHandler streamHandler)
            throws OllamaBaseException, IOException, InterruptedException {
    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
        OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
        ollamaRequestModel.setOptions(options.getOptionsMap());
    return askSync(ollamaRequestModel);
        return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
    }

    /**
   * Ask a question to a model running on Ollama server and get a callback handle that can be used
   * to check for status and get the response from the model later. This would be an
   * async/non-blocking call.
     * Convenience method to call Ollama API without streaming responses.
     * <p>
     * Uses {@link #generate(String, String, Options, OllamaStreamHandler)}
     */
    public OllamaResult generate(String model, String prompt, Options options)
            throws OllamaBaseException, IOException, InterruptedException {
        return generate(model, prompt, options, null);
    }

    /**
     * Generate response for a question to a model running on Ollama server and get a callback handle
     * that can be used to check for status and get the response from the model later. This would be
     * an async/non-blocking call.
     *
     * @param model  the ollama model to ask the question to
     * @param prompt the prompt/question text
     * @return the ollama async result callback handle
     */
  public OllamaAsyncResultCallback askAsync(String model, String prompt) {
    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
    public OllamaAsyncResultCallback generateAsync(String model, String prompt) {
        OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);

        URI uri = URI.create(this.host + "/api/generate");
        OllamaAsyncResultCallback ollamaAsyncResultCallback =
@@ -372,16 +395,33 @@ public class OllamaAPI {
     * @param model         the ollama model to ask the question to
     * @param prompt        the prompt/question text
     * @param imageFiles    the list of image files to use for the question
     * @param options       the Options object - <a
     *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More
     *                      details on the options</a>
     * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
     * @return OllamaResult that includes response text and time taken for response
     */
  public OllamaResult askWithImageFiles(String model, String prompt, List<File> imageFiles)
    public OllamaResult generateWithImageFiles(
            String model, String prompt, List<File> imageFiles, Options options, OllamaStreamHandler streamHandler)
            throws OllamaBaseException, IOException, InterruptedException {
        List<String> images = new ArrayList<>();
        for (File imageFile : imageFiles) {
            images.add(encodeFileToBase64(imageFile));
        }
    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
    return askSync(ollamaRequestModel);
        OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
        ollamaRequestModel.setOptions(options.getOptionsMap());
        return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
    }

    /**
     * Convenience method to call Ollama API without streaming responses.
     * <p>
     * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)}
     */
    public OllamaResult generateWithImageFiles(
            String model, String prompt, List<File> imageFiles, Options options)
            throws OllamaBaseException, IOException, InterruptedException {
        return generateWithImageFiles(model, prompt, imageFiles, options, null);
    }

    /**
@@ -391,18 +431,93 @@ public class OllamaAPI {
     * @param model         the ollama model to ask the question to
     * @param prompt        the prompt/question text
     * @param imageURLs     the list of image URLs to use for the question
     * @param options       the Options object - <a
     *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More
     *                      details on the options</a>
     * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
     * @return OllamaResult that includes response text and time taken for response
     */
  public OllamaResult askWithImageURLs(String model, String prompt, List<String> imageURLs)
    public OllamaResult generateWithImageURLs(
            String model, String prompt, List<String> imageURLs, Options options, OllamaStreamHandler streamHandler)
            throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
        List<String> images = new ArrayList<>();
        for (String imageURL : imageURLs) {
      images.add(encodeByteArrayToBase64(loadImageBytesFromUrl(imageURL)));
            images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL)));
        }
    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
    return askSync(ollamaRequestModel);
        OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
        ollamaRequestModel.setOptions(options.getOptionsMap());
        return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
    }

    /**
     * Convenience method to call Ollama API without streaming responses.
     * <p>
     * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)}
     */
    public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs,
                                              Options options)
            throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
        return generateWithImageURLs(model, prompt, imageURLs, options, null);
    }

    /**
     * Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the api
     * 'api/chat'.
     *
     * @param model    the ollama model to ask the question to
     * @param messages chat history / message stack to send to the model
     * @return {@link OllamaChatResult} containing the api response and the message history including the newly acquired assistant response.
     * @throws OllamaBaseException  any response code other than 200 has been returned
     * @throws IOException          in case the responseStream can not be read
     * @throws InterruptedException in case the server is not reachable or network issues happen
     */
    public OllamaChatResult chat(String model, List<OllamaChatMessage> messages) throws OllamaBaseException, IOException, InterruptedException {
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model);
        return chat(builder.withMessages(messages).build());
    }

    /**
     * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
     * <p>
     * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
     *
     * @param request request object to be sent to the server
     * @return {@link OllamaChatResult}
     * @throws OllamaBaseException  any response code other than 200 has been returned
     * @throws IOException          in case the responseStream can not be read
     * @throws InterruptedException in case the server is not reachable or network issues happen
     */
    public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException {
        return chat(request, null);
    }

    /**
     * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
     * <p>
     * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
     *
     * @param request       request object to be sent to the server
     * @param streamHandler callback handler to handle the last message from stream (caution: all previous messages from stream will be concatenated)
     * @return {@link OllamaChatResult}
     * @throws OllamaBaseException  any response code other than 200 has been returned
     * @throws IOException          in case the responseStream can not be read
     * @throws InterruptedException in case the server is not reachable or network issues happen
     */
    public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
        OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
        OllamaResult result;
        if (streamHandler != null) {
            request.setStream(true);
            result = requestCaller.call(request, streamHandler);
        } else {
            result = requestCaller.callSync(request);
        }
        return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages());
    }

    // technical private methods //

    private static String encodeFileToBase64(File file) throws IOException {
        return Base64.getEncoder().encodeToString(Files.readAllBytes(file.toPath()));
    }

@@ -411,68 +526,19 @@ public class OllamaAPI {
        return Base64.getEncoder().encodeToString(bytes);
    }

  private static byte[] loadImageBytesFromUrl(String imageUrl)
      throws IOException, URISyntaxException {
    URL url = new URI(imageUrl).toURL();
    try (InputStream in = url.openStream();
        ByteArrayOutputStream out = new ByteArrayOutputStream()) {
      byte[] buffer = new byte[1024];
      int bytesRead;
      while ((bytesRead = in.read(buffer)) != -1) {
        out.write(buffer, 0, bytesRead);
      }
      return out.toByteArray();
    }
  }

  private OllamaResult askSync(OllamaRequestModel ollamaRequestModel)
    private OllamaResult generateSyncForOllamaRequestModel(
            OllamaGenerateRequestModel ollamaRequestModel, OllamaStreamHandler streamHandler)
            throws OllamaBaseException, IOException, InterruptedException {
    long startTime = System.currentTimeMillis();
    HttpClient httpClient = HttpClient.newHttpClient();
    URI uri = URI.create(this.host + "/api/generate");
    HttpRequest.Builder requestBuilder =
        getRequestBuilderDefault(uri)
            .POST(
                HttpRequest.BodyPublishers.ofString(
                    Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)));
    HttpRequest request = requestBuilder.build();
    logger.debug("Ask model '" + ollamaRequestModel + "' ...");
    HttpResponse<InputStream> response =
        httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
    int statusCode = response.statusCode();
    InputStream responseBodyStream = response.body();
    StringBuilder responseBuffer = new StringBuilder();
    try (BufferedReader reader =
        new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
      String line;
      while ((line = reader.readLine()) != null) {
        if (statusCode == 404) {
          logger.warn("Status code: 404 (Not Found)");
          OllamaErrorResponseModel ollamaResponseModel =
              Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
          responseBuffer.append(ollamaResponseModel.getError());
        } else if (statusCode == 401) {
          logger.warn("Status code: 401 (Unauthorized)");
          OllamaErrorResponseModel ollamaResponseModel =
              Utils.getObjectMapper()
                  .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class);
          responseBuffer.append(ollamaResponseModel.getError());
        OllamaGenerateEndpointCaller requestCaller =
                new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
        OllamaResult result;
        if (streamHandler != null) {
            ollamaRequestModel.setStream(true);
            result = requestCaller.call(ollamaRequestModel, streamHandler);
        } else {
          OllamaResponseModel ollamaResponseModel =
              Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
          if (!ollamaResponseModel.isDone()) {
            responseBuffer.append(ollamaResponseModel.getResponse());
          }
        }
      }
    }
    if (statusCode != 200) {
      logger.error("Status code " + statusCode);
      throw new OllamaBaseException(responseBuffer.toString());
    } else {
      long endTime = System.currentTimeMillis();
      return new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode);
            result = requestCaller.callSync(ollamaRequestModel);
        }
        return result;
    }

    /**

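The hunks above rename the blocking ask/askAsync entry points to generate/generateAsync and add an optional OllamaStreamHandler to the synchronous path. A minimal usage sketch of the renamed API follows; the host URL, the "llama2" model name and the OptionsBuilder helper are assumptions that do not appear in these hunks.

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class GenerateSketch {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434"); // assumed host

        // Non-streaming: the convenience overload delegates with a null stream handler.
        OllamaResult result = ollamaAPI.generate("llama2", "Why is the sky blue?",
                new OptionsBuilder().build());
        System.out.println(result.getResponse());

        // Streaming: the handler receives the concatenated response text produced so far.
        ollamaAPI.generate("llama2", "Why is the sky blue?",
                new OptionsBuilder().build(),
                partial -> System.out.println(partial.length() + " characters received"));
    }
}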
@@ -0,0 +1,7 @@
package io.github.amithkoujalgi.ollama4j.core;

import java.util.function.Consumer;

public interface OllamaStreamHandler extends Consumer<String> {
    void accept(String message);
}
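Since OllamaStreamHandler only narrows Consumer<String>, a lambda is enough for ad-hoc streaming. A short sketch, reusing the ollamaAPI instance and the assumed model name and OptionsBuilder from the previous sketch:

static void lambdaStreamSketch(OllamaAPI ollamaAPI) throws Exception {
    // Prints the full concatenated response text on every streamed update.
    OllamaStreamHandler handler = cumulativeText -> System.out.println(cumulativeText);
    ollamaAPI.generate("llama2", "Tell me a joke", new OptionsBuilder().build(), handler);
}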
@@ -0,0 +1,14 @@
package io.github.amithkoujalgi.ollama4j.core.impl;

import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;

public class ConsoleOutputStreamHandler implements OllamaStreamHandler {
    private final StringBuffer response = new StringBuffer();

    @Override
    public void accept(String message) {
        String substr = message.substring(response.length());
        response.append(substr);
        System.out.print(substr);
    }
}
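The stream observers further down in this diff pass the whole concatenated message to the handler on every update, which is why ConsoleOutputStreamHandler prints only the suffix it has not yet seen. Usage is a drop-in replacement for the lambda above (model name and OptionsBuilder remain assumptions):

static void consoleStreamSketch(OllamaAPI ollamaAPI) throws Exception {
    OllamaStreamHandler console = new ConsoleOutputStreamHandler(); // prints only newly arrived text
    ollamaAPI.generate("llama2", "Summarise the Ollama REST API", new OptionsBuilder().build(), console);
}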
@@ -1,14 +1,22 @@
package io.github.amithkoujalgi.ollama4j.core.models;

import java.time.LocalDateTime;
import java.time.OffsetDateTime;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;

@Data
public class Model {

  private String name;
  private String model;
  @JsonProperty("modified_at")
  private String modifiedAt;
  private OffsetDateTime modifiedAt;
  @JsonProperty("expires_at")
  private OffsetDateTime expiresAt;
  private String digest;
  private long size;
  @JsonProperty("details")
@@ -33,4 +41,13 @@ public class Model {
    return name.split(":")[1];
  }

  @Override
  public String toString() {
    try {
      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
    } catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }

}

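Model now maps modified_at and expires_at to java.time.OffsetDateTime, which is presumably why jackson-datatype-jsr310 was added to the pom earlier in this compare. Deserializing these fields requires the JavaTimeModule to be registered on the ObjectMapper; a standalone sketch of that wiring (the timestamp literal is made up):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import java.time.OffsetDateTime;

public class TimestampSketch {
    public static void main(String[] args) throws Exception {
        // Without JavaTimeModule, Jackson cannot construct java.time types from ISO-8601 strings.
        ObjectMapper mapper = new ObjectMapper().registerModule(new JavaTimeModule());
        OffsetDateTime modifiedAt =
                mapper.readValue("\"2024-05-01T10:15:30+02:00\"", OffsetDateTime.class);
        System.out.println(modifiedAt);
    }
}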
@@ -2,7 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;

@Data
@@ -16,5 +17,14 @@ public class ModelDetail {
  private String parameters;
  private String template;
  private String system;
  private Map<String, String> details;
  private ModelMeta details;

  @Override
  public String toString() {
    try {
      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
    } catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }
}

@@ -2,6 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;

@Data
@@ -21,4 +23,13 @@ public class ModelMeta {

  @JsonProperty("quantization_level")
  private String quantizationLevel;

  @Override
  public String toString() {
    try {
      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
    } catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }
}

@@ -1,6 +1,8 @@
package io.github.amithkoujalgi.ollama4j.core.models;

import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import java.io.BufferedReader;
import java.io.IOException;
@@ -22,7 +24,7 @@ import lombok.Getter;
@SuppressWarnings("unused")
public class OllamaAsyncResultCallback extends Thread {
  private final HttpRequest.Builder requestBuilder;
  private final OllamaRequestModel ollamaRequestModel;
  private final OllamaGenerateRequestModel ollamaRequestModel;
  private final Queue<String> queue = new LinkedList<>();
  private String result;
  private boolean isDone;
@@ -47,7 +49,7 @@ public class OllamaAsyncResultCallback extends Thread {

  public OllamaAsyncResultCallback(
      HttpRequest.Builder requestBuilder,
      OllamaRequestModel ollamaRequestModel,
      OllamaGenerateRequestModel ollamaRequestModel,
      long requestTimeoutSeconds) {
    this.requestBuilder = requestBuilder;
    this.ollamaRequestModel = ollamaRequestModel;
@@ -87,8 +89,8 @@ public class OllamaAsyncResultCallback extends Thread {
            queue.add(ollamaResponseModel.getError());
            responseBuffer.append(ollamaResponseModel.getError());
          } else {
            OllamaResponseModel ollamaResponseModel =
                Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
            OllamaGenerateResponseModel ollamaResponseModel =
                Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
            queue.add(ollamaResponseModel.getResponse());
            if (!ollamaResponseModel.isDone()) {
              responseBuffer.append(ollamaResponseModel.getResponse());

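A rough sketch of consuming the asynchronous callback returned by generateAsync. Only the fields are visible in this hunk, so the isDone()/getResult() accessors are assumed to come from Lombok on the class and are not confirmed by the diff:

static void asyncSketch(OllamaAPI ollamaAPI) throws Exception {
    OllamaAsyncResultCallback callback = ollamaAPI.generateAsync("llama2", "Write a haiku about Java");
    while (!callback.isDone()) {              // assumed getter for the isDone field
        Thread.sleep(100);                    // poll until the background thread has finished
    }
    System.out.println(callback.getResult()); // assumed getter for the result field
}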
@@ -0,0 +1,35 @@
package io.github.amithkoujalgi.ollama4j.core.models;

import java.util.Map;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;

import io.github.amithkoujalgi.ollama4j.core.utils.BooleanToJsonFormatFlagSerializer;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import lombok.Data;

@Data
@JsonInclude(JsonInclude.Include.NON_NULL)
public abstract class OllamaCommonRequestModel {

  protected String model;
  @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)
  @JsonProperty(value = "format")
  protected Boolean returnFormatJson;
  protected Map<String, Object> options;
  protected String template;
  protected boolean stream;
  @JsonProperty(value = "keep_alive")
  protected String keepAlive;

  public String toString() {
    try {
      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
    } catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }
}
@@ -1,8 +1,6 @@
package io.github.amithkoujalgi.ollama4j.core.models;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
import lombok.Data;

@Data

@@ -0,0 +1,45 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;

import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;

import io.github.amithkoujalgi.ollama4j.core.utils.FileToBase64Serializer;

import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;

/**
 * Defines a single Message to be used inside a chat request against the ollama /api/chat endpoint.
 *
 * @see <a href="https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate chat completion</a>
 */
@Data
@AllArgsConstructor
@RequiredArgsConstructor
@NoArgsConstructor
public class OllamaChatMessage {

    @NonNull
    private OllamaChatMessageRole role;

    @NonNull
    private String content;

    @JsonSerialize(using = FileToBase64Serializer.class)
    private List<byte[]> images;

    @Override
    public String toString() {
        try {
            return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
}
@@ -0,0 +1,19 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;

import com.fasterxml.jackson.annotation.JsonValue;

/**
 * Defines the possible Chat Message roles.
 */
public enum OllamaChatMessageRole {
    SYSTEM("system"),
    USER("user"),
    ASSISTANT("assistant");

    @JsonValue
    private String roleName;

    private OllamaChatMessageRole(String roleName) {
        this.roleName = roleName;
    }
}
@@ -0,0 +1,110 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;

import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.github.amithkoujalgi.ollama4j.core.utils.Options;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

/**
 * Helper class for creating {@link OllamaChatRequestModel} objects using the builder-pattern.
 */
public class OllamaChatRequestBuilder {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class);

    private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages) {
        request = new OllamaChatRequestModel(model, messages);
    }

    private OllamaChatRequestModel request;

    public static OllamaChatRequestBuilder getInstance(String model) {
        return new OllamaChatRequestBuilder(model, new ArrayList<>());
    }

    public OllamaChatRequestModel build() {
        return request;
    }

    public void reset() {
        request = new OllamaChatRequestModel(request.getModel(), new ArrayList<>());
    }

    public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images) {
        List<OllamaChatMessage> messages = this.request.getMessages();

        List<byte[]> binaryImages = images.stream().map(file -> {
            try {
                return Files.readAllBytes(file.toPath());
            } catch (IOException e) {
                LOG.warn(String.format("File '%s' could not be accessed, will not add to message!", file.toPath()), e);
                return new byte[0];
            }
        }).collect(Collectors.toList());

        messages.add(new OllamaChatMessage(role, content, binaryImages));
        return this;
    }

    public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, String... imageUrls) {
        List<OllamaChatMessage> messages = this.request.getMessages();
        List<byte[]> binaryImages = null;
        if (imageUrls.length > 0) {
            binaryImages = new ArrayList<>();
            for (String imageUrl : imageUrls) {
                try {
                    binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl));
                } catch (URISyntaxException e) {
                    LOG.warn(String.format("URL '%s' could not be accessed, will not add to message!", imageUrl), e);
                } catch (IOException e) {
                    LOG.warn(String.format("Content of URL '%s' could not be read, will not add to message!", imageUrl), e);
                }
            }
        }

        messages.add(new OllamaChatMessage(role, content, binaryImages));
        return this;
    }

    public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages) {
        this.request.getMessages().addAll(messages);
        return this;
    }

    public OllamaChatRequestBuilder withOptions(Options options) {
        this.request.setOptions(options.getOptionsMap());
        return this;
    }

    public OllamaChatRequestBuilder withGetJsonResponse() {
        this.request.setReturnFormatJson(true);
        return this;
    }

    public OllamaChatRequestBuilder withTemplate(String template) {
        this.request.setTemplate(template);
        return this;
    }

    public OllamaChatRequestBuilder withStreaming() {
        this.request.setStream(true);
        return this;
    }

    public OllamaChatRequestBuilder withKeepAlive(String keepAlive) {
        this.request.setKeepAlive(keepAlive);
        return this;
    }

}
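A usage sketch of the chat builder defined above together with OllamaAPI.chat(...); the model name and prompts are placeholders:

static void chatSketch(OllamaAPI ollamaAPI) throws Exception {
    OllamaChatRequestModel chatRequest = OllamaChatRequestBuilder.getInstance("llama2")
            .withMessage(OllamaChatMessageRole.SYSTEM, "You answer in one sentence.")
            .withMessage(OllamaChatMessageRole.USER, "What does the /api/chat endpoint do?")
            .build();
    OllamaChatResult chatResult = ollamaAPI.chat(chatRequest);
    System.out.println(chatResult.getResponse());
}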
@@ -0,0 +1,39 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;

import java.util.List;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;

import lombok.Getter;
import lombok.Setter;

/**
 * Defines a Request to use against the ollama /api/chat endpoint.
 *
 * @see <a href=
 *      "https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate
 *      Chat Completion</a>
 */
@Getter
@Setter
public class OllamaChatRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody {

  private List<OllamaChatMessage> messages;

  public OllamaChatRequestModel() {}

  public OllamaChatRequestModel(String model, List<OllamaChatMessage> messages) {
    this.model = model;
    this.messages = messages;
  }

  @Override
  public boolean equals(Object o) {
    if (!(o instanceof OllamaChatRequestModel)) {
      return false;
    }

    return this.toString().equals(o.toString());
  }

}
@@ -0,0 +1,23 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;

import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;

import java.util.List;

@Data
public class OllamaChatResponseModel {
    private String model;
    private @JsonProperty("created_at") String createdAt;
    private @JsonProperty("done_reason") String doneReason;
    private OllamaChatMessage message;
    private boolean done;
    private String error;
    private List<Integer> context;
    private @JsonProperty("total_duration") Long totalDuration;
    private @JsonProperty("load_duration") Long loadDuration;
    private @JsonProperty("prompt_eval_duration") Long promptEvalDuration;
    private @JsonProperty("eval_duration") Long evalDuration;
    private @JsonProperty("prompt_eval_count") Integer promptEvalCount;
    private @JsonProperty("eval_count") Integer evalCount;
}
@@ -0,0 +1,32 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;

import java.util.List;

import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;

/**
 * Specific chat-API result that contains the chat history sent to the model and appends the answer as {@link OllamaChatResult} given by the
 * {@link OllamaChatMessageRole#ASSISTANT} role.
 */
public class OllamaChatResult extends OllamaResult {

    private List<OllamaChatMessage> chatHistory;

    public OllamaChatResult(String response, long responseTime, int httpStatusCode,
            List<OllamaChatMessage> chatHistory) {
        super(response, responseTime, httpStatusCode);
        this.chatHistory = chatHistory;
        appendAnswerToChatHistory(response);
    }

    public List<OllamaChatMessage> getChatHistory() {
        return chatHistory;
    }

    private void appendAnswerToChatHistory(String answer) {
        OllamaChatMessage assistantMessage = new OllamaChatMessage(OllamaChatMessageRole.ASSISTANT, answer);
        this.chatHistory.add(assistantMessage);
    }

}
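Because the constructor appends the assistant answer to the history it was given, a follow-up turn can reuse getChatHistory() directly. A sketch, with placeholder model name and prompts:

static void followUpSketch(OllamaAPI ollamaAPI) throws Exception {
    OllamaChatResult first = ollamaAPI.chat("llama2", List.of(
            new OllamaChatMessage(OllamaChatMessageRole.USER, "Name one JVM language.")));
    List<OllamaChatMessage> history = first.getChatHistory(); // now ends with the ASSISTANT reply
    history.add(new OllamaChatMessage(OllamaChatMessageRole.USER, "Name another one."));
    OllamaChatResult followUp = ollamaAPI.chat("llama2", history);
    System.out.println(followUp.getResponse());
}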
@@ -0,0 +1,31 @@
package io.github.amithkoujalgi.ollama4j.core.models.chat;

import java.util.ArrayList;
import java.util.List;

import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;

public class OllamaChatStreamObserver {

    private OllamaStreamHandler streamHandler;

    private List<OllamaChatResponseModel> responseParts = new ArrayList<>();

    private String message = "";

    public OllamaChatStreamObserver(OllamaStreamHandler streamHandler) {
        this.streamHandler = streamHandler;
    }

    public void notify(OllamaChatResponseModel currentResponsePart) {
        responseParts.add(currentResponsePart);
        handleCurrentResponsePart(currentResponsePart);
    }

    protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart) {
        message = message + currentResponsePart.getMessage().getContent();
        streamHandler.accept(message);
    }

}
@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models;
package io.github.amithkoujalgi.ollama4j.core.models.embeddings;

import com.fasterxml.jackson.annotation.JsonProperty;

@@ -7,7 +7,7 @@ import lombok.Data;

@SuppressWarnings("unused")
@Data
public class EmbeddingResponse {
public class OllamaEmbeddingResponseModel {
    @JsonProperty("embedding")
    private List<Double> embedding;
}
@@ -0,0 +1,31 @@
package io.github.amithkoujalgi.ollama4j.core.models.embeddings;

import io.github.amithkoujalgi.ollama4j.core.utils.Options;

public class OllamaEmbeddingsRequestBuilder {

    private OllamaEmbeddingsRequestBuilder(String model, String prompt) {
        request = new OllamaEmbeddingsRequestModel(model, prompt);
    }

    private OllamaEmbeddingsRequestModel request;

    public static OllamaEmbeddingsRequestBuilder getInstance(String model, String prompt) {
        return new OllamaEmbeddingsRequestBuilder(model, prompt);
    }

    public OllamaEmbeddingsRequestModel build() {
        return request;
    }

    public OllamaEmbeddingsRequestBuilder withOptions(Options options) {
        this.request.setOptions(options.getOptionsMap());
        return this;
    }

    public OllamaEmbeddingsRequestBuilder withKeepAlive(String keepAlive) {
        this.request.setKeepAlive(keepAlive);
        return this;
    }

}
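This builder pairs with the new generateEmbeddings(OllamaEmbeddingsRequestModel) overload shown earlier in the OllamaAPI hunks; a sketch (the embedding model name is an assumption):

static void embeddingsSketch(OllamaAPI ollamaAPI) throws Exception {
    OllamaEmbeddingsRequestModel embeddingsRequest = OllamaEmbeddingsRequestBuilder
            .getInstance("nomic-embed-text", "The sky is blue because of Rayleigh scattering.")
            .withKeepAlive("5m")
            .build();
    List<Double> embedding = ollamaAPI.generateEmbeddings(embeddingsRequest);
    System.out.println(embedding.size() + " dimensions");
}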
@@ -1,31 +1,28 @@
package io.github.amithkoujalgi.ollama4j.core.models;
package io.github.amithkoujalgi.ollama4j.core.models.embeddings;

import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;

import com.fasterxml.jackson.core.JsonProcessingException;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;

@Data
public class OllamaRequestModel {

@RequiredArgsConstructor
@NoArgsConstructor
public class OllamaEmbeddingsRequestModel {
  @NonNull
  private String model;
  @NonNull
  private String prompt;
  private List<String> images;
  private Map<String, Object> options;

  public OllamaRequestModel(String model, String prompt) {
    this.model = model;
    this.prompt = prompt;
  }

  public OllamaRequestModel(String model, String prompt, List<String> images) {
    this.model = model;
    this.prompt = prompt;
    this.images = images;
  }
  protected Map<String, Object> options;
  @JsonProperty(value = "keep_alive")
  private String keepAlive;

  @Override
  public String toString() {
    try {
      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
@@ -0,0 +1,55 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate;

import io.github.amithkoujalgi.ollama4j.core.utils.Options;

/**
 * Helper class for creating {@link io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel}
 * objects using the builder-pattern.
 */
public class OllamaGenerateRequestBuilder {

    private OllamaGenerateRequestBuilder(String model, String prompt) {
        request = new OllamaGenerateRequestModel(model, prompt);
    }

    private OllamaGenerateRequestModel request;

    public static OllamaGenerateRequestBuilder getInstance(String model) {
        return new OllamaGenerateRequestBuilder(model, "");
    }

    public OllamaGenerateRequestModel build() {
        return request;
    }

    public OllamaGenerateRequestBuilder withPrompt(String prompt) {
        request.setPrompt(prompt);
        return this;
    }

    public OllamaGenerateRequestBuilder withGetJsonResponse() {
        this.request.setReturnFormatJson(true);
        return this;
    }

    public OllamaGenerateRequestBuilder withOptions(Options options) {
        this.request.setOptions(options.getOptionsMap());
        return this;
    }

    public OllamaGenerateRequestBuilder withTemplate(String template) {
        this.request.setTemplate(template);
        return this;
    }

    public OllamaGenerateRequestBuilder withStreaming() {
        this.request.setStream(true);
        return this;
    }

    public OllamaGenerateRequestBuilder withKeepAlive(String keepAlive) {
        this.request.setKeepAlive(keepAlive);
        return this;
    }

}
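OllamaAPI constructs OllamaGenerateRequestModel internally, but the builder can also be used directly when a request needs to be prepared up front. A sketch; the model name is a placeholder, and the assumption that withGetJsonResponse() maps to the server's "format": "json" flag is based only on the serializer's name:

OllamaGenerateRequestModel generateRequest = OllamaGenerateRequestBuilder.getInstance("llama2")
        .withPrompt("List three uses of the /api/generate endpoint.")
        .withGetJsonResponse()                     // request JSON-formatted output from the server
        .withOptions(new OptionsBuilder().build())
        .withKeepAlive("5m")
        .build();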
@@ -0,0 +1,46 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate;

import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;

import java.util.List;

import lombok.Getter;
import lombok.Setter;

@Getter
@Setter
public class OllamaGenerateRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody {

  private String prompt;
  private List<String> images;

  private String system;
  private String context;
  private boolean raw;

  public OllamaGenerateRequestModel() {
  }

  public OllamaGenerateRequestModel(String model, String prompt) {
    this.model = model;
    this.prompt = prompt;
  }

  public OllamaGenerateRequestModel(String model, String prompt, List<String> images) {
    this.model = model;
    this.prompt = prompt;
    this.images = images;
  }

  @Override
  public boolean equals(Object o) {
    if (!(o instanceof OllamaGenerateRequestModel)) {
      return false;
    }

    return this.toString().equals(o.toString());
  }

}
@@ -1,4 +1,4 @@
package io.github.amithkoujalgi.ollama4j.core.models;
package io.github.amithkoujalgi.ollama4j.core.models.generate;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -8,7 +8,7 @@ import lombok.Data;

@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class OllamaResponseModel {
public class OllamaGenerateResponseModel {
    private String model;
    private @JsonProperty("created_at") String createdAt;
    private String response;
@@ -0,0 +1,31 @@
package io.github.amithkoujalgi.ollama4j.core.models.generate;

import java.util.ArrayList;
import java.util.List;

import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;

public class OllamaGenerateStreamObserver {

    private OllamaStreamHandler streamHandler;

    private List<OllamaGenerateResponseModel> responseParts = new ArrayList<>();

    private String message = "";

    public OllamaGenerateStreamObserver(OllamaStreamHandler streamHandler) {
        this.streamHandler = streamHandler;
    }

    public void notify(OllamaGenerateResponseModel currentResponsePart) {
        responseParts.add(currentResponsePart);
        handleCurrentResponsePart(currentResponsePart);
    }

    protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart) {
        message = message + currentResponsePart.getResponse();
        streamHandler.accept(message);
    }

}
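A small sketch (not part of the diff) of how the stream observer above is driven: every notify() appends the chunk to the accumulated message and hands the whole message to the handler. It assumes OllamaStreamHandler accepts a String (as the lambda usage in the tests suggests) and that the response model has a default constructor.

import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver;

public class StreamObserverSketch {
    public static void main(String[] args) {
        // The handler sees the message accumulated so far on every notify() call.
        OllamaGenerateStreamObserver observer =
                new OllamaGenerateStreamObserver(partial -> System.out.println(partial));

        OllamaGenerateResponseModel first = new OllamaGenerateResponseModel();
        first.setResponse("Hello");   // setter generated by Lombok @Data
        observer.notify(first);       // prints "Hello"

        OllamaGenerateResponseModel second = new OllamaGenerateResponseModel();
        second.setResponse(", world");
        observer.notify(second);      // prints "Hello, world"
    }
}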
@@ -1,23 +0,0 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;

import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;

import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.AllArgsConstructor;
import lombok.Data;

@Data
@AllArgsConstructor
public class ModelEmbeddingsRequest {
  private String model;
  private String prompt;

  @Override
  public String toString() {
    try {
      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
    } catch (JsonProcessingException e) {
      throw new RuntimeException(e);
    }
  }
}
@@ -0,0 +1,55 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;

import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;

/**
 * Specialization class for chat requests against the /api/chat endpoint.
 */
public class OllamaChatEndpointCaller extends OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class);

    private OllamaChatStreamObserver streamObserver;

    public OllamaChatEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        super(host, basicAuth, requestTimeoutSeconds, verbose);
    }

    @Override
    protected String getEndpointSuffix() {
        return "/api/chat";
    }

    @Override
    protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
        try {
            OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
            responseBuffer.append(ollamaResponseModel.getMessage().getContent());
            if (streamObserver != null) {
                streamObserver.notify(ollamaResponseModel);
            }
            return ollamaResponseModel.isDone();
        } catch (JsonProcessingException e) {
            LOG.error("Error parsing the Ollama chat response!", e);
            return true;
        }
    }

    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
            throws OllamaBaseException, IOException, InterruptedException {
        streamObserver = new OllamaChatStreamObserver(streamHandler);
        return super.callSync(body);
    }
}
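A usage sketch (not part of the diff) for the new chat endpoint caller: build a chat request, then call() with a stream handler that receives the assistant message as it grows. Host, model and messages are placeholders; it assumes OllamaChatRequestModel implements OllamaRequestBody, like its generate counterpart.

import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaChatEndpointCaller;

public class ChatCallerSketch {
    public static void main(String[] args) throws Exception {
        // No Basic Auth (null), 60-second timeout, verbose logging off.
        OllamaChatEndpointCaller caller =
                new OllamaChatEndpointCaller("http://localhost:11434", null, 60, false);

        OllamaChatRequestModel request = OllamaChatRequestBuilder.getInstance("llama2")
                .withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
                .build();

        // The handler is invoked with the accumulated message for every streamed chunk.
        OllamaResult result = caller.call(request, System.out::println);
        System.out.println("Final response: " + result.getResponse());
    }
}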
@@ -0,0 +1,152 @@
 | 
			
		||||
package io.github.amithkoujalgi.ollama4j.core.models.request;
 | 
			
		||||
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 | 
			
		||||
import org.slf4j.Logger;
 | 
			
		||||
import org.slf4j.LoggerFactory;
 | 
			
		||||
 | 
			
		||||
import java.io.BufferedReader;
 | 
			
		||||
import java.io.IOException;
 | 
			
		||||
import java.io.InputStream;
 | 
			
		||||
import java.io.InputStreamReader;
 | 
			
		||||
import java.net.URI;
 | 
			
		||||
import java.net.http.HttpClient;
 | 
			
		||||
import java.net.http.HttpRequest;
 | 
			
		||||
import java.net.http.HttpResponse;
 | 
			
		||||
import java.nio.charset.StandardCharsets;
 | 
			
		||||
import java.time.Duration;
 | 
			
		||||
import java.util.Base64;
 | 
			
		||||
 | 
			
		||||
/**
 * Abstract helper class for calling the Ollama API server.
 */
public abstract class OllamaEndpointCaller {
 | 
			
		||||
 | 
			
		||||
    private static final Logger LOG = LoggerFactory.getLogger(OllamaAPI.class);
 | 
			
		||||
 | 
			
		||||
    private String host;
 | 
			
		||||
    private BasicAuth basicAuth;
 | 
			
		||||
    private long requestTimeoutSeconds;
 | 
			
		||||
    private boolean verbose;
 | 
			
		||||
 | 
			
		||||
    public OllamaEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
 | 
			
		||||
        this.host = host;
 | 
			
		||||
        this.basicAuth = basicAuth;
 | 
			
		||||
        this.requestTimeoutSeconds = requestTimeoutSeconds;
 | 
			
		||||
        this.verbose = verbose;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    protected abstract String getEndpointSuffix();
 | 
			
		||||
 | 
			
		||||
    protected abstract boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer);
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
    /**
     * Calls the API server on the given host and endpoint suffix synchronously, i.e. waiting for the response.
     *
     * @param body POST body payload
     * @return result answer given by the assistant
     * @throws OllamaBaseException  if a response code other than 200 is returned
     * @throws IOException          if the response stream cannot be read
     * @throws InterruptedException if the operation is interrupted, e.g. when the server is not reachable or network issues occur
     */
    public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException {
 | 
			
		||||
        // Create Request
 | 
			
		||||
        long startTime = System.currentTimeMillis();
 | 
			
		||||
        HttpClient httpClient = HttpClient.newHttpClient();
 | 
			
		||||
        URI uri = URI.create(this.host + getEndpointSuffix());
 | 
			
		||||
        HttpRequest.Builder requestBuilder =
 | 
			
		||||
                getRequestBuilderDefault(uri)
 | 
			
		||||
                        .POST(
 | 
			
		||||
                                body.getBodyPublisher());
 | 
			
		||||
        HttpRequest request = requestBuilder.build();
 | 
			
		||||
        if (this.verbose) LOG.info("Asking model: " + body.toString());
 | 
			
		||||
        HttpResponse<InputStream> response =
 | 
			
		||||
                httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
 | 
			
		||||
 | 
			
		||||
        int statusCode = response.statusCode();
 | 
			
		||||
        InputStream responseBodyStream = response.body();
 | 
			
		||||
        StringBuilder responseBuffer = new StringBuilder();
 | 
			
		||||
        try (BufferedReader reader =
 | 
			
		||||
                     new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
 | 
			
		||||
            String line;
 | 
			
		||||
            while ((line = reader.readLine()) != null) {
 | 
			
		||||
                if (statusCode == 404) {
 | 
			
		||||
                    LOG.warn("Status code: 404 (Not Found)");
 | 
			
		||||
                    OllamaErrorResponseModel ollamaResponseModel =
 | 
			
		||||
                            Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
 | 
			
		||||
                    responseBuffer.append(ollamaResponseModel.getError());
 | 
			
		||||
                } else if (statusCode == 401) {
 | 
			
		||||
                    LOG.warn("Status code: 401 (Unauthorized)");
 | 
			
		||||
                    OllamaErrorResponseModel ollamaResponseModel =
 | 
			
		||||
                            Utils.getObjectMapper()
 | 
			
		||||
                                    .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class);
 | 
			
		||||
                    responseBuffer.append(ollamaResponseModel.getError());
 | 
			
		||||
                } else if (statusCode == 400) {
 | 
			
		||||
                    LOG.warn("Status code: 400 (Bad Request)");
 | 
			
		||||
                    OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line,
 | 
			
		||||
                            OllamaErrorResponseModel.class);
 | 
			
		||||
                    responseBuffer.append(ollamaResponseModel.getError());
 | 
			
		||||
                } else {
 | 
			
		||||
                    boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);
 | 
			
		||||
                    if (finished) {
 | 
			
		||||
                        break;
 | 
			
		||||
                    }
 | 
			
		||||
                }
 | 
			
		||||
            }
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        if (statusCode != 200) {
 | 
			
		||||
            LOG.error("Status code " + statusCode);
 | 
			
		||||
            throw new OllamaBaseException(responseBuffer.toString());
 | 
			
		||||
        } else {
 | 
			
		||||
            long endTime = System.currentTimeMillis();
 | 
			
		||||
            OllamaResult ollamaResult =
 | 
			
		||||
                    new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode);
 | 
			
		||||
            if (verbose) LOG.info("Model response: " + ollamaResult);
 | 
			
		||||
            return ollamaResult;
 | 
			
		||||
        }
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    /**
     * Gets the default request builder.
     *
     * @param uri URI to create the HttpRequest.Builder for
     * @return HttpRequest.Builder with JSON content type, the configured request timeout, and an Authorization header when Basic Auth credentials are set
     */
    private HttpRequest.Builder getRequestBuilderDefault(URI uri) {
 | 
			
		||||
        HttpRequest.Builder requestBuilder =
 | 
			
		||||
                HttpRequest.newBuilder(uri)
 | 
			
		||||
                        .header("Content-Type", "application/json")
 | 
			
		||||
                        .timeout(Duration.ofSeconds(this.requestTimeoutSeconds));
 | 
			
		||||
        if (isBasicAuthCredentialsSet()) {
 | 
			
		||||
            requestBuilder.header("Authorization", getBasicAuthHeaderValue());
 | 
			
		||||
        }
 | 
			
		||||
        return requestBuilder;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    /**
 | 
			
		||||
     * Get basic authentication header value.
 | 
			
		||||
     *
 | 
			
		||||
     * @return basic authentication header value (encoded credentials)
 | 
			
		||||
     */
 | 
			
		||||
    private String getBasicAuthHeaderValue() {
 | 
			
		||||
        String credentialsToEncode = this.basicAuth.getUsername() + ":" + this.basicAuth.getPassword();
 | 
			
		||||
        return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes());
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    /**
     * Checks whether Basic Auth credentials are set.
     *
     * @return true if Basic Auth credentials are set
     */
    private boolean isBasicAuthCredentialsSet() {
 | 
			
		||||
        return this.basicAuth != null;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
}
 | 
			
		||||
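For reference, a self-contained sketch (not part of the diff) of the header value produced by getBasicAuthHeaderValue() above; the credentials are placeholders.

import java.util.Base64;

public class BasicAuthHeaderSketch {
    public static void main(String[] args) {
        String username = "admin";   // placeholder credentials
        String password = "secret";
        String credentialsToEncode = username + ":" + password;
        // Same encoding as OllamaEndpointCaller#getBasicAuthHeaderValue()
        String header = "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes());
        System.out.println(header);  // Basic YWRtaW46c2VjcmV0
    }
}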
@@ -0,0 +1,54 @@
package io.github.amithkoujalgi.ollama4j.core.models.request;

import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class);

    private OllamaGenerateStreamObserver streamObserver;

    public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        super(host, basicAuth, requestTimeoutSeconds, verbose);
    }

    @Override
    protected String getEndpointSuffix() {
        return "/api/generate";
    }

    @Override
    protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
        try {
            OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
            responseBuffer.append(ollamaResponseModel.getResponse());
            if (streamObserver != null) {
                streamObserver.notify(ollamaResponseModel);
            }
            return ollamaResponseModel.isDone();
        } catch (JsonProcessingException e) {
            LOG.error("Error parsing the Ollama generate response!", e);
            return true;
        }
    }

    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
            throws OllamaBaseException, IOException, InterruptedException {
        streamObserver = new OllamaGenerateStreamObserver(streamHandler);
        return super.callSync(body);
    }

}
@@ -8,57 +8,75 @@ package io.github.amithkoujalgi.ollama4j.core.types;
 | 
			
		||||
 */
 | 
			
		||||
@SuppressWarnings("ALL")
 | 
			
		||||
public class OllamaModelType {
 | 
			
		||||
    public static final String GEMMA = "gemma";
 | 
			
		||||
    public static final String LLAMA2 = "llama2";
 | 
			
		||||
    public static final String LLAMA3 = "llama3";
 | 
			
		||||
    public static final String MISTRAL = "mistral";
 | 
			
		||||
  public static final String LLAVA = "llava";
 | 
			
		||||
    public static final String MIXTRAL = "mixtral";
 | 
			
		||||
  public static final String STARLING_LM = "starling-lm";
 | 
			
		||||
    public static final String LLAVA = "llava";
 | 
			
		||||
    public static final String LLAVA_PHI3 = "llava-phi3";
 | 
			
		||||
    public static final String NEURAL_CHAT = "neural-chat";
 | 
			
		||||
    public static final String CODELLAMA = "codellama";
 | 
			
		||||
  public static final String LLAMA2_UNCENSORED = "llama2-uncensored";
 | 
			
		||||
    public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral";
 | 
			
		||||
    public static final String MISTRAL_OPENORCA = "mistral-openorca";
 | 
			
		||||
    public static final String LLAMA2_UNCENSORED = "llama2-uncensored";
 | 
			
		||||
    public static final String PHI = "phi";
 | 
			
		||||
    public static final String PHI3 = "phi3";
 | 
			
		||||
    public static final String ORCA_MINI = "orca-mini";
 | 
			
		||||
    public static final String DEEPSEEK_CODER = "deepseek-coder";
 | 
			
		||||
    public static final String DOLPHIN_MISTRAL = "dolphin-mistral";
 | 
			
		||||
    public static final String VICUNA = "vicuna";
 | 
			
		||||
    public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored";
 | 
			
		||||
  public static final String PHIND_CODELLAMA = "phind-codellama";
 | 
			
		||||
  public static final String PHI = "phi";
 | 
			
		||||
    public static final String ZEPHYR = "zephyr";
 | 
			
		||||
    public static final String OPENHERMES = "openhermes";
 | 
			
		||||
    public static final String QWEN = "qwen";
 | 
			
		||||
    public static final String WIZARDCODER = "wizardcoder";
 | 
			
		||||
  public static final String MISTRAL_OPENORCA = "mistral-openorca";
 | 
			
		||||
  public static final String NOUS_HERMES = "nous-hermes";
 | 
			
		||||
  public static final String DEEPSEEK_CODER = "deepseek-coder";
 | 
			
		||||
  public static final String WIZARD_MATH = "wizard-math";
 | 
			
		||||
    public static final String LLAMA2_CHINESE = "llama2-chinese";
 | 
			
		||||
  public static final String FALCON = "falcon";
 | 
			
		||||
  public static final String ORCA2 = "orca2";
 | 
			
		||||
  public static final String STABLE_BELUGA = "stable-beluga";
 | 
			
		||||
  public static final String CODEUP = "codeup";
 | 
			
		||||
  public static final String EVERYTHINGLM = "everythinglm";
 | 
			
		||||
  public static final String MEDLLAMA2 = "medllama2";
 | 
			
		||||
  public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored";
 | 
			
		||||
  public static final String STARCODER = "starcoder";
 | 
			
		||||
  public static final String DOLPHIN22_MISTRAL = "dolphin2.2-mistral";
 | 
			
		||||
    public static final String TINYLLAMA = "tinyllama";
 | 
			
		||||
    public static final String PHIND_CODELLAMA = "phind-codellama";
 | 
			
		||||
    public static final String OPENCHAT = "openchat";
 | 
			
		||||
  public static final String WIZARD_VICUNA = "wizard-vicuna";
 | 
			
		||||
  public static final String OPENHERMES25_MISTRAL = "openhermes2.5-mistral";
 | 
			
		||||
  public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2";
 | 
			
		||||
    public static final String ORCA2 = "orca2";
 | 
			
		||||
    public static final String FALCON = "falcon";
 | 
			
		||||
    public static final String WIZARD_MATH = "wizard-math";
 | 
			
		||||
    public static final String TINYDOLPHIN = "tinydolphin";
 | 
			
		||||
    public static final String NOUS_HERMES = "nous-hermes";
 | 
			
		||||
    public static final String YI = "yi";
 | 
			
		||||
  public static final String YARN_MISTRAL = "yarn-mistral";
 | 
			
		||||
  public static final String SAMANTHA_MISTRAL = "samantha-mistral";
 | 
			
		||||
  public static final String SQLCODER = "sqlcoder";
 | 
			
		||||
  public static final String YARN_LLAMA2 = "yarn-llama2";
 | 
			
		||||
  public static final String MEDITRON = "meditron";
 | 
			
		||||
  public static final String STABLELM_ZEPHYR = "stablelm-zephyr";
 | 
			
		||||
  public static final String OPENHERMES2_MISTRAL = "openhermes2-mistral";
 | 
			
		||||
  public static final String DEEPSEEK_LLM = "deepseek-llm";
 | 
			
		||||
  public static final String MISTRALLITE = "mistrallite";
 | 
			
		||||
  public static final String DOLPHIN21_MISTRAL = "dolphin2.1-mistral";
 | 
			
		||||
  public static final String WIZARDLM = "wizardlm";
 | 
			
		||||
  public static final String CODEBOOGA = "codebooga";
 | 
			
		||||
  public static final String MAGICODER = "magicoder";
 | 
			
		||||
  public static final String GOLIATH = "goliath";
 | 
			
		||||
  public static final String NEXUSRAVEN = "nexusraven";
 | 
			
		||||
  public static final String ALFRED = "alfred";
 | 
			
		||||
  public static final String XWINLM = "xwinlm";
 | 
			
		||||
    public static final String DOLPHIN_PHI = "dolphin-phi";
 | 
			
		||||
    public static final String STARLING_LM = "starling-lm";
 | 
			
		||||
    public static final String STARCODER = "starcoder";
 | 
			
		||||
    public static final String CODEUP = "codeup";
 | 
			
		||||
    public static final String MEDLLAMA2 = "medllama2";
 | 
			
		||||
    public static final String STABLE_CODE = "stable-code";
 | 
			
		||||
    public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored";
 | 
			
		||||
    public static final String BAKLLAVA = "bakllava";
 | 
			
		||||
    public static final String EVERYTHINGLM = "everythinglm";
 | 
			
		||||
    public static final String SOLAR = "solar";
 | 
			
		||||
    public static final String STABLE_BELUGA = "stable-beluga";
 | 
			
		||||
    public static final String SQLCODER = "sqlcoder";
 | 
			
		||||
    public static final String YARN_MISTRAL = "yarn-mistral";
 | 
			
		||||
    public static final String NOUS_HERMES2_MIXTRAL = "nous-hermes2-mixtral";
 | 
			
		||||
    public static final String SAMANTHA_MISTRAL = "samantha-mistral";
 | 
			
		||||
    public static final String STABLELM_ZEPHYR = "stablelm-zephyr";
 | 
			
		||||
    public static final String MEDITRON = "meditron";
 | 
			
		||||
    public static final String WIZARD_VICUNA = "wizard-vicuna";
 | 
			
		||||
    public static final String STABLELM2 = "stablelm2";
 | 
			
		||||
    public static final String MAGICODER = "magicoder";
 | 
			
		||||
    public static final String YARN_LLAMA2 = "yarn-llama2";
 | 
			
		||||
    public static final String NOUS_HERMES2 = "nous-hermes2";
 | 
			
		||||
    public static final String DEEPSEEK_LLM = "deepseek-llm";
 | 
			
		||||
    public static final String LLAMA_PRO = "llama-pro";
 | 
			
		||||
    public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2";
 | 
			
		||||
    public static final String CODEBOOGA = "codebooga";
 | 
			
		||||
    public static final String MISTRALLITE = "mistrallite";
 | 
			
		||||
    public static final String NEXUSRAVEN = "nexusraven";
 | 
			
		||||
    public static final String GOLIATH = "goliath";
 | 
			
		||||
    public static final String NOMIC_EMBED_TEXT = "nomic-embed-text";
 | 
			
		||||
    public static final String NOTUX = "notux";
 | 
			
		||||
    public static final String ALFRED = "alfred";
 | 
			
		||||
    public static final String MEGADOLPHIN = "megadolphin";
 | 
			
		||||
    public static final String WIZARDLM = "wizardlm";
 | 
			
		||||
    public static final String XWINLM = "xwinlm";
 | 
			
		||||
    public static final String NOTUS = "notus";
 | 
			
		||||
    public static final String DUCKDB_NSQL = "duckdb-nsql";
 | 
			
		||||
    public static final String ALL_MINILM = "all-minilm";
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -0,0 +1,21 @@
package io.github.amithkoujalgi.ollama4j.core.utils;

import java.io.IOException;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;

public class BooleanToJsonFormatFlagSerializer extends JsonSerializer<Boolean> {

    @Override
    public void serialize(Boolean value, JsonGenerator gen, SerializerProvider serializers) throws IOException {
        gen.writeString("json");
    }

    @Override
    public boolean isEmpty(SerializerProvider provider, Boolean value) {
        return !value;
    }

}
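A sketch (not part of the diff) of the effect of this serializer: a Boolean flag annotated with it is written as the string "json", and isEmpty() suppresses the property when the flag is false, provided the field uses NON_EMPTY inclusion. The Payload class and its wiring are assumptions, not the library's real request models.

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.github.amithkoujalgi.ollama4j.core.utils.BooleanToJsonFormatFlagSerializer;

public class FormatFlagSketch {
    static class Payload {
        @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)
        @JsonInclude(JsonInclude.Include.NON_EMPTY) // lets isEmpty() drop the property when false
        public Boolean format = true;
    }

    public static void main(String[] args) throws Exception {
        System.out.println(new ObjectMapper().writeValueAsString(new Payload()));
        // prints: {"format":"json"}
    }
}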
@@ -0,0 +1,21 @@
package io.github.amithkoujalgi.ollama4j.core.utils;

import java.io.IOException;
import java.util.Base64;
import java.util.Collection;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;

public class FileToBase64Serializer extends JsonSerializer<Collection<byte[]>> {

    @Override
    public void serialize(Collection<byte[]> value, JsonGenerator jsonGenerator, SerializerProvider serializers) throws IOException {
        jsonGenerator.writeStartArray();
        for (byte[] file : value) {
            jsonGenerator.writeString(Base64.getEncoder().encodeToString(file));
        }
        jsonGenerator.writeEndArray();
    }
}
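A sketch (not part of the diff) of what this serializer emits: each byte[] becomes one Base64 string in a JSON array, which is the shape the image-carrying requests send. The Payload class here is a hypothetical stand-in for the real request models.

import java.util.List;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.github.amithkoujalgi.ollama4j.core.utils.FileToBase64Serializer;

public class ImagesFieldSketch {
    static class Payload {
        @JsonSerialize(using = FileToBase64Serializer.class)
        public List<byte[]> images = List.of("fake-image-bytes".getBytes());
    }

    public static void main(String[] args) throws Exception {
        System.out.println(new ObjectMapper().writeValueAsString(new Payload()));
        // prints something like: {"images":["ZmFrZS1pbWFnZS1ieXRlcw=="]}
    }
}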
@@ -0,0 +1,28 @@
package io.github.amithkoujalgi.ollama4j.core.utils;

import java.net.http.HttpRequest.BodyPublisher;
import java.net.http.HttpRequest.BodyPublishers;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonProcessingException;

/**
 * Interface representing an Ollama request as an HTTP request body via {@link BodyPublishers}.
 */
public interface OllamaRequestBody {

    /**
     * Transforms the Ollama request object into its JSON representation via Jackson.
     *
     * @return JSON representation of an OllamaRequest
     */
    @JsonIgnore
    default BodyPublisher getBodyPublisher() {
        try {
            return BodyPublishers.ofString(
                    Utils.getObjectMapper().writeValueAsString(this));
        } catch (JsonProcessingException e) {
            throw new IllegalArgumentException("Request not Body convertible.", e);
        }
    }
}
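A sketch (not part of the diff) of how the default getBodyPublisher() is consumed by java.net.http. PingRequest is a hypothetical type; real request models such as OllamaGenerateRequestModel are used the same way (and a real call would need at least a model field).

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;

public class RequestBodySketch {
    // Hypothetical request type relying entirely on the interface's default method.
    static class PingRequest implements OllamaRequestBody {
        public String prompt = "ping";
    }

    public static void main(String[] args) throws Exception {
        HttpRequest request = HttpRequest.newBuilder(URI.create("http://localhost:11434/api/generate"))
                .header("Content-Type", "application/json")
                .POST(new PingRequest().getBodyPublisher()) // JSON body: {"prompt":"ping"}
                .build();
        HttpResponse<String> response =
                HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode());
    }
}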
@@ -1,9 +1,38 @@
package io.github.amithkoujalgi.ollama4j.core.utils;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;

public class Utils {

  private static ObjectMapper objectMapper;

  public static ObjectMapper getObjectMapper() {
    return new ObjectMapper();
    if (objectMapper == null) {
      objectMapper = new ObjectMapper();
      objectMapper.registerModule(new JavaTimeModule());
    }
    return objectMapper;
  }

  public static byte[] loadImageBytesFromUrl(String imageUrl)
      throws IOException, URISyntaxException {
    URL url = new URI(imageUrl).toURL();
    try (InputStream in = url.openStream();
        ByteArrayOutputStream out = new ByteArrayOutputStream()) {
      byte[] buffer = new byte[1024];
      int bytesRead;
      while ((bytesRead = in.read(buffer)) != -1) {
        out.write(buffer, 0, bytesRead);
      }
      return out.toByteArray();
    }
  }
}
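A short sketch (not part of the diff) of what the reworked getObjectMapper() provides: the mapper is created once and has the JavaTimeModule registered, so java.time values serialize without extra setup. Note the lazy initialization is not synchronized; concurrent first calls could still create two mappers, which is harmless but worth knowing.

import java.time.OffsetDateTime;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

public class ObjectMapperSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper first = Utils.getObjectMapper();
        ObjectMapper second = Utils.getObjectMapper();
        System.out.println(first == second); // true: the instance is cached now

        // With JavaTimeModule registered, java.time types serialize out of the box.
        System.out.println(first.writeValueAsString(OffsetDateTime.parse("2024-01-01T00:00:00Z")));
    }
}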
@@ -4,38 +4,48 @@ import static org.junit.jupiter.api.Assertions.*;
 | 
			
		||||
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
 | 
			
		||||
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
 | 
			
		||||
import java.io.File;
 | 
			
		||||
import java.io.IOException;
 | 
			
		||||
import java.io.InputStream;
 | 
			
		||||
import java.net.ConnectException;
 | 
			
		||||
import java.net.URISyntaxException;
 | 
			
		||||
import java.net.http.HttpConnectTimeoutException;
 | 
			
		||||
import java.util.List;
 | 
			
		||||
import java.util.Objects;
 | 
			
		||||
import java.util.Properties;
 | 
			
		||||
import lombok.Data;
 | 
			
		||||
import org.junit.jupiter.api.BeforeEach;
 | 
			
		||||
import org.junit.jupiter.api.Order;
 | 
			
		||||
import org.junit.jupiter.api.Test;
 | 
			
		||||
import org.slf4j.Logger;
 | 
			
		||||
import org.slf4j.LoggerFactory;
 | 
			
		||||
 | 
			
		||||
class TestRealAPIs {
 | 
			
		||||
  OllamaAPI ollamaAPI;
 | 
			
		||||
 | 
			
		||||
  private Properties loadProperties() {
 | 
			
		||||
    Properties properties = new Properties();
 | 
			
		||||
    try (InputStream input =
 | 
			
		||||
        getClass().getClassLoader().getResourceAsStream("test-config.properties")) {
 | 
			
		||||
      if (input == null) {
 | 
			
		||||
        throw new RuntimeException("Sorry, unable to find test-config.properties");
 | 
			
		||||
      }
 | 
			
		||||
      properties.load(input);
 | 
			
		||||
      return properties;
 | 
			
		||||
    } catch (IOException e) {
 | 
			
		||||
      throw new RuntimeException("Error loading properties", e);
 | 
			
		||||
    }
 | 
			
		||||
  private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class);
 | 
			
		||||
 | 
			
		||||
  OllamaAPI ollamaAPI;
 | 
			
		||||
  Config config;
 | 
			
		||||
 | 
			
		||||
  private File getImageFileFromClasspath(String fileName) {
 | 
			
		||||
    ClassLoader classLoader = getClass().getClassLoader();
 | 
			
		||||
    return new File(Objects.requireNonNull(classLoader.getResource(fileName)).getFile());
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @BeforeEach
 | 
			
		||||
  void setUp() {
 | 
			
		||||
    Properties properties = loadProperties();
 | 
			
		||||
    ollamaAPI = new OllamaAPI(properties.getProperty("ollama.api.url"));
 | 
			
		||||
    config = new Config();
 | 
			
		||||
    ollamaAPI = new OllamaAPI(config.getOllamaURL());
 | 
			
		||||
    ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds());
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Test
 | 
			
		||||
@@ -53,7 +63,7 @@ class TestRealAPIs {
 | 
			
		||||
    } catch (HttpConnectTimeoutException e) {
 | 
			
		||||
      fail(e.getMessage());
 | 
			
		||||
    } catch (Exception e) {
 | 
			
		||||
      throw new RuntimeException(e);
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
@@ -65,7 +75,7 @@ class TestRealAPIs {
 | 
			
		||||
      assertNotNull(ollamaAPI.listModels());
 | 
			
		||||
      ollamaAPI.listModels().forEach(System.out::println);
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
 | 
			
		||||
      throw new RuntimeException(e);
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
@@ -74,13 +84,310 @@ class TestRealAPIs {
 | 
			
		||||
  void testPullModel() {
 | 
			
		||||
    testEndpointReachability();
 | 
			
		||||
    try {
 | 
			
		||||
      ollamaAPI.pullModel(OllamaModelType.LLAMA2);
 | 
			
		||||
      ollamaAPI.pullModel(config.getModel());
 | 
			
		||||
      boolean found =
 | 
			
		||||
          ollamaAPI.listModels().stream()
 | 
			
		||||
              .anyMatch(model -> model.getModelName().equals(OllamaModelType.LLAMA2));
 | 
			
		||||
              .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel()));
 | 
			
		||||
      assertTrue(found);
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
 | 
			
		||||
      throw new RuntimeException(e);
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Test
 | 
			
		||||
  @Order(3)
 | 
			
		||||
  void testListDetails() {
 | 
			
		||||
    testEndpointReachability();
 | 
			
		||||
    try {
 | 
			
		||||
      ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel());
 | 
			
		||||
      assertNotNull(modelDetails);
 | 
			
		||||
      System.out.println(modelDetails);
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Test
 | 
			
		||||
  @Order(3)
 | 
			
		||||
  void testAskModelWithDefaultOptions() {
 | 
			
		||||
    testEndpointReachability();
 | 
			
		||||
    try {
 | 
			
		||||
      OllamaResult result =
 | 
			
		||||
          ollamaAPI.generate(
 | 
			
		||||
              config.getModel(),
 | 
			
		||||
              "What is the capital of France? And what's France's connection with Mona Lisa?",
 | 
			
		||||
              new OptionsBuilder().build());
 | 
			
		||||
      assertNotNull(result);
 | 
			
		||||
      assertNotNull(result.getResponse());
 | 
			
		||||
      assertFalse(result.getResponse().isEmpty());
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException e) {
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Test
 | 
			
		||||
  @Order(3)
 | 
			
		||||
  void testAskModelWithDefaultOptionsStreamed() {
 | 
			
		||||
    testEndpointReachability();
 | 
			
		||||
    try {
 | 
			
		||||
 | 
			
		||||
      StringBuffer sb = new StringBuffer("");
 | 
			
		||||
 | 
			
		||||
      OllamaResult result = ollamaAPI.generate(config.getModel(),
 | 
			
		||||
          "What is the capital of France? And what's France's connection with Mona Lisa?",
 | 
			
		||||
          new OptionsBuilder().build(), (s) -> {
 | 
			
		||||
            LOG.info(s);
 | 
			
		||||
            String substring = s.substring(sb.toString().length(), s.length());
 | 
			
		||||
            LOG.info(substring);
 | 
			
		||||
            sb.append(substring);
 | 
			
		||||
          });
 | 
			
		||||
 | 
			
		||||
      assertNotNull(result);
 | 
			
		||||
      assertNotNull(result.getResponse());
 | 
			
		||||
      assertFalse(result.getResponse().isEmpty());
 | 
			
		||||
      assertEquals(sb.toString().trim(), result.getResponse().trim());
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException e) {
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Test
 | 
			
		||||
  @Order(3)
 | 
			
		||||
  void testAskModelWithOptions() {
 | 
			
		||||
    testEndpointReachability();
 | 
			
		||||
    try {
 | 
			
		||||
      OllamaResult result =
 | 
			
		||||
          ollamaAPI.generate(
 | 
			
		||||
              config.getModel(),
 | 
			
		||||
              "What is the capital of France? And what's France's connection with Mona Lisa?",
 | 
			
		||||
              new OptionsBuilder().setTemperature(0.9f).build());
 | 
			
		||||
      assertNotNull(result);
 | 
			
		||||
      assertNotNull(result.getResponse());
 | 
			
		||||
      assertFalse(result.getResponse().isEmpty());
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException e) {
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Test
 | 
			
		||||
  @Order(3)
 | 
			
		||||
  void testChat() {
 | 
			
		||||
    testEndpointReachability();
 | 
			
		||||
    try {
 | 
			
		||||
      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
 | 
			
		||||
      OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
 | 
			
		||||
             .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!")
 | 
			
		||||
             .withMessage(OllamaChatMessageRole.USER,"And what is the second larges city?")
 | 
			
		||||
             .build();
 | 
			
		||||
 | 
			
		||||
      OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
 | 
			
		||||
      assertNotNull(chatResult);
 | 
			
		||||
      assertFalse(chatResult.getResponse().isBlank());
 | 
			
		||||
      assertEquals(4,chatResult.getChatHistory().size());
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException e) {
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Test
 | 
			
		||||
  @Order(3)
 | 
			
		||||
  void testChatWithSystemPrompt() {
 | 
			
		||||
    testEndpointReachability();
 | 
			
		||||
    try {
 | 
			
		||||
      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
 | 
			
		||||
      OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM,
 | 
			
		||||
          "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
 | 
			
		||||
          .withMessage(OllamaChatMessageRole.USER,
 | 
			
		||||
              "What is the capital of France? And what's France's connection with Mona Lisa?")
 | 
			
		||||
          .build();
 | 
			
		||||
 | 
			
		||||
      OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
 | 
			
		||||
      assertNotNull(chatResult);
 | 
			
		||||
      assertFalse(chatResult.getResponse().isBlank());
 | 
			
		||||
      assertTrue(chatResult.getResponse().startsWith("NI"));
 | 
			
		||||
      assertEquals(3, chatResult.getChatHistory().size());
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException e) {
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Test
 | 
			
		||||
  @Order(3)
 | 
			
		||||
  void testChatWithStream() {
 | 
			
		||||
    testEndpointReachability();
 | 
			
		||||
    try {
 | 
			
		||||
      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
 | 
			
		||||
      OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
 | 
			
		||||
              "What is the capital of France? And what's France's connection with Mona Lisa?")
 | 
			
		||||
          .build();
 | 
			
		||||
 | 
			
		||||
      StringBuffer sb = new StringBuffer("");
 | 
			
		||||
 | 
			
		||||
      OllamaChatResult chatResult = ollamaAPI.chat(requestModel,(s) -> {
 | 
			
		||||
        LOG.info(s);
 | 
			
		||||
        String substring = s.substring(sb.toString().length(), s.length());
 | 
			
		||||
        LOG.info(substring);
 | 
			
		||||
        sb.append(substring);
 | 
			
		||||
      });
 | 
			
		||||
      assertNotNull(chatResult);
 | 
			
		||||
      assertEquals(sb.toString().trim(), chatResult.getResponse().trim());
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException e) {
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Test
 | 
			
		||||
  @Order(3)
 | 
			
		||||
  void testChatWithImageFromFileWithHistoryRecognition() {
 | 
			
		||||
    testEndpointReachability();
 | 
			
		||||
    try {
 | 
			
		||||
      OllamaChatRequestBuilder builder =
 | 
			
		||||
          OllamaChatRequestBuilder.getInstance(config.getImageModel());
 | 
			
		||||
      OllamaChatRequestModel requestModel =
 | 
			
		||||
          builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
 | 
			
		||||
              List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
 | 
			
		||||
 | 
			
		||||
      OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
 | 
			
		||||
      assertNotNull(chatResult);
 | 
			
		||||
      assertNotNull(chatResult.getResponse());
 | 
			
		||||
 | 
			
		||||
      builder.reset();
 | 
			
		||||
 | 
			
		||||
      requestModel =
 | 
			
		||||
          builder.withMessages(chatResult.getChatHistory())
 | 
			
		||||
            .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();
 | 
			
		||||
 | 
			
		||||
      chatResult = ollamaAPI.chat(requestModel);
 | 
			
		||||
      assertNotNull(chatResult);
 | 
			
		||||
      assertNotNull(chatResult.getResponse());
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException e) {
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Test
 | 
			
		||||
  @Order(3)
 | 
			
		||||
  void testChatWithImageFromURL() {
 | 
			
		||||
    testEndpointReachability();
 | 
			
		||||
    try {
 | 
			
		||||
      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel());
 | 
			
		||||
      OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
 | 
			
		||||
      "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")
 | 
			
		||||
             .build();
 | 
			
		||||
 | 
			
		||||
      OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
 | 
			
		||||
      assertNotNull(chatResult);
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException e) {
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Test
 | 
			
		||||
  @Order(3)
 | 
			
		||||
  void testAskModelWithOptionsAndImageFiles() {
 | 
			
		||||
    testEndpointReachability();
 | 
			
		||||
    File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
 | 
			
		||||
    try {
 | 
			
		||||
      OllamaResult result =
 | 
			
		||||
          ollamaAPI.generateWithImageFiles(
 | 
			
		||||
              config.getImageModel(),
 | 
			
		||||
              "What is in this image?",
 | 
			
		||||
              List.of(imageFile),
 | 
			
		||||
              new OptionsBuilder().build());
 | 
			
		||||
      assertNotNull(result);
 | 
			
		||||
      assertNotNull(result.getResponse());
 | 
			
		||||
      assertFalse(result.getResponse().isEmpty());
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException e) {
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Test
 | 
			
		||||
  @Order(3)
 | 
			
		||||
  void testAskModelWithOptionsAndImageFilesStreamed() {
 | 
			
		||||
    testEndpointReachability();
 | 
			
		||||
    File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
 | 
			
		||||
    try {
 | 
			
		||||
      StringBuffer sb = new StringBuffer("");
 | 
			
		||||
 | 
			
		||||
      OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(),
 | 
			
		||||
          "What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> {
 | 
			
		||||
            LOG.info(s);
 | 
			
		||||
            String substring = s.substring(sb.toString().length(), s.length());
 | 
			
		||||
            LOG.info(substring);
 | 
			
		||||
            sb.append(substring);
 | 
			
		||||
          });
 | 
			
		||||
      assertNotNull(result);
 | 
			
		||||
      assertNotNull(result.getResponse());
 | 
			
		||||
      assertFalse(result.getResponse().isEmpty());
 | 
			
		||||
      assertEquals(sb.toString().trim(), result.getResponse().trim());
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException e) {
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Test
 | 
			
		||||
  @Order(3)
 | 
			
		||||
  void testAskModelWithOptionsAndImageURLs() {
 | 
			
		||||
    testEndpointReachability();
 | 
			
		||||
    try {
 | 
			
		||||
      OllamaResult result =
 | 
			
		||||
          ollamaAPI.generateWithImageURLs(
 | 
			
		||||
              config.getImageModel(),
 | 
			
		||||
              "What is in this image?",
 | 
			
		||||
              List.of(
 | 
			
		||||
                  "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
 | 
			
		||||
              new OptionsBuilder().build());
 | 
			
		||||
      assertNotNull(result);
 | 
			
		||||
      assertNotNull(result.getResponse());
 | 
			
		||||
      assertFalse(result.getResponse().isEmpty());
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Test
 | 
			
		||||
  @Order(3)
 | 
			
		||||
  public void testEmbedding() {
 | 
			
		||||
    testEndpointReachability();
 | 
			
		||||
    try {
 | 
			
		||||
      OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder
 | 
			
		||||
          .getInstance(config.getModel(), "What is the capital of France?").build();
 | 
			
		||||
 | 
			
		||||
      List<Double> embeddings = ollamaAPI.generateEmbeddings(request);
 | 
			
		||||
 | 
			
		||||
      assertNotNull(embeddings);
 | 
			
		||||
      assertFalse(embeddings.isEmpty());
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException e) {
 | 
			
		||||
      fail(e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@Data
 | 
			
		||||
class Config {
 | 
			
		||||
  private String ollamaURL;
 | 
			
		||||
  private String model;
 | 
			
		||||
  private String imageModel;
 | 
			
		||||
  private int requestTimeoutSeconds;
 | 
			
		||||
 | 
			
		||||
  public Config() {
 | 
			
		||||
    Properties properties = new Properties();
 | 
			
		||||
    try (InputStream input =
 | 
			
		||||
        getClass().getClassLoader().getResourceAsStream("test-config.properties")) {
 | 
			
		||||
      if (input == null) {
 | 
			
		||||
        throw new RuntimeException("Sorry, unable to find test-config.properties");
 | 
			
		||||
      }
 | 
			
		||||
      properties.load(input);
 | 
			
		||||
      this.ollamaURL = properties.getProperty("ollama.url");
 | 
			
		||||
      this.model = properties.getProperty("ollama.model");
 | 
			
		||||
      this.imageModel = properties.getProperty("ollama.model.image");
 | 
			
		||||
      this.requestTimeoutSeconds =
 | 
			
		||||
          Integer.parseInt(properties.getProperty("ollama.request-timeout-seconds"));
 | 
			
		||||
    } catch (IOException e) {
 | 
			
		||||
      throw new RuntimeException("Error loading properties", e);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -103,10 +103,10 @@ class TestMockedAPIs {
 | 
			
		||||
    String prompt = "some prompt text";
 | 
			
		||||
    OptionsBuilder optionsBuilder = new OptionsBuilder();
 | 
			
		||||
    try {
 | 
			
		||||
      when(ollamaAPI.ask(model, prompt, optionsBuilder.build()))
 | 
			
		||||
      when(ollamaAPI.generate(model, prompt, optionsBuilder.build()))
 | 
			
		||||
          .thenReturn(new OllamaResult("", 0, 200));
 | 
			
		||||
      ollamaAPI.ask(model, prompt, optionsBuilder.build());
 | 
			
		||||
      verify(ollamaAPI, times(1)).ask(model, prompt, optionsBuilder.build());
 | 
			
		||||
      ollamaAPI.generate(model, prompt, optionsBuilder.build());
 | 
			
		||||
      verify(ollamaAPI, times(1)).generate(model, prompt, optionsBuilder.build());
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException e) {
 | 
			
		||||
      throw new RuntimeException(e);
 | 
			
		||||
    }
 | 
			
		||||
@@ -118,10 +118,14 @@ class TestMockedAPIs {
 | 
			
		||||
    String model = OllamaModelType.LLAMA2;
 | 
			
		||||
    String prompt = "some prompt text";
 | 
			
		||||
    try {
 | 
			
		||||
      when(ollamaAPI.askWithImageFiles(model, prompt, Collections.emptyList()))
 | 
			
		||||
      when(ollamaAPI.generateWithImageFiles(
 | 
			
		||||
              model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
 | 
			
		||||
          .thenReturn(new OllamaResult("", 0, 200));
 | 
			
		||||
      ollamaAPI.askWithImageFiles(model, prompt, Collections.emptyList());
 | 
			
		||||
      verify(ollamaAPI, times(1)).askWithImageFiles(model, prompt, Collections.emptyList());
 | 
			
		||||
      ollamaAPI.generateWithImageFiles(
 | 
			
		||||
          model, prompt, Collections.emptyList(), new OptionsBuilder().build());
 | 
			
		||||
      verify(ollamaAPI, times(1))
 | 
			
		||||
          .generateWithImageFiles(
 | 
			
		||||
              model, prompt, Collections.emptyList(), new OptionsBuilder().build());
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException e) {
 | 
			
		||||
      throw new RuntimeException(e);
 | 
			
		||||
    }
 | 
			
		||||
@@ -133,10 +137,14 @@ class TestMockedAPIs {
 | 
			
		||||
    String model = OllamaModelType.LLAMA2;
 | 
			
		||||
    String prompt = "some prompt text";
 | 
			
		||||
    try {
 | 
			
		||||
      when(ollamaAPI.askWithImageURLs(model, prompt, Collections.emptyList()))
 | 
			
		||||
      when(ollamaAPI.generateWithImageURLs(
 | 
			
		||||
              model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
 | 
			
		||||
          .thenReturn(new OllamaResult("", 0, 200));
 | 
			
		||||
      ollamaAPI.askWithImageURLs(model, prompt, Collections.emptyList());
 | 
			
		||||
      verify(ollamaAPI, times(1)).askWithImageURLs(model, prompt, Collections.emptyList());
 | 
			
		||||
      ollamaAPI.generateWithImageURLs(
 | 
			
		||||
          model, prompt, Collections.emptyList(), new OptionsBuilder().build());
 | 
			
		||||
      verify(ollamaAPI, times(1))
 | 
			
		||||
          .generateWithImageURLs(
 | 
			
		||||
              model, prompt, Collections.emptyList(), new OptionsBuilder().build());
 | 
			
		||||
    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
 | 
			
		||||
      throw new RuntimeException(e);
 | 
			
		||||
    }
 | 
			
		||||
@@ -147,9 +155,9 @@ class TestMockedAPIs {
 | 
			
		||||
    OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
 | 
			
		||||
    String model = OllamaModelType.LLAMA2;
 | 
			
		||||
    String prompt = "some prompt text";
 | 
			
		||||
    when(ollamaAPI.askAsync(model, prompt))
 | 
			
		||||
    when(ollamaAPI.generateAsync(model, prompt))
 | 
			
		||||
        .thenReturn(new OllamaAsyncResultCallback(null, null, 3));
 | 
			
		||||
    ollamaAPI.askAsync(model, prompt);
 | 
			
		||||
    verify(ollamaAPI, times(1)).askAsync(model, prompt);
 | 
			
		||||
    ollamaAPI.generateAsync(model, prompt);
 | 
			
		||||
    verify(ollamaAPI, times(1)).generateAsync(model, prompt);
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -0,0 +1,35 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

public abstract class AbstractSerializationTest<T> {

    protected ObjectMapper mapper = Utils.getObjectMapper();

    protected String serialize(T obj) {
        try {
            return mapper.writeValueAsString(obj);
        } catch (JsonProcessingException e) {
            fail("Could not serialize request!", e);
            return null;
        }
    }

    protected T deserialize(String jsonObject, Class<T> deserializationClass) {
        try {
            return mapper.readValue(jsonObject, deserializationClass);
        } catch (JsonProcessingException e) {
            fail("Could not deserialize jsonObject!", e);
            return null;
        }
    }

    protected void assertEqualsAfterUnmarshalling(T unmarshalledObject, T req) {
        assertEquals(req, unmarshalledObject);
    }
}
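A sketch (not part of the diff) of how a concrete test would extend this base class: serialize, deserialize, and compare. The embeddings request and builder names are taken from elsewhere in the diff; value-based equals() on the model (e.g. via Lombok) is an assumption.

import org.junit.jupiter.api.Test;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.unittests.jackson.AbstractSerializationTest;

public class TestEmbeddingsRequestSerialization
        extends AbstractSerializationTest<OllamaEmbeddingsRequestModel> {

    @Test
    public void testRoundTrip() {
        OllamaEmbeddingsRequestModel req = OllamaEmbeddingsRequestBuilder
                .getInstance("nomic-embed-text", "Some prompt").build();
        String json = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(json, OllamaEmbeddingsRequestModel.class), req);
    }
}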
@@ -0,0 +1,113 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.io.File;
import java.util.List;

import org.json.JSONObject;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class TestChatRequestSerialization extends AbstractSerializationTest<OllamaChatRequestModel> {

    private OllamaChatRequestBuilder builder;

    @BeforeEach
    public void init() {
        builder = OllamaChatRequestBuilder.getInstance("DummyModel");
    }

    @Test
    public void testRequestOnlyMandatoryFields() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build();
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req);
    }

    @Test
    public void testRequestMultipleMessages() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt")
                .withMessage(OllamaChatMessageRole.USER, "Some prompt")
                .build();
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req);
    }

    @Test
    public void testRequestWithMessageAndImage() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
                List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req);
    }

    @Test
    public void testRequestWithOptions() {
        OptionsBuilder b = new OptionsBuilder();
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
                .withOptions(b.setMirostat(1).build())
                .withOptions(b.setTemperature(1L).build())
                .withOptions(b.setMirostatEta(1L).build())
                .withOptions(b.setMirostatTau(1L).build())
                .withOptions(b.setNumGpu(1).build())
                .withOptions(b.setSeed(1).build())
                .withOptions(b.setTopK(1).build())
                .withOptions(b.setTopP(1).build())
                .build();

        String jsonRequest = serialize(req);
        OllamaChatRequestModel deserializeRequest = deserialize(jsonRequest, OllamaChatRequestModel.class);
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
        assertEquals(1.0, deserializeRequest.getOptions().get("temperature"));
        assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_eta"));
        assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_tau"));
        assertEquals(1, deserializeRequest.getOptions().get("num_gpu"));
        assertEquals(1, deserializeRequest.getOptions().get("seed"));
        assertEquals(1, deserializeRequest.getOptions().get("top_k"));
        assertEquals(1.0, deserializeRequest.getOptions().get("top_p"));
    }

    @Test
    public void testWithJsonFormat() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
                .withGetJsonResponse().build();

        String jsonRequest = serialize(req);
        // No Jackson deserialization here: the format property is not a boolean, and
        // deserializing requests is never needed in real code anyway.
        JSONObject jsonObject = new JSONObject(jsonRequest);
        String requestFormatProperty = jsonObject.getString("format");
        assertEquals("json", requestFormatProperty);
    }

    @Test
    public void testWithTemplate() {
        OllamaChatRequestModel req = builder.withTemplate("System Template")
                .build();
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req);
    }

    @Test
    public void testWithStreaming() {
        OllamaChatRequestModel req = builder.withStreaming().build();
        String jsonRequest = serialize(req);
        assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).isStream(), true);
    }

    @Test
    public void testWithKeepAlive() {
        String expectedKeepAlive = "5m";
        OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive)
                .build();
        String jsonRequest = serialize(req);
        assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive);
    }
}

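Note the split in the option assertions above: integer-valued options (mirostat, num_gpu, seed, top_k) come back as 1, while float-valued ones (temperature, mirostat_eta, mirostat_tau, top_p) come back as 1.0. That is ordinary Jackson behaviour when option values land in a Map<String, Object>: JSON integers deserialize as Integer, JSON decimals as Double. A minimal sketch, independent of ollama4j:

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Map;

public class OptionNumberTypesSketch {

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        Map<String, Object> options = mapper.readValue(
                "{\"mirostat\":1,\"temperature\":1.0}",
                new TypeReference<Map<String, Object>>() {});

        // Integer-typed option round-trips as Integer, float-typed as Double.
        System.out.println(options.get("mirostat").getClass());    // class java.lang.Integer
        System.out.println(options.get("temperature").getClass()); // class java.lang.Double
    }
}
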
@@ -0,0 +1,37 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class TestEmbeddingsRequestSerialization extends AbstractSerializationTest<OllamaEmbeddingsRequestModel> {

    private OllamaEmbeddingsRequestBuilder builder;

    @BeforeEach
    public void init() {
        builder = OllamaEmbeddingsRequestBuilder.getInstance("DummyModel", "DummyPrompt");
    }

    @Test
    public void testRequestOnlyMandatoryFields() {
        OllamaEmbeddingsRequestModel req = builder.build();
        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaEmbeddingsRequestModel.class), req);
    }

    @Test
    public void testRequestWithOptions() {
        OptionsBuilder b = new OptionsBuilder();
        OllamaEmbeddingsRequestModel req = builder
                .withOptions(b.setMirostat(1).build()).build();

        String jsonRequest = serialize(req);
        OllamaEmbeddingsRequestModel deserializeRequest = deserialize(jsonRequest, OllamaEmbeddingsRequestModel.class);
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
    }
}

@@ -0,0 +1,56 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.json.JSONObject;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class TestGenerateRequestSerialization extends AbstractSerializationTest<OllamaGenerateRequestModel> {

    private OllamaGenerateRequestBuilder builder;

    @BeforeEach
    public void init() {
        builder = OllamaGenerateRequestBuilder.getInstance("DummyModel");
    }

    @Test
    public void testRequestOnlyMandatoryFields() {
        OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build();

        String jsonRequest = serialize(req);
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequestModel.class), req);
    }

    @Test
    public void testRequestWithOptions() {
        OptionsBuilder b = new OptionsBuilder();
        OllamaGenerateRequestModel req =
                builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build();

        String jsonRequest = serialize(req);
        OllamaGenerateRequestModel deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequestModel.class);
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
    }

    @Test
    public void testWithJsonFormat() {
        OllamaGenerateRequestModel req =
                builder.withPrompt("Some prompt").withGetJsonResponse().build();

        String jsonRequest = serialize(req);
        // No Jackson deserialization here: the format property is not a boolean, and
        // deserializing requests is never needed in real code anyway.
        JSONObject jsonObject = new JSONObject(jsonRequest);
        String requestFormatProperty = jsonObject.getString("format");
        assertEquals("json", requestFormatProperty);
    }

}

@@ -0,0 +1,42 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;

import io.github.amithkoujalgi.ollama4j.core.models.Model;
import org.junit.jupiter.api.Test;

public class TestModelRequestSerialization extends AbstractSerializationTest<Model> {

    @Test
    public void testDeserializationOfModelResponseWithOffsetTime() {
        String serializedTestStringWithOffsetTime = "{\n"
                + "\"name\": \"codellama:13b\",\n"
                + "\"modified_at\": \"2023-11-04T14:56:49.277302595-07:00\",\n"
                + "\"size\": 7365960935,\n"
                + "\"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n"
                + "\"details\": {\n"
                + "\"format\": \"gguf\",\n"
                + "\"family\": \"llama\",\n"
                + "\"families\": null,\n"
                + "\"parameter_size\": \"13B\",\n"
                + "\"quantization_level\": \"Q4_0\"\n"
                + "}}";
        deserialize(serializedTestStringWithOffsetTime, Model.class);
    }

    @Test
    public void testDeserializationOfModelResponseWithZuluTime() {
        String serializedTestStringWithZuluTimezone = "{\n"
                + "\"name\": \"codellama:13b\",\n"
                + "\"modified_at\": \"2023-11-04T14:56:49.277302595Z\",\n"
                + "\"size\": 7365960935,\n"
                + "\"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n"
                + "\"details\": {\n"
                + "\"format\": \"gguf\",\n"
                + "\"family\": \"llama\",\n"
                + "\"families\": null,\n"
                + "\"parameter_size\": \"13B\",\n"
                + "\"quantization_level\": \"Q4_0\"\n"
                + "}}";
        deserialize(serializedTestStringWithZuluTimezone, Model.class);
    }

}

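The two tests above differ only in the modified_at timestamp: one carries a numeric UTC offset, the other Zulu (Z) time. Both are valid ISO-8601 offset forms, so a single java.time-based mapping can parse them; a minimal sketch using plain java.time (not ollama4j code):

import java.time.OffsetDateTime;

public class TimestampSketch {

    public static void main(String[] args) {
        // The two timestamp styles fed into Model's modified_at field by the tests above.
        OffsetDateTime withOffset = OffsetDateTime.parse("2023-11-04T14:56:49.277302595-07:00");
        OffsetDateTime zulu = OffsetDateTime.parse("2023-11-04T14:56:49.277302595Z");

        System.out.println(withOffset.toInstant()); // 2023-11-04T21:56:49.277302595Z
        System.out.println(zulu.toInstant());       // 2023-11-04T14:56:49.277302595Z
    }
}
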
							
								
								
									
										
BIN  src/test/resources/dog-on-a-boat.jpg  (new file, 52 KiB; binary file not shown)

@@ -1 +1,4 @@
ollama.api.url=http://192.168.29.223:11434
ollama.url=http://localhost:11434
ollama.model=qwen:0.5b
ollama.model.image=llava
ollama.request-timeout-seconds=120

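The updated test configuration points the suite at a local Ollama instance on localhost:11434, with a small text model (qwen:0.5b), an image model (llava), and a 120-second request timeout. How the tests actually read these keys is not shown in this diff; the loader below is only a sketch, and the test-config.properties filename and classpath location are assumptions for illustration.

import java.io.InputStream;
import java.util.Properties;

public class TestConfigSketch {

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Assumption: the properties shown above are packaged on the test classpath.
        try (InputStream in = TestConfigSketch.class.getResourceAsStream("/test-config.properties")) {
            if (in != null) {
                props.load(in);
            }
        }

        String url = props.getProperty("ollama.url", "http://localhost:11434");
        String model = props.getProperty("ollama.model", "qwen:0.5b");
        long timeoutSeconds = Long.parseLong(props.getProperty("ollama.request-timeout-seconds", "120"));

        System.out.println(url + " / " + model + " / " + timeoutSeconds + "s");
    }
}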