mirror of
				https://github.com/amithkoujalgi/ollama4j.git
				synced 2025-11-04 02:20:50 +01:00 
			
		
		
		
	Compare commits
	
		
			25 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 893e5dd763 | |||
| c520604f4b | |||
| a85c23d64a | |||
| d32a8b7d88 | |||
| 
						 | 
					992625cf86 | ||
| 
						 | 
					bbebd26d07 | ||
| 
						 | 
					3aa0fc77cb | ||
| 
						 | 
					11a98a72a1 | ||
| 
						 | 
					422601c0fc | ||
| 
						 | 
					75e6576a13 | ||
| 
						 | 
					51dd3f3e1e | ||
| 
						 | 
					30250f79d9 | ||
| 
						 | 
					d4ee9ed051 | ||
| 
						 | 
					4412ac683a | ||
| 
						 | 
					b5b1a26941 | ||
| 
						 | 
					a84230bbd1 | ||
| 
						 | 
					00c9b16556 | ||
| 
						 | 
					9a2194334f | ||
| 
						 | 
					f9cf11ecdf | ||
| 
						 | 
					0af80865c3 | ||
| 
						 | 
					a304c01194 | ||
| 
						 | 
					887708864e | ||
| 
						 | 
					2f0c4fdcc9 | ||
| 
						 | 
					73aabd7ca6 | ||
| 
						 | 
					17ca2bdee3 | 
							
								
								
									
										32
									
								
								.gitea/workflows/publish.yaml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										32
									
								
								.gitea/workflows/publish.yaml
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,32 @@
 | 
				
			|||||||
 | 
					name: Build and Publish
 | 
				
			||||||
 | 
					on: push
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					jobs:
 | 
				
			||||||
 | 
					  build:
 | 
				
			||||||
 | 
					    runs-on: standard-22.04
 | 
				
			||||||
 | 
					    steps:
 | 
				
			||||||
 | 
					      - name: Check out
 | 
				
			||||||
 | 
					        uses: actions/checkout@v4
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					      - name: Set Up Java
 | 
				
			||||||
 | 
					        uses: actions/setup-java@v4
 | 
				
			||||||
 | 
					        with:
 | 
				
			||||||
 | 
					          distribution: 'temurin'
 | 
				
			||||||
 | 
					          java-version: '21'
 | 
				
			||||||
 | 
					          #cache: 'maven'
 | 
				
			||||||
 | 
					          #server-id: 'gitea'
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					      - name: Set up Maven
 | 
				
			||||||
 | 
					        uses: stCarolas/setup-maven@v5
 | 
				
			||||||
 | 
					        with:
 | 
				
			||||||
 | 
					          maven-version: 3.8.2
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					      - run: cat /root/.m2/toolchains.xml
 | 
				
			||||||
 | 
					      - run: cat /root/.m2/settings.xml
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					      - name: Build
 | 
				
			||||||
 | 
					        run: mvn -B package --file pom.xml
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					      - name: Publish
 | 
				
			||||||
 | 
					        run: mvn deploy
 | 
				
			||||||
 | 
					        
 | 
				
			||||||
							
								
								
									
										3
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										3
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							@@ -39,3 +39,6 @@ build/
 | 
				
			|||||||
/.idea/
 | 
					/.idea/
 | 
				
			||||||
pom.xml.*
 | 
					pom.xml.*
 | 
				
			||||||
release.properties
 | 
					release.properties
 | 
				
			||||||
 | 
					!.idea/icon.svg
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					src/main/java/io/github/ollama4j/localtests
 | 
				
			||||||
							
								
								
									
										18
									
								
								.idea/icon.svg
									
									
									
										generated
									
									
									
										Normal file
									
								
							
							
						
						
									
										18
									
								
								.idea/icon.svg
									
									
									
										generated
									
									
									
										Normal file
									
								
							@@ -0,0 +1,18 @@
 | 
				
			|||||||
 | 
					<?xml version="1.0" encoding="UTF-8"?>
 | 
				
			||||||
 | 
					<svg version="1.1" viewBox="0 0 1478 2048" width="1280" height="1280" xmlns="http://www.w3.org/2000/svg">
 | 
				
			||||||
 | 
					<path transform="translate(0)" d="m0 0h1478v2048h-1478z" fill="#FEFEFE"/>
 | 
				
			||||||
 | 
					<path transform="translate(411,47)" d="m0 0h24l21 5 17 8 14 10 12 11 10 10 12 16 14 24 11 24 9 24 8 27 6 25 4 21 3 19 3 25 6-2 16-9 29-13 28-10 30-8 26-4 27-2h16l30 2 32 5 19 5 30 10 26 11 20 10 13 8 2-15 6-39 8-36 6-20 9-27 11-24 10-19 12-18 9-11 9-10 12-11 17-11 15-7 19-4h24l18 4 16 7 12 8 10 8 17 17 13 18 12 22 9 20 7 19 9 30 7 33 5 33 3 29 1 15v79l-3 30-4 29-4 20 16 15 17 17 8 7 18 18 9 11 10 12 14 21 9 16 8 16 5 17 7 19 10 34 5 27 3 24 1 14v42l-4 35-6 29-8 27-9 22-12 25-13 22-5 7 2 6 14 29 12 31 8 26 7 29 6 36 2 21 1 19v37l-3 34-4 25-5 24-8 27-8 21-7 16-11 21-15 24 2 5 7 10 8 15 11 29 8 29 6 31 3 22 2 24v57l-4 33-6 27-3 9-3 1h-89l-2-1v-11l2-13 6-21 3-19 1-9v-48l-3-31-4-22-7-27-6-16-8-16-12-21-4-11-3-17v-31l4-13 6-10 11-16 9-15 11-23 10-31 6-26 3-22 1-16v-33l-2-27-4-27-10-39-9-25-8-18-13-25-12-19-4-10-1-5v-13l3-11 4-8 9-10 13-17 8-13 8-14 11-27 7-25 4-21 2-20v-27l-2-22-5-27-6-21-8-22-12-25-8-14-11-16-8-10-11-13-13-13-8-7-17-13-18-11-17-9-15-6-23-7-14-3-17-2h-28l-18 2h-18l-10-3-6-5-16-32-8-14-11-15-8-10-9-10-7-7-14-11-12-9-16-10-19-10-13-6-20-8-17-5-24-5-15-2h-33l-25 4-24 6-22 8-20 9-20 11-19 13-10 8-11 9-13 13-13 17-10 15-10 18-8 18-9 10-6 3h-21l-19-2h-29l-20 3-14 3-27 9-21 10-18 11-16 12-15 13-15 15-11 14-12 17-10 17-8 16-10 25-7 24-5 24-3 25v31l4 30 5 21 9 27 12 25 10 16 7 9 16 15 6 12 3 9v15l-6 16-13 21-14 27-8 20-8 25-7 27-4 23-3 31v35l3 32 5 26 9 30 6 15 10 21 11 17 12 16 8 13 4 13v19l-4 13-12 22-9 15-8 16-7 19-7 26-5 30-2 23v42l3 26 5 22 3 12 1 9v10l-3 1h-81l-11-1-5-21-5-30-2-22v-52l2-25 5-34 5-23 7-25 8-21 11-23 9-12-1-5-14-22-10-19-11-25-10-30-6-24-5-29-3-27-1-17v-35l2-30 4-29 5-26 10-36 9-25 10-23 10-21-1-7-10-14-14-26-7-15-8-20-8-26-6-29-3-25v-66l3-27 7-33 9-29 10-25 8-16 9-17 11-17 11-15 11-13 7-8 56-56-1-6-2-5-4-26-3-32-1-17v-69l3-39 5-35 6-29 8-30 8-23 12-27 12-21 12-16 11-12 7-7 13-10 16-9 11-4z" fill="#010000"/>
 | 
				
			||||||
 | 
					<path transform="translate(856,1181)" d="m0 0h13l10 4 6 7 4 9 6 29 5 22 8 16 4-13 7-23 5-12 6-9 9-8 7-3 5-1h10l8 4 5 8v11l-6 17-6 15-4 16v22l8 38 1 9v11l-3 16-8 16-9 9-10 8-6 7-4 8-2 7-1 12v51l-2 17-4 13-11 20-5 15-3 17v21l3 17 6 16 11 28 13 38 10 37 7 33 5 33 3 28 1 18v49l-2 24-4 22-6 18-6 10-7 8-10 6-13 4h-17l-7-4-10-9-11-15-11-16-12-17-9-11-9-10-10-9-13-8-14-5-5-1h-26l-16 4-18 8-18 11-16 12-16 13-17 14-20 15-16 9-13 4h-11l-10-3-7-6-4-8-2-9v-39l2-25-6 8-2 1h-8l-13-4-8-7-4-7v-9l6-12 8-10 9-11 9-14 5-12 2-11v-17l-4-20-6-21-2-13v-16l2-12 8-16 9-13 12-16 13-21 8-17 9-27 4-20 4-39 3-39 3-63v-98l-3-35-3-13 5 2 16 11 13 10 11 9 14 12 17 16 33 33 7 8 12 13 9 11 12 14 8 10 10 13 12 16 13 18 18 27 12 19 6 8 6 4 9 1 12-3 10-6 8-11 4-11v-33l-3-17-4-11-5-7-6-3-15-4-16-9-16-8-4-1h-12l-23 5-8-1-7-6-4-10v-10l4-8 9-8 13-6 13-4 10-1-9-11-8-10-10-15-8-16-7-15-9-27-1-5v-13l3-8 8-8 9-4 6-1 8 3 7 9 15 31 8 12 8 9 2 1-6-21-4-20-1-8v-33l3-10 4-5z" fill="#020101"/>
 | 
				
			||||||
 | 
					<path transform="translate(735,724)" d="m0 0h30l24 2 27 4 20 5 27 9 29 14 18 11 16 12 11 9 15 14 12 14 10 14 9 15 7 14 7 19 5 20 2 14v34l-3 20-6 19-6 15-11 19-9 12-11 13-15 15-11 9-16 11-22 12-26 10-13 4-21 5-19 2h-117l-24-3-27-6-28-10-16-8-14-8-14-10-10-8-10-9-10-10-11-14-10-15-10-21-6-18-4-19-1-9v-31l2-15 5-20 8-21 10-19 8-12 10-13 12-13 13-13 11-9 15-11 15-9 14-8 21-9 16-6 22-6 29-5z" fill="#FEFEFE"/>
 | 
				
			||||||
 | 
					<path transform="translate(816,1496)" d="m0 0 5 1 13 21 10 18 14 27 15 31 17 40 10 27 12 36 8 28 7 30 5 28 3 28v60l-2 31-3 23-5 17-4 6-5 4-4 1h-14l-6-4-11-14-10-15-12-17-9-11-12-14-8-7-14-10-16-8-12-4-12-2h-20l-16 3-15 5-16 8-18 12-14 11-15 13-14 13-22 18-14 7-4 1h-7l-5-6-3-13v-29l3-32 6-45 11-66 20-100 13-61 2-6 11-7 4-2 7 11 10 10 13 8 18 6 6 1h25l17-4 16-7 13-9 7-6 9-11 8-14 5-15 2-10v-20l-3-11z" fill="#FEFEFE"/>
 | 
				
			||||||
 | 
					<path transform="translate(735,724)" d="m0 0h30l24 2 27 4 20 5 27 9 29 14 18 11 16 12 11 9 15 14 12 14 10 14 9 15 7 14 7 19 5 20 2 14v34l-3 20-6 19-6 15-11 19-9 12-11 13-15 15-11 9-16 11-22 12-26 10-13 4-21 5-19 2h-117l-24-3-27-6-28-10-16-8-14-8-14-10-10-8-10-9-10-10-11-14-10-15-10-21-6-18-4-19-1-9v-31l2-15 5-20 8-21 10-19 8-12 10-13 12-13 13-13 11-9 15-11 15-9 14-8 21-9 16-6 22-6 29-5zm0 63-20 2-20 4-29 10-17 8-17 10-17 13-15 14-9 11-9 14-9 19-6 20-2 14v11l3 16 6 18 7 14 8 11 11 12 10 9 18 12 16 8 15 6 25 6 15 2 14 1h89l21-3 25-6 26-11 15-9 10-8 10-9 8-8 12-18 6-13 5-16 2-12v-15l-2-14-5-16-5-12-7-13-12-16-12-13-8-7-16-12-14-8-15-8-28-10-21-5-14-2-13-1z" fill="#010101"/>
 | 
				
			||||||
 | 
					<path transform="translate(1081,140)" d="m0 0h5l5 4 9 11 11 19 11 28 6 21 7 32 4 27 3 42v49l-3 47-1 4-6-1-10-4-22-4-44-6-27-2-9-15-2-5v-40l2-34 5-38 8-38 5-20 11-29 11-23 7-10 11-13z" fill="#FEFEFE"/>
 | 
				
			||||||
 | 
					<path transform="translate(423,139)" d="m0 0 4 2 10 10 10 14 11 22 9 24 7 25 6 29 5 30 3 31 1 16v45l-6 14-5 6-29 2-31 4-35 6-11 4h-3l-3-28-1-27v-41l2-36 5-35 8-37 6-19 8-21 8-16 8-12 8-9z" fill="#FEFEFE"/>
 | 
				
			||||||
 | 
					<path transform="translate(745,1472)" d="m0 0h9l16 3 14 7 10 9 6 10 3 9 1 6v15l-4 14-8 16-9 10-9 8-15 8-12 4-10 2h-15l-13-3-16-8-11-10-6-10-5-12-2-11v-8l2-10h2l1-5 4-8 8-10 11-9 17-9 12-5 8-2z" fill="red"/>
 | 
				
			||||||
 | 
					<path transform="translate(436,735)" d="m0 0h16l15 4 12 7 10 9 7 9 5 11 2 8v21l-4 14-6 12-7 9-14 14-11 7-12 4h-15l-14-3-11-4-11-7-9-10-8-14-2-9v-21l4-14 8-16 6-9 10-10 14-8 9-3z" fill="#010101"/>
 | 
				
			||||||
 | 
					<path transform="translate(1055,735)" d="m0 0h15l16 4 11 6 10 8 7 9 8 15 5 14 1 6v20l-4 13-7 11-7 8-14 9-16 5-5 1h-16l-13-4-11-7-17-17-8-14-5-14-1-5v-20l4-13 6-10 9-10 11-8 11-5z" fill="#010101"/>
 | 
				
			||||||
 | 
					<path transform="translate(717,869)" d="m0 0h9l12 4 13 8 5-1 8-6 9-4 12-1 10 3 6 4 6 9 1 2v15l-5 10-8 7-11 8-6 4-1 6 3 17v19l-5 8-9 6-8 2h-10l-11-2-8-6-4-6-1-3v-15l3-19v-7l-16-10-11-11-3-5-1-4v-13l5-10 6-5z" fill="#020101"/>
 | 
				
			||||||
 | 
					<path transform="translate(717,1479)" d="m0 0 2 1-2 3h2v4 2l6 1 2 1 3 13-1 10-5 10h-2v2h-2v2h-2v2l-5 2-3 2-9 2v-2l-5 1-9-5-5-4v-2h-2l-2-2-6 3 1-7 5-10 8-10 11-9 17-9z" fill="pink"/>
 | 
				
			||||||
 | 
					<path transform="translate(599,1667)" d="m0 0 4 1v14l-9 48-3 19-2 1-8-20-3-11v-15l5-15 8-14 6-7z" fill="white"/>
 | 
				
			||||||
 | 
					<path transform="translate(937,1063)" d="m0 0 2 1-11 9-15 10-19 10-26 10-13 4-21 5-19 2h-117l-9-1v-1h82l37-1 18-2 32-7 14-5 16-6 10-4 17-9 11-7z" fill="#553D3C"/>
 | 
				
			||||||
 | 
					</svg>
 | 
				
			||||||
| 
		 After Width: | Height: | Size: 6.1 KiB  | 
							
								
								
									
										71
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										71
									
								
								README.md
									
									
									
									
									
								
							@@ -9,12 +9,15 @@ A Java library (wrapper/binding) for [Ollama](https://ollama.ai/) server.
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
Find more details on the [website](https://ollama4j.github.io/ollama4j/).
 | 
					Find more details on the [website](https://ollama4j.github.io/ollama4j/).
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					<div align="center">
 | 
				
			||||||
 | 
					
 | 
				
			||||||

 | 
					
 | 
				
			||||||

 | 
					
 | 
				
			||||||

 | 
					
 | 
				
			||||||

 | 
					
 | 
				
			||||||

 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
[//]: # ()
 | 
					[//]: # ()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
[//]: # ()
 | 
					[//]: # ()
 | 
				
			||||||
@@ -32,6 +35,7 @@ Find more details on the [website](https://ollama4j.github.io/ollama4j/).
 | 
				
			|||||||
[](https://codecov.io/gh/ollama4j/ollama4j)
 | 
					[](https://codecov.io/gh/ollama4j/ollama4j)
 | 
				
			||||||

 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					</div>
 | 
				
			||||||
 | 
					
 | 
				
			||||||
[//]: # ()
 | 
					[//]: # ()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -67,40 +71,37 @@ Find more details on the [website](https://ollama4j.github.io/ollama4j/).
 | 
				
			|||||||

 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					<a href="https://ollama.com/" target="_blank">
 | 
				
			||||||
 | 
					  <img src="https://img.shields.io/badge/v0.3.0-green.svg?style=for-the-badge&labelColor=gray&label=Ollama&color=blue" alt=""/>
 | 
				
			||||||
 | 
					</a>
 | 
				
			||||||
 | 
					
 | 
				
			||||||
<table>
 | 
					<table>
 | 
				
			||||||
<tr>
 | 
					<tr>
 | 
				
			||||||
<td> 
 | 
					<td> 
 | 
				
			||||||
 | 
					
 | 
				
			||||||
[![][ollama-shield]][ollama-link]
 | 
					<a href="https://ollama.ai/" target="_blank">Local Installation</a>
 | 
				
			||||||
 | 
					
 | 
				
			||||||
</td> 
 | 
					</td> 
 | 
				
			||||||
 | 
					
 | 
				
			||||||
<td> 
 | 
					<td> 
 | 
				
			||||||
 | 
					
 | 
				
			||||||
[![][ollama-docker-shield]][ollama-docker]
 | 
					<a href="https://hub.docker.com/r/ollama/ollama" target="_blank">Docker Installation</a>
 | 
				
			||||||
 | 
					
 | 
				
			||||||
</td>
 | 
					</td>
 | 
				
			||||||
</tr>
 | 
					</tr>
 | 
				
			||||||
<tr>
 | 
					<tr>
 | 
				
			||||||
<td>
 | 
					<td>
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					<a href="https://ollama.com/download/Ollama-darwin.zip" target="_blank">Download for macOS</a>
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					<a href="https://ollama.com/download/OllamaSetup.exe" target="_blank">Download for Windows</a>
 | 
				
			||||||
 | 
					
 | 
				
			||||||
macOS
 | 
					Install on Linux
 | 
				
			||||||
 | 
					 | 
				
			||||||
https://ollama.com/download/Ollama-darwin.zip
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
Linux
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
```shell 
 | 
					```shell 
 | 
				
			||||||
curl -fsSL https://ollama.com/install.sh \| sh
 | 
					curl -fsSL https://ollama.com/install.sh | sh
 | 
				
			||||||
```
 | 
					```
 | 
				
			||||||
 | 
					
 | 
				
			||||||
Windows
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
https://ollama.com/download/OllamaSetup.exe
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
</td>
 | 
					</td>
 | 
				
			||||||
<td>
 | 
					<td>
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -129,14 +130,6 @@ docker run -d -p 11434:11434 \
 | 
				
			|||||||
</tr>
 | 
					</tr>
 | 
				
			||||||
</table>
 | 
					</table>
 | 
				
			||||||
 | 
					
 | 
				
			||||||
[ollama-link]: https://ollama.ai/
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
[ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=for-the-badge&labelColor=gray
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
[ollama-docker]: https://hub.docker.com/r/ollama/ollama
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
[ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=for-the-badge&labelColor=gray
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
## Installation
 | 
					## Installation
 | 
				
			||||||
 | 
					
 | 
				
			||||||
> [!NOTE]
 | 
					> [!NOTE]
 | 
				
			||||||
@@ -162,7 +155,7 @@ In your Maven project, add this dependency:
 | 
				
			|||||||
<dependency>
 | 
					<dependency>
 | 
				
			||||||
    <groupId>io.github.ollama4j</groupId>
 | 
					    <groupId>io.github.ollama4j</groupId>
 | 
				
			||||||
    <artifactId>ollama4j</artifactId>
 | 
					    <artifactId>ollama4j</artifactId>
 | 
				
			||||||
    <version>1.0.78</version>
 | 
					    <version>1.0.79</version>
 | 
				
			||||||
</dependency>
 | 
					</dependency>
 | 
				
			||||||
```
 | 
					```
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -218,7 +211,7 @@ In your Maven project, add this dependency:
 | 
				
			|||||||
<dependency>
 | 
					<dependency>
 | 
				
			||||||
    <groupId>io.github.ollama4j</groupId>
 | 
					    <groupId>io.github.ollama4j</groupId>
 | 
				
			||||||
    <artifactId>ollama4j</artifactId>
 | 
					    <artifactId>ollama4j</artifactId>
 | 
				
			||||||
    <version>1.0.78</version>
 | 
					    <version>1.0.79</version>
 | 
				
			||||||
</dependency>
 | 
					</dependency>
 | 
				
			||||||
```
 | 
					```
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -228,7 +221,7 @@ In your Maven project, add this dependency:
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
```groovy
 | 
					```groovy
 | 
				
			||||||
dependencies {
 | 
					dependencies {
 | 
				
			||||||
  implementation 'com.github.ollama4j:ollama4j:1.0.78'
 | 
					    implementation 'io.github.ollama4j:ollama4j:1.0.79'
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
```
 | 
					```
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -284,37 +277,13 @@ Newer artifacts are published via GitHub Actions CI workflow when a new release
 | 
				
			|||||||
- `ollama-translator`: Minecraft 1.20.6 spigot plugin allows to easily break language barriers by using ollama on the
 | 
					- `ollama-translator`: Minecraft 1.20.6 spigot plugin allows to easily break language barriers by using ollama on the
 | 
				
			||||||
  server to translate all messages into a specific target language.
 | 
					  server to translate all messages into a specific target language.
 | 
				
			||||||
    - https://github.com/liebki/ollama-translator
 | 
					    - https://github.com/liebki/ollama-translator
 | 
				
			||||||
 | 
					- `Ollama4j Web UI`: A web UI for Ollama written in Java using Spring Boot and Vaadin framework and
 | 
				
			||||||
 | 
					  Ollama4j. https://github.com/ollama4j/ollama4j-web-ui
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#### Traction
 | 
					#### Traction
 | 
				
			||||||
 | 
					
 | 
				
			||||||
[](https://star-history.com/#ollama4j/ollama4j&Date)
 | 
					[](https://star-history.com/#ollama4j/ollama4j&Date)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
### Areas of improvement
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
- [x] Use Java-naming conventions for attributes in the request/response models instead of the
 | 
					 | 
				
			||||||
  snake-case conventions. (
 | 
					 | 
				
			||||||
  possibly with Jackson-mapper's `@JsonProperty`)
 | 
					 | 
				
			||||||
- [x] Fix deprecated HTTP client code
 | 
					 | 
				
			||||||
- [x] Setup logging
 | 
					 | 
				
			||||||
- [x] Use lombok
 | 
					 | 
				
			||||||
- [x] Update request body creation with Java objects
 | 
					 | 
				
			||||||
- [ ] Async APIs for images
 | 
					 | 
				
			||||||
- [ ] Support for function calling with models like Mistral
 | 
					 | 
				
			||||||
    - [x] generate in sync mode
 | 
					 | 
				
			||||||
    - [ ] generate in async mode
 | 
					 | 
				
			||||||
- [ ] Add custom headers to requests
 | 
					 | 
				
			||||||
- [x] Add additional params for `ask` APIs such as:
 | 
					 | 
				
			||||||
    - [x] `options`: additional model parameters for the Modelfile such as `temperature` -
 | 
					 | 
				
			||||||
      Supported [params](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
 | 
					 | 
				
			||||||
    - [x] `system`: system prompt to (overrides what is defined in the Modelfile)
 | 
					 | 
				
			||||||
    - [x] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile)
 | 
					 | 
				
			||||||
    - [x] `context`: the context parameter returned from a previous request, which can be used to keep a
 | 
					 | 
				
			||||||
      short
 | 
					 | 
				
			||||||
      conversational memory
 | 
					 | 
				
			||||||
    - [x] `stream`: Add support for streaming responses from the model
 | 
					 | 
				
			||||||
- [ ] Add test cases
 | 
					 | 
				
			||||||
- [ ] Handle exceptions better (maybe throw more appropriate exceptions)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
### Get Involved
 | 
					### Get Involved
 | 
				
			||||||
 | 
					
 | 
				
			||||||
<div align="center">
 | 
					<div align="center">
 | 
				
			||||||
@@ -356,7 +325,7 @@ project.
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
<p align="center">
 | 
					<p align="center">
 | 
				
			||||||
  <a href="https://github.com/ollama4j/ollama4j/graphs/contributors">
 | 
					  <a href="https://github.com/ollama4j/ollama4j/graphs/contributors">
 | 
				
			||||||
    <img src="https://contrib.rocks/image?repo=ollama4j/ollama4j" />
 | 
					    <img src="https://contrib.rocks/image?repo=ollama4j/ollama4j"  alt=""/>
 | 
				
			||||||
  </a>
 | 
					  </a>
 | 
				
			||||||
</p>
 | 
					</p>
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
							
								
								
									
										30
									
								
								docs/docs/apis-extras/ps.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										30
									
								
								docs/docs/apis-extras/ps.md
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,30 @@
 | 
				
			|||||||
 | 
					---
 | 
				
			||||||
 | 
					sidebar_position: 4
 | 
				
			||||||
 | 
					---
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# PS
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					This API provides a list of running models and details about each model currently loaded into memory.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					This API corresponds to the [PS](https://github.com/ollama/ollama/blob/main/docs/api.md#list-running-models) API.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					```java
 | 
				
			||||||
 | 
					package io.github.ollama4j.localtests;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
 | 
					import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
				
			||||||
 | 
					import io.github.ollama4j.models.ps.ModelsProcessResponse;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import java.io.IOException;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					public class Main {
 | 
				
			||||||
 | 
					    public static void main(String[] args) {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        ModelsProcessResponse response = ollamaAPI.ps();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        System.out.println(response);
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					```
 | 
				
			||||||
@@ -13,7 +13,7 @@ information using the history of already asked questions and the respective answ
 | 
				
			|||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 | 
					import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestModel;
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequest;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatResult;
 | 
					import io.github.ollama4j.models.chat.OllamaChatResult;
 | 
				
			||||||
import io.github.ollama4j.types.OllamaModelType;
 | 
					import io.github.ollama4j.types.OllamaModelType;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -27,7 +27,7 @@ public class Main {
 | 
				
			|||||||
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
 | 
					        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        // create first user question
 | 
					        // create first user question
 | 
				
			||||||
        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
 | 
					        OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
 | 
				
			||||||
                .build();
 | 
					                .build();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        // start conversation with model
 | 
					        // start conversation with model
 | 
				
			||||||
@@ -82,13 +82,40 @@ You will get a response similar to:
 | 
				
			|||||||
]
 | 
					]
 | 
				
			||||||
```
 | 
					```
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					## Conversational loop
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					```java
 | 
				
			||||||
 | 
					public class Main {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    public static void main(String[] args) {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        OllamaAPI ollamaAPI = new OllamaAPI();
 | 
				
			||||||
 | 
					        ollamaAPI.setRequestTimeoutSeconds(60);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("<your-model>");
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "<your-first-message>").build();
 | 
				
			||||||
 | 
					        OllamaChatResult initialChatResult = ollamaAPI.chat(requestModel);
 | 
				
			||||||
 | 
					        System.out.println(initialChatResult.getResponse());
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        List<OllamaChatMessage> history = initialChatResult.getChatHistory();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        while (true) {
 | 
				
			||||||
 | 
					            OllamaChatResult chatResult = ollamaAPI.chat(builder.withMessages(history).withMessage(OllamaChatMessageRole.USER, "<your-new-message").build());
 | 
				
			||||||
 | 
					            System.out.println(chatResult.getResponse());
 | 
				
			||||||
 | 
					            history = chatResult.getChatHistory();
 | 
				
			||||||
 | 
					        }
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					```
 | 
				
			||||||
 | 
					
 | 
				
			||||||
## Create a conversation where the answer is streamed
 | 
					## Create a conversation where the answer is streamed
 | 
				
			||||||
 | 
					
 | 
				
			||||||
```java
 | 
					```java
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 | 
					import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 | 
				
			||||||
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequest;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestModel;
 | 
					 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatResult;
 | 
					import io.github.ollama4j.models.chat.OllamaChatResult;
 | 
				
			||||||
import io.github.ollama4j.models.generate.OllamaStreamHandler;
 | 
					import io.github.ollama4j.models.generate.OllamaStreamHandler;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -101,7 +128,7 @@ public class Main {
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
        OllamaAPI ollamaAPI = new OllamaAPI(host);
 | 
					        OllamaAPI ollamaAPI = new OllamaAPI(host);
 | 
				
			||||||
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
 | 
					        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
 | 
				
			||||||
        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
 | 
					        OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER,
 | 
				
			||||||
                        "What is the capital of France? And what's France's connection with Mona Lisa?")
 | 
					                        "What is the capital of France? And what's France's connection with Mona Lisa?")
 | 
				
			||||||
                .build();
 | 
					                .build();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -132,7 +159,7 @@ import io.github.ollama4j.OllamaAPI;
 | 
				
			|||||||
import io.github.ollama4j.impl.ConsoleOutputStreamHandler;
 | 
					import io.github.ollama4j.impl.ConsoleOutputStreamHandler;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 | 
					import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestModel;
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequest;
 | 
				
			||||||
import io.github.ollama4j.models.generate.OllamaStreamHandler;
 | 
					import io.github.ollama4j.models.generate.OllamaStreamHandler;
 | 
				
			||||||
import io.github.ollama4j.types.OllamaModelType;
 | 
					import io.github.ollama4j.types.OllamaModelType;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -142,7 +169,7 @@ public class Main {
 | 
				
			|||||||
        OllamaAPI ollamaAPI = new OllamaAPI(host);
 | 
					        OllamaAPI ollamaAPI = new OllamaAPI(host);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
 | 
					        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
 | 
				
			||||||
        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
 | 
					        OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
 | 
				
			||||||
                .build();
 | 
					                .build();
 | 
				
			||||||
        OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
 | 
					        OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
 | 
				
			||||||
        ollamaAPI.chat(requestModel, streamHandler);
 | 
					        ollamaAPI.chat(requestModel, streamHandler);
 | 
				
			||||||
@@ -156,7 +183,7 @@ public class Main {
 | 
				
			|||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 | 
					import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestModel;
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequest;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatResult;
 | 
					import io.github.ollama4j.models.chat.OllamaChatResult;
 | 
				
			||||||
import io.github.ollama4j.types.OllamaModelType;
 | 
					import io.github.ollama4j.types.OllamaModelType;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -171,7 +198,7 @@ public class Main {
 | 
				
			|||||||
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
 | 
					        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        // create request with system-prompt (overriding the model defaults) and user question
 | 
					        // create request with system-prompt (overriding the model defaults) and user question
 | 
				
			||||||
        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
 | 
					        OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
 | 
				
			||||||
                .withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
 | 
					                .withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
 | 
				
			||||||
                .build();
 | 
					                .build();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -193,8 +220,8 @@ You will get a response similar to:
 | 
				
			|||||||
```java
 | 
					```java
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 | 
					import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 | 
				
			||||||
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequest;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestModel;
 | 
					 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatResult;
 | 
					import io.github.ollama4j.models.chat.OllamaChatResult;
 | 
				
			||||||
import io.github.ollama4j.types.OllamaModelType;
 | 
					import io.github.ollama4j.types.OllamaModelType;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -211,7 +238,7 @@ public class Main {
 | 
				
			|||||||
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA);
 | 
					        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        // Load Image from File and attach to user message (alternatively images could also be added via URL)
 | 
					        // Load Image from File and attach to user message (alternatively images could also be added via URL)
 | 
				
			||||||
        OllamaChatRequestModel requestModel =
 | 
					        OllamaChatRequest requestModel =
 | 
				
			||||||
                builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
 | 
					                builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
 | 
				
			||||||
                        List.of(
 | 
					                        List.of(
 | 
				
			||||||
                                new File("/path/to/image"))).build();
 | 
					                                new File("/path/to/image"))).build();
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -13,7 +13,7 @@ the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#gener
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
```java
 | 
					```java
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.models.OllamaAsyncResultStreamer;
 | 
					import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
 | 
				
			||||||
import io.github.ollama4j.types.OllamaModelType;
 | 
					import io.github.ollama4j.types.OllamaModelType;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
public class Main {
 | 
					public class Main {
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -23,9 +23,10 @@ If you have this image downloaded and you pass the path to the downloaded image
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
```java
 | 
					```java
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.models.OllamaResult;
 | 
					import io.github.ollama4j.models.response.OllamaResult;
 | 
				
			||||||
import io.github.ollama4j.types.OllamaModelType;
 | 
					import io.github.ollama4j.types.OllamaModelType;
 | 
				
			||||||
import io.github.ollama4j.utils.OptionsBuilder;
 | 
					import io.github.ollama4j.utils.OptionsBuilder;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import java.io.File;
 | 
					import java.io.File;
 | 
				
			||||||
import java.util.List;
 | 
					import java.util.List;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -23,9 +23,10 @@ Passing the link of this image the following code:
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
```java
 | 
					```java
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.models.OllamaResult;
 | 
					import io.github.ollama4j.models.response.OllamaResult;
 | 
				
			||||||
import io.github.ollama4j.types.OllamaModelType;
 | 
					import io.github.ollama4j.types.OllamaModelType;
 | 
				
			||||||
import io.github.ollama4j.utils.OptionsBuilder;
 | 
					import io.github.ollama4j.utils.OptionsBuilder;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import java.util.List;
 | 
					import java.util.List;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
public class Main {
 | 
					public class Main {
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -17,7 +17,7 @@ to [this](/apis-extras/options-builder).
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
```java
 | 
					```java
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.models.OllamaResult;
 | 
					import io.github.ollama4j.models.response.OllamaResult;
 | 
				
			||||||
import io.github.ollama4j.types.OllamaModelType;
 | 
					import io.github.ollama4j.types.OllamaModelType;
 | 
				
			||||||
import io.github.ollama4j.utils.OptionsBuilder;
 | 
					import io.github.ollama4j.utils.OptionsBuilder;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -50,7 +50,7 @@ You will get a response similar to:
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
```java
 | 
					```java
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.models.OllamaResult;
 | 
					import io.github.ollama4j.models.response.OllamaResult;
 | 
				
			||||||
import io.github.ollama4j.models.generate.OllamaStreamHandler;
 | 
					import io.github.ollama4j.models.generate.OllamaStreamHandler;
 | 
				
			||||||
import io.github.ollama4j.utils.OptionsBuilder;
 | 
					import io.github.ollama4j.utils.OptionsBuilder;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -91,7 +91,7 @@ You will get a response similar to:
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
```java
 | 
					```java
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.models.OllamaResult;
 | 
					import io.github.ollama4j.models.response.OllamaResult;
 | 
				
			||||||
import io.github.ollama4j.types.OllamaModelType;
 | 
					import io.github.ollama4j.types.OllamaModelType;
 | 
				
			||||||
import io.github.ollama4j.utils.OptionsBuilder;
 | 
					import io.github.ollama4j.utils.OptionsBuilder;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -139,7 +139,7 @@ You'd then get a response from the model:
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
```java
 | 
					```java
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.models.OllamaResult;
 | 
					import io.github.ollama4j.models.response.OllamaResult;
 | 
				
			||||||
import io.github.ollama4j.types.OllamaModelType;
 | 
					import io.github.ollama4j.types.OllamaModelType;
 | 
				
			||||||
import io.github.ollama4j.utils.OptionsBuilder;
 | 
					import io.github.ollama4j.utils.OptionsBuilder;
 | 
				
			||||||
import io.github.ollama4j.utils.SamplePrompts;
 | 
					import io.github.ollama4j.utils.SamplePrompts;
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -9,7 +9,7 @@ inferences.
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
```java
 | 
					```java
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.models.OllamaResult;
 | 
					import io.github.ollama4j.models.response.OllamaResult;
 | 
				
			||||||
import io.github.ollama4j.types.OllamaModelType;
 | 
					import io.github.ollama4j.types.OllamaModelType;
 | 
				
			||||||
import io.github.ollama4j.utils.OptionsBuilder;
 | 
					import io.github.ollama4j.utils.OptionsBuilder;
 | 
				
			||||||
import io.github.ollama4j.utils.PromptBuilder;
 | 
					import io.github.ollama4j.utils.PromptBuilder;
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -8,7 +8,7 @@ This API lets you get the details of a model on the Ollama server.
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
```java title="GetModelDetails.java"
 | 
					```java title="GetModelDetails.java"
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.models.ModelDetail;
 | 
					import io.github.ollama4j.models.response.ModelDetail;
 | 
				
			||||||
import io.github.ollama4j.types.OllamaModelType;
 | 
					import io.github.ollama4j.types.OllamaModelType;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
public class Main {
 | 
					public class Main {
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -8,7 +8,7 @@ This API lets you list available models on the Ollama server.
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
```java title="ListModels.java"
 | 
					```java title="ListModels.java"
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.models.Model;
 | 
					import io.github.ollama4j.models.response.Model;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import java.util.List;
 | 
					import java.util.List;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -118,6 +118,24 @@ Create a new Java class in your project and add this code.
 | 
				
			|||||||
```java
 | 
					```java
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					public class OllamaAPITest {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    public static void main(String[] args) {
 | 
				
			||||||
 | 
					        OllamaAPI ollamaAPI = new OllamaAPI();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        boolean isOllamaServerReachable = ollamaAPI.ping();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        System.out.println("Is Ollama server running: " + isOllamaServerReachable);
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					```
 | 
				
			||||||
 | 
					This uses the default Ollama host as `http://localhost:11434`.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					Specify a different Ollama host that you want to connect to.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					```java
 | 
				
			||||||
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
public class OllamaAPITest {
 | 
					public class OllamaAPITest {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    public static void main(String[] args) {
 | 
					    public static void main(String[] args) {
 | 
				
			||||||
@@ -129,7 +147,7 @@ public class OllamaAPITest {
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
        boolean isOllamaServerReachable = ollamaAPI.ping();
 | 
					        boolean isOllamaServerReachable = ollamaAPI.ping();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        System.out.println("Is Ollama server alive: " + isOllamaServerReachable);
 | 
					        System.out.println("Is Ollama server running: " + isOllamaServerReachable);
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
```
 | 
					```
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -58,6 +58,10 @@ const config = {
 | 
				
			|||||||
                theme: {
 | 
					                theme: {
 | 
				
			||||||
                    customCss: './src/css/custom.css',
 | 
					                    customCss: './src/css/custom.css',
 | 
				
			||||||
                },
 | 
					                },
 | 
				
			||||||
 | 
					                gtag: {
 | 
				
			||||||
 | 
					                    trackingID: 'G-G7FLH6FNDC',
 | 
				
			||||||
 | 
					                    anonymizeIP: false,
 | 
				
			||||||
 | 
					                },
 | 
				
			||||||
            }),
 | 
					            }),
 | 
				
			||||||
        ],
 | 
					        ],
 | 
				
			||||||
    ],
 | 
					    ],
 | 
				
			||||||
 
 | 
				
			|||||||
							
								
								
									
										1
									
								
								docs/package-lock.json
									
									
									
										generated
									
									
									
								
							
							
						
						
									
										1
									
								
								docs/package-lock.json
									
									
									
										generated
									
									
									
								
							@@ -9,6 +9,7 @@
 | 
				
			|||||||
      "version": "0.0.0",
 | 
					      "version": "0.0.0",
 | 
				
			||||||
      "dependencies": {
 | 
					      "dependencies": {
 | 
				
			||||||
        "@docusaurus/core": "^3.4.0",
 | 
					        "@docusaurus/core": "^3.4.0",
 | 
				
			||||||
 | 
					        "@docusaurus/plugin-google-gtag": "^3.4.0",
 | 
				
			||||||
        "@docusaurus/preset-classic": "^3.4.0",
 | 
					        "@docusaurus/preset-classic": "^3.4.0",
 | 
				
			||||||
        "@docusaurus/theme-mermaid": "^3.4.0",
 | 
					        "@docusaurus/theme-mermaid": "^3.4.0",
 | 
				
			||||||
        "@mdx-js/react": "^3.0.0",
 | 
					        "@mdx-js/react": "^3.0.0",
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -15,6 +15,7 @@
 | 
				
			|||||||
  },
 | 
					  },
 | 
				
			||||||
  "dependencies": {
 | 
					  "dependencies": {
 | 
				
			||||||
    "@docusaurus/core": "^3.4.0",
 | 
					    "@docusaurus/core": "^3.4.0",
 | 
				
			||||||
 | 
					    "@docusaurus/plugin-google-gtag": "^3.4.0",
 | 
				
			||||||
    "@docusaurus/preset-classic": "^3.4.0",
 | 
					    "@docusaurus/preset-classic": "^3.4.0",
 | 
				
			||||||
    "@docusaurus/theme-mermaid": "^3.4.0",
 | 
					    "@docusaurus/theme-mermaid": "^3.4.0",
 | 
				
			||||||
    "@mdx-js/react": "^3.0.0",
 | 
					    "@mdx-js/react": "^3.0.0",
 | 
				
			||||||
 
 | 
				
			|||||||
							
								
								
									
										14
									
								
								pom.xml
									
									
									
									
									
								
							
							
						
						
									
										14
									
								
								pom.xml
									
									
									
									
									
								
							@@ -129,6 +129,13 @@
 | 
				
			|||||||
        </plugins>
 | 
					        </plugins>
 | 
				
			||||||
    </build>
 | 
					    </build>
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    <repositories>
 | 
				
			||||||
 | 
					        <repository>
 | 
				
			||||||
 | 
					            <id>gitea</id>
 | 
				
			||||||
 | 
					            <url>https://gitea.seeseepuff.be/api/packages/seeseemelk/maven</url>
 | 
				
			||||||
 | 
					        </repository>
 | 
				
			||||||
 | 
					    </repositories>
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    <dependencies>
 | 
					    <dependencies>
 | 
				
			||||||
        <dependency>
 | 
					        <dependency>
 | 
				
			||||||
            <groupId>org.projectlombok</groupId>
 | 
					            <groupId>org.projectlombok</groupId>
 | 
				
			||||||
@@ -178,8 +185,13 @@
 | 
				
			|||||||
    </dependencies>
 | 
					    </dependencies>
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    <distributionManagement>
 | 
					    <distributionManagement>
 | 
				
			||||||
 | 
					        <snapshotRepository>
 | 
				
			||||||
 | 
					            <id>gitea</id>
 | 
				
			||||||
 | 
					            <url>https://gitea.seeseepuff.be/api/packages/seeseemelk/maven</url>
 | 
				
			||||||
 | 
					        </snapshotRepository>
 | 
				
			||||||
        <repository>
 | 
					        <repository>
 | 
				
			||||||
            <id>mvn-repo-id</id>
 | 
					            <id>gitea</id>
 | 
				
			||||||
 | 
					            <url>https://gitea.seeseepuff.be/api/packages/seeseemelk/maven</url>
 | 
				
			||||||
        </repository>
 | 
					        </repository>
 | 
				
			||||||
    </distributionManagement>
 | 
					    </distributionManagement>
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -3,16 +3,17 @@ package io.github.ollama4j;
 | 
				
			|||||||
import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
					import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
				
			||||||
import io.github.ollama4j.exceptions.ToolInvocationException;
 | 
					import io.github.ollama4j.exceptions.ToolInvocationException;
 | 
				
			||||||
import io.github.ollama4j.exceptions.ToolNotFoundException;
 | 
					import io.github.ollama4j.exceptions.ToolNotFoundException;
 | 
				
			||||||
import io.github.ollama4j.models.*;
 | 
					 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatMessage;
 | 
					import io.github.ollama4j.models.chat.OllamaChatMessage;
 | 
				
			||||||
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequest;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestModel;
 | 
					 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatResult;
 | 
					import io.github.ollama4j.models.chat.OllamaChatResult;
 | 
				
			||||||
import io.github.ollama4j.models.embeddings.OllamaEmbeddingResponseModel;
 | 
					import io.github.ollama4j.models.embeddings.OllamaEmbeddingResponseModel;
 | 
				
			||||||
import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
 | 
					import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
 | 
				
			||||||
import io.github.ollama4j.models.generate.OllamaGenerateRequestModel;
 | 
					import io.github.ollama4j.models.generate.OllamaGenerateRequest;
 | 
				
			||||||
import io.github.ollama4j.models.generate.OllamaStreamHandler;
 | 
					import io.github.ollama4j.models.generate.OllamaStreamHandler;
 | 
				
			||||||
 | 
					import io.github.ollama4j.models.ps.ModelsProcessResponse;
 | 
				
			||||||
import io.github.ollama4j.models.request.*;
 | 
					import io.github.ollama4j.models.request.*;
 | 
				
			||||||
 | 
					import io.github.ollama4j.models.response.*;
 | 
				
			||||||
import io.github.ollama4j.tools.*;
 | 
					import io.github.ollama4j.tools.*;
 | 
				
			||||||
import io.github.ollama4j.utils.Options;
 | 
					import io.github.ollama4j.utils.Options;
 | 
				
			||||||
import io.github.ollama4j.utils.Utils;
 | 
					import io.github.ollama4j.utils.Utils;
 | 
				
			||||||
@@ -57,7 +58,14 @@ public class OllamaAPI {
 | 
				
			|||||||
    private final ToolRegistry toolRegistry = new ToolRegistry();
 | 
					    private final ToolRegistry toolRegistry = new ToolRegistry();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    /**
 | 
					    /**
 | 
				
			||||||
     * Instantiates the Ollama API.
 | 
					     * Instantiates the Ollama API with default Ollama host: <a href="http://localhost:11434">http://localhost:11434</a>
 | 
				
			||||||
 | 
					     **/
 | 
				
			||||||
 | 
					    public OllamaAPI() {
 | 
				
			||||||
 | 
					        this.host = "http://localhost:11434";
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    /**
 | 
				
			||||||
 | 
					     * Instantiates the Ollama API with specified Ollama host address.
 | 
				
			||||||
     *
 | 
					     *
 | 
				
			||||||
     * @param host the host address of Ollama server
 | 
					     * @param host the host address of Ollama server
 | 
				
			||||||
     */
 | 
					     */
 | 
				
			||||||
@@ -110,6 +118,37 @@ public class OllamaAPI {
 | 
				
			|||||||
        return statusCode == 200;
 | 
					        return statusCode == 200;
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    /**
 | 
				
			||||||
 | 
					     * Provides a list of running models and details about each model currently loaded into memory.
 | 
				
			||||||
 | 
					     *
 | 
				
			||||||
 | 
					     * @return ModelsProcessResponse
 | 
				
			||||||
 | 
					     */
 | 
				
			||||||
 | 
					    public ModelsProcessResponse ps() throws IOException, InterruptedException, OllamaBaseException {
 | 
				
			||||||
 | 
					        String url = this.host + "/api/ps";
 | 
				
			||||||
 | 
					        HttpClient httpClient = HttpClient.newHttpClient();
 | 
				
			||||||
 | 
					        HttpRequest httpRequest = null;
 | 
				
			||||||
 | 
					        try {
 | 
				
			||||||
 | 
					            httpRequest =
 | 
				
			||||||
 | 
					                    getRequestBuilderDefault(new URI(url))
 | 
				
			||||||
 | 
					                            .header("Accept", "application/json")
 | 
				
			||||||
 | 
					                            .header("Content-type", "application/json")
 | 
				
			||||||
 | 
					                            .GET()
 | 
				
			||||||
 | 
					                            .build();
 | 
				
			||||||
 | 
					        } catch (URISyntaxException e) {
 | 
				
			||||||
 | 
					            throw new RuntimeException(e);
 | 
				
			||||||
 | 
					        }
 | 
				
			||||||
 | 
					        HttpResponse<String> response = null;
 | 
				
			||||||
 | 
					        response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
 | 
				
			||||||
 | 
					        int statusCode = response.statusCode();
 | 
				
			||||||
 | 
					        String responseString = response.body();
 | 
				
			||||||
 | 
					        if (statusCode == 200) {
 | 
				
			||||||
 | 
					            return Utils.getObjectMapper()
 | 
				
			||||||
 | 
					                    .readValue(responseString, ModelsProcessResponse.class);
 | 
				
			||||||
 | 
					        } else {
 | 
				
			||||||
 | 
					            throw new OllamaBaseException(statusCode + " - " + responseString);
 | 
				
			||||||
 | 
					        }
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    /**
 | 
					    /**
 | 
				
			||||||
     * List available models from Ollama server.
 | 
					     * List available models from Ollama server.
 | 
				
			||||||
     *
 | 
					     *
 | 
				
			||||||
@@ -351,7 +390,7 @@ public class OllamaAPI {
 | 
				
			|||||||
     */
 | 
					     */
 | 
				
			||||||
    public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler)
 | 
					    public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler)
 | 
				
			||||||
            throws OllamaBaseException, IOException, InterruptedException {
 | 
					            throws OllamaBaseException, IOException, InterruptedException {
 | 
				
			||||||
        OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
 | 
					        OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt);
 | 
				
			||||||
        ollamaRequestModel.setRaw(raw);
 | 
					        ollamaRequestModel.setRaw(raw);
 | 
				
			||||||
        ollamaRequestModel.setOptions(options.getOptionsMap());
 | 
					        ollamaRequestModel.setOptions(options.getOptionsMap());
 | 
				
			||||||
        return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
 | 
					        return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
 | 
				
			||||||
@@ -420,7 +459,7 @@ public class OllamaAPI {
 | 
				
			|||||||
     * @return the ollama async result callback handle
 | 
					     * @return the ollama async result callback handle
 | 
				
			||||||
     */
 | 
					     */
 | 
				
			||||||
    public OllamaAsyncResultStreamer generateAsync(String model, String prompt, boolean raw) {
 | 
					    public OllamaAsyncResultStreamer generateAsync(String model, String prompt, boolean raw) {
 | 
				
			||||||
        OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
 | 
					        OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt);
 | 
				
			||||||
        ollamaRequestModel.setRaw(raw);
 | 
					        ollamaRequestModel.setRaw(raw);
 | 
				
			||||||
        URI uri = URI.create(this.host + "/api/generate");
 | 
					        URI uri = URI.create(this.host + "/api/generate");
 | 
				
			||||||
        OllamaAsyncResultStreamer ollamaAsyncResultStreamer =
 | 
					        OllamaAsyncResultStreamer ollamaAsyncResultStreamer =
 | 
				
			||||||
@@ -450,7 +489,7 @@ public class OllamaAPI {
 | 
				
			|||||||
        for (File imageFile : imageFiles) {
 | 
					        for (File imageFile : imageFiles) {
 | 
				
			||||||
            images.add(encodeFileToBase64(imageFile));
 | 
					            images.add(encodeFileToBase64(imageFile));
 | 
				
			||||||
        }
 | 
					        }
 | 
				
			||||||
        OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
 | 
					        OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images);
 | 
				
			||||||
        ollamaRequestModel.setOptions(options.getOptionsMap());
 | 
					        ollamaRequestModel.setOptions(options.getOptionsMap());
 | 
				
			||||||
        return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
 | 
					        return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
@@ -486,7 +525,7 @@ public class OllamaAPI {
 | 
				
			|||||||
        for (String imageURL : imageURLs) {
 | 
					        for (String imageURL : imageURLs) {
 | 
				
			||||||
            images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL)));
 | 
					            images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL)));
 | 
				
			||||||
        }
 | 
					        }
 | 
				
			||||||
        OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
 | 
					        OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images);
 | 
				
			||||||
        ollamaRequestModel.setOptions(options.getOptionsMap());
 | 
					        ollamaRequestModel.setOptions(options.getOptionsMap());
 | 
				
			||||||
        return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
 | 
					        return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
@@ -520,7 +559,7 @@ public class OllamaAPI {
 | 
				
			|||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    /**
 | 
					    /**
 | 
				
			||||||
     * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
 | 
					     * Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}.
 | 
				
			||||||
     * <p>
 | 
					     * <p>
 | 
				
			||||||
     * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
 | 
					     * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
 | 
				
			||||||
     *
 | 
					     *
 | 
				
			||||||
@@ -530,12 +569,12 @@ public class OllamaAPI {
 | 
				
			|||||||
     * @throws IOException          in case the responseStream can not be read
 | 
					     * @throws IOException          in case the responseStream can not be read
 | 
				
			||||||
     * @throws InterruptedException in case the server is not reachable or network issues happen
 | 
					     * @throws InterruptedException in case the server is not reachable or network issues happen
 | 
				
			||||||
     */
 | 
					     */
 | 
				
			||||||
    public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException {
 | 
					    public OllamaChatResult chat(OllamaChatRequest request) throws OllamaBaseException, IOException, InterruptedException {
 | 
				
			||||||
        return chat(request, null);
 | 
					        return chat(request, null);
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    /**
 | 
					    /**
 | 
				
			||||||
     * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
 | 
					     * Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}.
 | 
				
			||||||
     * <p>
 | 
					     * <p>
 | 
				
			||||||
     * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
 | 
					     * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
 | 
				
			||||||
     *
 | 
					     *
 | 
				
			||||||
@@ -546,7 +585,7 @@ public class OllamaAPI {
 | 
				
			|||||||
     * @throws IOException          in case the responseStream can not be read
 | 
					     * @throws IOException          in case the responseStream can not be read
 | 
				
			||||||
     * @throws InterruptedException in case the server is not reachable or network issues happen
 | 
					     * @throws InterruptedException in case the server is not reachable or network issues happen
 | 
				
			||||||
     */
 | 
					     */
 | 
				
			||||||
    public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
 | 
					    public OllamaChatResult chat(OllamaChatRequest request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
 | 
				
			||||||
        OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
 | 
					        OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
 | 
				
			||||||
        OllamaResult result;
 | 
					        OllamaResult result;
 | 
				
			||||||
        if (streamHandler != null) {
 | 
					        if (streamHandler != null) {
 | 
				
			||||||
@@ -573,7 +612,7 @@ public class OllamaAPI {
 | 
				
			|||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    private OllamaResult generateSyncForOllamaRequestModel(
 | 
					    private OllamaResult generateSyncForOllamaRequestModel(
 | 
				
			||||||
            OllamaGenerateRequestModel ollamaRequestModel, OllamaStreamHandler streamHandler)
 | 
					            OllamaGenerateRequest ollamaRequestModel, OllamaStreamHandler streamHandler)
 | 
				
			||||||
            throws OllamaBaseException, IOException, InterruptedException {
 | 
					            throws OllamaBaseException, IOException, InterruptedException {
 | 
				
			||||||
        OllamaGenerateEndpointCaller requestCaller =
 | 
					        OllamaGenerateEndpointCaller requestCaller =
 | 
				
			||||||
                new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
 | 
					                new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -2,7 +2,7 @@ package io.github.ollama4j.models.chat;
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
import java.util.List;
 | 
					import java.util.List;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import io.github.ollama4j.models.OllamaCommonRequestModel;
 | 
					import io.github.ollama4j.models.request.OllamaCommonRequest;
 | 
				
			||||||
import io.github.ollama4j.utils.OllamaRequestBody;
 | 
					import io.github.ollama4j.utils.OllamaRequestBody;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import lombok.Getter;
 | 
					import lombok.Getter;
 | 
				
			||||||
@@ -17,20 +17,20 @@ import lombok.Setter;
 | 
				
			|||||||
 */
 | 
					 */
 | 
				
			||||||
@Getter
 | 
					@Getter
 | 
				
			||||||
@Setter
 | 
					@Setter
 | 
				
			||||||
public class OllamaChatRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody {
 | 
					public class OllamaChatRequest extends OllamaCommonRequest implements OllamaRequestBody {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  private List<OllamaChatMessage> messages;
 | 
					  private List<OllamaChatMessage> messages;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  public OllamaChatRequestModel() {}
 | 
					  public OllamaChatRequest() {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  public OllamaChatRequestModel(String model, List<OllamaChatMessage> messages) {
 | 
					  public OllamaChatRequest(String model, List<OllamaChatMessage> messages) {
 | 
				
			||||||
    this.model = model;
 | 
					    this.model = model;
 | 
				
			||||||
    this.messages = messages;
 | 
					    this.messages = messages;
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  @Override
 | 
					  @Override
 | 
				
			||||||
  public boolean equals(Object o) {
 | 
					  public boolean equals(Object o) {
 | 
				
			||||||
    if (!(o instanceof OllamaChatRequestModel)) {
 | 
					    if (!(o instanceof OllamaChatRequest)) {
 | 
				
			||||||
      return false;
 | 
					      return false;
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -1,5 +1,10 @@
 | 
				
			|||||||
package io.github.ollama4j.models.chat;
 | 
					package io.github.ollama4j.models.chat;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import io.github.ollama4j.utils.Options;
 | 
				
			||||||
 | 
					import io.github.ollama4j.utils.Utils;
 | 
				
			||||||
 | 
					import org.slf4j.Logger;
 | 
				
			||||||
 | 
					import org.slf4j.LoggerFactory;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import java.io.File;
 | 
					import java.io.File;
 | 
				
			||||||
import java.io.IOException;
 | 
					import java.io.IOException;
 | 
				
			||||||
import java.net.URISyntaxException;
 | 
					import java.net.URISyntaxException;
 | 
				
			||||||
@@ -8,35 +13,29 @@ import java.util.ArrayList;
 | 
				
			|||||||
import java.util.List;
 | 
					import java.util.List;
 | 
				
			||||||
import java.util.stream.Collectors;
 | 
					import java.util.stream.Collectors;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import org.slf4j.Logger;
 | 
					 | 
				
			||||||
import org.slf4j.LoggerFactory;
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
import io.github.ollama4j.utils.Options;
 | 
					 | 
				
			||||||
import io.github.ollama4j.utils.Utils;
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
/**
 | 
					/**
 | 
				
			||||||
 * Helper class for creating {@link OllamaChatRequestModel} objects using the builder-pattern.
 | 
					 * Helper class for creating {@link OllamaChatRequest} objects using the builder-pattern.
 | 
				
			||||||
 */
 | 
					 */
 | 
				
			||||||
public class OllamaChatRequestBuilder {
 | 
					public class OllamaChatRequestBuilder {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class);
 | 
					    private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages) {
 | 
					    private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages) {
 | 
				
			||||||
        request = new OllamaChatRequestModel(model, messages);
 | 
					        request = new OllamaChatRequest(model, messages);
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    private OllamaChatRequestModel request;
 | 
					    private OllamaChatRequest request;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    public static OllamaChatRequestBuilder getInstance(String model) {
 | 
					    public static OllamaChatRequestBuilder getInstance(String model) {
 | 
				
			||||||
        return new OllamaChatRequestBuilder(model, new ArrayList<>());
 | 
					        return new OllamaChatRequestBuilder(model, new ArrayList<>());
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    public OllamaChatRequestModel build(){
 | 
					    public OllamaChatRequest build() {
 | 
				
			||||||
        return request;
 | 
					        return request;
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    public void reset() {
 | 
					    public void reset() {
 | 
				
			||||||
        request = new OllamaChatRequestModel(request.getModel(), new ArrayList<>());
 | 
					        request = new OllamaChatRequest(request.getModel(), new ArrayList<>());
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images) {
 | 
					    public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images) {
 | 
				
			||||||
@@ -63,11 +62,9 @@ public class OllamaChatRequestBuilder {
 | 
				
			|||||||
            for (String imageUrl : imageUrls) {
 | 
					            for (String imageUrl : imageUrls) {
 | 
				
			||||||
                try {
 | 
					                try {
 | 
				
			||||||
                    binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl));
 | 
					                    binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl));
 | 
				
			||||||
                }
 | 
					                } catch (URISyntaxException e) {
 | 
				
			||||||
                    catch (URISyntaxException e){
 | 
					 | 
				
			||||||
                    LOG.warn(String.format("URL '%s' could not be accessed, will not add to message!", imageUrl), e);
 | 
					                    LOG.warn(String.format("URL '%s' could not be accessed, will not add to message!", imageUrl), e);
 | 
				
			||||||
                }
 | 
					                } catch (IOException e) {
 | 
				
			||||||
                catch (IOException e){
 | 
					 | 
				
			||||||
                    LOG.warn(String.format("Content of URL '%s' could not be read, will not add to message!", imageUrl), e);
 | 
					                    LOG.warn(String.format("Content of URL '%s' could not be read, will not add to message!", imageUrl), e);
 | 
				
			||||||
                }
 | 
					                }
 | 
				
			||||||
            }
 | 
					            }
 | 
				
			||||||
@@ -78,8 +75,7 @@ public class OllamaChatRequestBuilder {
 | 
				
			|||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages) {
 | 
					    public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages) {
 | 
				
			||||||
        this.request.getMessages().addAll(messages);
 | 
					        return new OllamaChatRequestBuilder(request.getModel(), messages);
 | 
				
			||||||
        return this;
 | 
					 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    public OllamaChatRequestBuilder withOptions(Options options) {
 | 
					    public OllamaChatRequestBuilder withOptions(Options options) {
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -2,7 +2,7 @@ package io.github.ollama4j.models.chat;
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
import java.util.List;
 | 
					import java.util.List;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import io.github.ollama4j.models.OllamaResult;
 | 
					import io.github.ollama4j.models.response.OllamaResult;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
/**
 | 
					/**
 | 
				
			||||||
 * Specific chat-API result that contains the chat history sent to the model and appends the answer as {@link OllamaChatResult} given by the
 | 
					 * Specific chat-API result that contains the chat history sent to the model and appends the answer as {@link OllamaChatResult} given by the
 | 
				
			||||||
@@ -16,17 +16,11 @@ public class OllamaChatResult extends OllamaResult{
 | 
				
			|||||||
            List<OllamaChatMessage> chatHistory) {
 | 
					            List<OllamaChatMessage> chatHistory) {
 | 
				
			||||||
        super(response, responseTime, httpStatusCode);
 | 
					        super(response, responseTime, httpStatusCode);
 | 
				
			||||||
        this.chatHistory = chatHistory;
 | 
					        this.chatHistory = chatHistory;
 | 
				
			||||||
        appendAnswerToChatHistory(response);
 | 
					 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    public List<OllamaChatMessage> getChatHistory() {
 | 
					    public List<OllamaChatMessage> getChatHistory() {
 | 
				
			||||||
        return chatHistory;
 | 
					        return chatHistory;
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
    
 | 
					    
 | 
				
			||||||
    private void appendAnswerToChatHistory(String answer){
 | 
					 | 
				
			||||||
        OllamaChatMessage assistantMessage = new OllamaChatMessage(OllamaChatMessageRole.ASSISTANT, answer);
 | 
					 | 
				
			||||||
        this.chatHistory.add(assistantMessage);
 | 
					 | 
				
			||||||
    }
 | 
					 | 
				
			||||||
    
 | 
					 | 
				
			||||||
    
 | 
					    
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -11,8 +11,6 @@ public class OllamaChatStreamObserver {
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
    private List<OllamaChatResponseModel> responseParts = new ArrayList<>();
 | 
					    private List<OllamaChatResponseModel> responseParts = new ArrayList<>();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    private String message = "";
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    public OllamaChatStreamObserver(OllamaStreamHandler streamHandler) {
 | 
					    public OllamaChatStreamObserver(OllamaStreamHandler streamHandler) {
 | 
				
			||||||
        this.streamHandler = streamHandler;
 | 
					        this.streamHandler = streamHandler;
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
@@ -23,8 +21,7 @@ public class OllamaChatStreamObserver {
 | 
				
			|||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart) {
 | 
					    protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart) {
 | 
				
			||||||
        message = message + currentResponsePart.getMessage().getContent();
 | 
					        streamHandler.accept(currentResponsePart.getMessage().getContent());
 | 
				
			||||||
        streamHandler.accept(message);
 | 
					 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,7 +1,7 @@
 | 
				
			|||||||
package io.github.ollama4j.models.generate;
 | 
					package io.github.ollama4j.models.generate;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import io.github.ollama4j.models.OllamaCommonRequestModel;
 | 
					import io.github.ollama4j.models.request.OllamaCommonRequest;
 | 
				
			||||||
import io.github.ollama4j.utils.OllamaRequestBody;
 | 
					import io.github.ollama4j.utils.OllamaRequestBody;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import java.util.List;
 | 
					import java.util.List;
 | 
				
			||||||
@@ -11,7 +11,7 @@ import lombok.Setter;
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
@Getter
 | 
					@Getter
 | 
				
			||||||
@Setter
 | 
					@Setter
 | 
				
			||||||
public class OllamaGenerateRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody{
 | 
					public class OllamaGenerateRequest extends OllamaCommonRequest implements OllamaRequestBody{
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  private String prompt;
 | 
					  private String prompt;
 | 
				
			||||||
  private List<String> images;
 | 
					  private List<String> images;
 | 
				
			||||||
@@ -20,15 +20,15 @@ public class OllamaGenerateRequestModel extends OllamaCommonRequestModel impleme
 | 
				
			|||||||
  private String context;
 | 
					  private String context;
 | 
				
			||||||
  private boolean raw;
 | 
					  private boolean raw;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  public OllamaGenerateRequestModel() {
 | 
					  public OllamaGenerateRequest() {
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  public OllamaGenerateRequestModel(String model, String prompt) {
 | 
					  public OllamaGenerateRequest(String model, String prompt) {
 | 
				
			||||||
    this.model = model;
 | 
					    this.model = model;
 | 
				
			||||||
    this.prompt = prompt;
 | 
					    this.prompt = prompt;
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  public OllamaGenerateRequestModel(String model, String prompt, List<String> images) {
 | 
					  public OllamaGenerateRequest(String model, String prompt, List<String> images) {
 | 
				
			||||||
    this.model = model;
 | 
					    this.model = model;
 | 
				
			||||||
    this.prompt = prompt;
 | 
					    this.prompt = prompt;
 | 
				
			||||||
    this.images = images;
 | 
					    this.images = images;
 | 
				
			||||||
@@ -36,7 +36,7 @@ public class OllamaGenerateRequestModel extends OllamaCommonRequestModel impleme
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
    @Override
 | 
					    @Override
 | 
				
			||||||
  public boolean equals(Object o) {
 | 
					  public boolean equals(Object o) {
 | 
				
			||||||
    if (!(o instanceof OllamaGenerateRequestModel)) {
 | 
					    if (!(o instanceof OllamaGenerateRequest)) {
 | 
				
			||||||
      return false;
 | 
					      return false;
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -3,22 +3,22 @@ package io.github.ollama4j.models.generate;
 | 
				
			|||||||
import io.github.ollama4j.utils.Options;
 | 
					import io.github.ollama4j.utils.Options;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
/**
 | 
					/**
 | 
				
			||||||
 * Helper class for creating {@link OllamaGenerateRequestModel}
 | 
					 * Helper class for creating {@link OllamaGenerateRequest}
 | 
				
			||||||
 * objects using the builder-pattern.
 | 
					 * objects using the builder-pattern.
 | 
				
			||||||
 */
 | 
					 */
 | 
				
			||||||
public class OllamaGenerateRequestBuilder {
 | 
					public class OllamaGenerateRequestBuilder {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    private OllamaGenerateRequestBuilder(String model, String prompt){
 | 
					    private OllamaGenerateRequestBuilder(String model, String prompt){
 | 
				
			||||||
        request = new OllamaGenerateRequestModel(model, prompt);
 | 
					        request = new OllamaGenerateRequest(model, prompt);
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    private OllamaGenerateRequestModel request;
 | 
					    private OllamaGenerateRequest request;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    public static OllamaGenerateRequestBuilder getInstance(String model){
 | 
					    public static OllamaGenerateRequestBuilder getInstance(String model){
 | 
				
			||||||
        return new OllamaGenerateRequestBuilder(model,"");
 | 
					        return new OllamaGenerateRequestBuilder(model,"");
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    public OllamaGenerateRequestModel build(){
 | 
					    public OllamaGenerateRequest build(){
 | 
				
			||||||
        return request;
 | 
					        return request;
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -0,0 +1,63 @@
 | 
				
			|||||||
 | 
					package io.github.ollama4j.models.ps;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 | 
				
			||||||
 | 
					import com.fasterxml.jackson.annotation.JsonProperty;
 | 
				
			||||||
 | 
					import lombok.Data;
 | 
				
			||||||
 | 
					import lombok.NoArgsConstructor;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import java.util.List;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					@Data
 | 
				
			||||||
 | 
					@NoArgsConstructor
 | 
				
			||||||
 | 
					@JsonIgnoreProperties(ignoreUnknown = true)
 | 
				
			||||||
 | 
					public class ModelsProcessResponse {
 | 
				
			||||||
 | 
					    @JsonProperty("models")
 | 
				
			||||||
 | 
					    private List<ModelProcess> models;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    @Data
 | 
				
			||||||
 | 
					    @NoArgsConstructor
 | 
				
			||||||
 | 
					    public static class ModelProcess {
 | 
				
			||||||
 | 
					        @JsonProperty("name")
 | 
				
			||||||
 | 
					        private String name;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        @JsonProperty("model")
 | 
				
			||||||
 | 
					        private String model;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        @JsonProperty("size")
 | 
				
			||||||
 | 
					        private long size;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        @JsonProperty("digest")
 | 
				
			||||||
 | 
					        private String digest;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        @JsonProperty("details")
 | 
				
			||||||
 | 
					        private ModelDetails details;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        @JsonProperty("expires_at")
 | 
				
			||||||
 | 
					        private String expiresAt; // Consider using LocalDateTime if you need to process date/time
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        @JsonProperty("size_vram")
 | 
				
			||||||
 | 
					        private long sizeVram;
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    @Data
 | 
				
			||||||
 | 
					    @NoArgsConstructor
 | 
				
			||||||
 | 
					    public static class ModelDetails {
 | 
				
			||||||
 | 
					        @JsonProperty("parent_model")
 | 
				
			||||||
 | 
					        private String parentModel;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        @JsonProperty("format")
 | 
				
			||||||
 | 
					        private String format;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        @JsonProperty("family")
 | 
				
			||||||
 | 
					        private String family;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        @JsonProperty("families")
 | 
				
			||||||
 | 
					        private List<String> families;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        @JsonProperty("parameter_size")
 | 
				
			||||||
 | 
					        private String parameterSize;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        @JsonProperty("quantization_level")
 | 
				
			||||||
 | 
					        private String quantizationLevel;
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
@@ -1,4 +1,4 @@
 | 
				
			|||||||
package io.github.ollama4j.models;
 | 
					package io.github.ollama4j.models.request;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import lombok.AllArgsConstructor;
 | 
					import lombok.AllArgsConstructor;
 | 
				
			||||||
import lombok.Data;
 | 
					import lombok.Data;
 | 
				
			||||||
@@ -2,8 +2,7 @@ package io.github.ollama4j.models.request;
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
import com.fasterxml.jackson.core.JsonProcessingException;
 | 
					import com.fasterxml.jackson.core.JsonProcessingException;
 | 
				
			||||||
import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
					import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
				
			||||||
import io.github.ollama4j.models.BasicAuth;
 | 
					import io.github.ollama4j.models.response.OllamaResult;
 | 
				
			||||||
import io.github.ollama4j.models.OllamaResult;
 | 
					 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatResponseModel;
 | 
					import io.github.ollama4j.models.chat.OllamaChatResponseModel;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatStreamObserver;
 | 
					import io.github.ollama4j.models.chat.OllamaChatStreamObserver;
 | 
				
			||||||
import io.github.ollama4j.models.generate.OllamaStreamHandler;
 | 
					import io.github.ollama4j.models.generate.OllamaStreamHandler;
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,4 +1,4 @@
 | 
				
			|||||||
package io.github.ollama4j.models;
 | 
					package io.github.ollama4j.models.request;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import java.util.Map;
 | 
					import java.util.Map;
 | 
				
			||||||
import com.fasterxml.jackson.annotation.JsonInclude;
 | 
					import com.fasterxml.jackson.annotation.JsonInclude;
 | 
				
			||||||
@@ -12,7 +12,7 @@ import lombok.Data;
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
@Data
 | 
					@Data
 | 
				
			||||||
@JsonInclude(JsonInclude.Include.NON_NULL)
 | 
					@JsonInclude(JsonInclude.Include.NON_NULL)
 | 
				
			||||||
public abstract class OllamaCommonRequestModel {
 | 
					public abstract class OllamaCommonRequest {
 | 
				
			||||||
  
 | 
					  
 | 
				
			||||||
  protected String model;  
 | 
					  protected String model;  
 | 
				
			||||||
  @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)
 | 
					  @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)
 | 
				
			||||||
@@ -2,9 +2,8 @@ package io.github.ollama4j.models.request;
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
					import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
				
			||||||
import io.github.ollama4j.models.BasicAuth;
 | 
					import io.github.ollama4j.models.response.OllamaErrorResponse;
 | 
				
			||||||
import io.github.ollama4j.models.OllamaErrorResponseModel;
 | 
					import io.github.ollama4j.models.response.OllamaResult;
 | 
				
			||||||
import io.github.ollama4j.models.OllamaResult;
 | 
					 | 
				
			||||||
import io.github.ollama4j.utils.OllamaRequestBody;
 | 
					import io.github.ollama4j.utils.OllamaRequestBody;
 | 
				
			||||||
import io.github.ollama4j.utils.Utils;
 | 
					import io.github.ollama4j.utils.Utils;
 | 
				
			||||||
import org.slf4j.Logger;
 | 
					import org.slf4j.Logger;
 | 
				
			||||||
@@ -78,19 +77,19 @@ public abstract class OllamaEndpointCaller {
 | 
				
			|||||||
            while ((line = reader.readLine()) != null) {
 | 
					            while ((line = reader.readLine()) != null) {
 | 
				
			||||||
                if (statusCode == 404) {
 | 
					                if (statusCode == 404) {
 | 
				
			||||||
                    LOG.warn("Status code: 404 (Not Found)");
 | 
					                    LOG.warn("Status code: 404 (Not Found)");
 | 
				
			||||||
                    OllamaErrorResponseModel ollamaResponseModel =
 | 
					                    OllamaErrorResponse ollamaResponseModel =
 | 
				
			||||||
                            Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
 | 
					                            Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
 | 
				
			||||||
                    responseBuffer.append(ollamaResponseModel.getError());
 | 
					                    responseBuffer.append(ollamaResponseModel.getError());
 | 
				
			||||||
                } else if (statusCode == 401) {
 | 
					                } else if (statusCode == 401) {
 | 
				
			||||||
                    LOG.warn("Status code: 401 (Unauthorized)");
 | 
					                    LOG.warn("Status code: 401 (Unauthorized)");
 | 
				
			||||||
                    OllamaErrorResponseModel ollamaResponseModel =
 | 
					                    OllamaErrorResponse ollamaResponseModel =
 | 
				
			||||||
                            Utils.getObjectMapper()
 | 
					                            Utils.getObjectMapper()
 | 
				
			||||||
                                    .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class);
 | 
					                                    .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponse.class);
 | 
				
			||||||
                    responseBuffer.append(ollamaResponseModel.getError());
 | 
					                    responseBuffer.append(ollamaResponseModel.getError());
 | 
				
			||||||
                } else if (statusCode == 400) {
 | 
					                } else if (statusCode == 400) {
 | 
				
			||||||
                    LOG.warn("Status code: 400 (Bad Request)");
 | 
					                    LOG.warn("Status code: 400 (Bad Request)");
 | 
				
			||||||
                    OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line,
 | 
					                    OllamaErrorResponse ollamaResponseModel = Utils.getObjectMapper().readValue(line,
 | 
				
			||||||
                            OllamaErrorResponseModel.class);
 | 
					                            OllamaErrorResponse.class);
 | 
				
			||||||
                    responseBuffer.append(ollamaResponseModel.getError());
 | 
					                    responseBuffer.append(ollamaResponseModel.getError());
 | 
				
			||||||
                } else {
 | 
					                } else {
 | 
				
			||||||
                    boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);
 | 
					                    boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -2,8 +2,7 @@ package io.github.ollama4j.models.request;
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
import com.fasterxml.jackson.core.JsonProcessingException;
 | 
					import com.fasterxml.jackson.core.JsonProcessingException;
 | 
				
			||||||
import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
					import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
				
			||||||
import io.github.ollama4j.models.BasicAuth;
 | 
					import io.github.ollama4j.models.response.OllamaResult;
 | 
				
			||||||
import io.github.ollama4j.models.OllamaResult;
 | 
					 | 
				
			||||||
import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
 | 
					import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
 | 
				
			||||||
import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
 | 
					import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
 | 
				
			||||||
import io.github.ollama4j.models.generate.OllamaStreamHandler;
 | 
					import io.github.ollama4j.models.generate.OllamaStreamHandler;
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,6 +1,7 @@
 | 
				
			|||||||
package io.github.ollama4j.models;
 | 
					package io.github.ollama4j.models.response;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import java.util.List;
 | 
					import java.util.List;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import lombok.Data;
 | 
					import lombok.Data;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@Data
 | 
					@Data
 | 
				
			||||||
@@ -1,4 +1,4 @@
 | 
				
			|||||||
package io.github.ollama4j.models;
 | 
					package io.github.ollama4j.models.response;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import java.time.OffsetDateTime;
 | 
					import java.time.OffsetDateTime;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -1,4 +1,4 @@
 | 
				
			|||||||
package io.github.ollama4j.models;
 | 
					package io.github.ollama4j.models.response;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 | 
					import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 | 
				
			||||||
import com.fasterxml.jackson.annotation.JsonProperty;
 | 
					import com.fasterxml.jackson.annotation.JsonProperty;
 | 
				
			||||||
@@ -1,4 +1,4 @@
 | 
				
			|||||||
package io.github.ollama4j.models;
 | 
					package io.github.ollama4j.models.response;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 | 
					import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 | 
				
			||||||
import com.fasterxml.jackson.annotation.JsonProperty;
 | 
					import com.fasterxml.jackson.annotation.JsonProperty;
 | 
				
			||||||
@@ -1,4 +1,4 @@
 | 
				
			|||||||
package io.github.ollama4j.models;
 | 
					package io.github.ollama4j.models.response;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 | 
					import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 | 
				
			||||||
import lombok.Data;
 | 
					import lombok.Data;
 | 
				
			||||||
@@ -1,8 +1,7 @@
 | 
				
			|||||||
package io.github.ollama4j.models;
 | 
					package io.github.ollama4j.models.response;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import io.github.ollama4j.OllamaResultStream;
 | 
					 | 
				
			||||||
import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
					import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
				
			||||||
import io.github.ollama4j.models.generate.OllamaGenerateRequestModel;
 | 
					import io.github.ollama4j.models.generate.OllamaGenerateRequest;
 | 
				
			||||||
import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
 | 
					import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
 | 
				
			||||||
import io.github.ollama4j.utils.Utils;
 | 
					import io.github.ollama4j.utils.Utils;
 | 
				
			||||||
import lombok.Data;
 | 
					import lombok.Data;
 | 
				
			||||||
@@ -25,7 +24,7 @@ import java.time.Duration;
 | 
				
			|||||||
@SuppressWarnings("unused")
 | 
					@SuppressWarnings("unused")
 | 
				
			||||||
public class OllamaAsyncResultStreamer extends Thread {
 | 
					public class OllamaAsyncResultStreamer extends Thread {
 | 
				
			||||||
    private final HttpRequest.Builder requestBuilder;
 | 
					    private final HttpRequest.Builder requestBuilder;
 | 
				
			||||||
    private final OllamaGenerateRequestModel ollamaRequestModel;
 | 
					    private final OllamaGenerateRequest ollamaRequestModel;
 | 
				
			||||||
    private final OllamaResultStream stream = new OllamaResultStream();
 | 
					    private final OllamaResultStream stream = new OllamaResultStream();
 | 
				
			||||||
    private String completeResponse;
 | 
					    private String completeResponse;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -56,7 +55,7 @@ public class OllamaAsyncResultStreamer extends Thread {
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
    public OllamaAsyncResultStreamer(
 | 
					    public OllamaAsyncResultStreamer(
 | 
				
			||||||
            HttpRequest.Builder requestBuilder,
 | 
					            HttpRequest.Builder requestBuilder,
 | 
				
			||||||
            OllamaGenerateRequestModel ollamaRequestModel,
 | 
					            OllamaGenerateRequest ollamaRequestModel,
 | 
				
			||||||
            long requestTimeoutSeconds) {
 | 
					            long requestTimeoutSeconds) {
 | 
				
			||||||
        this.requestBuilder = requestBuilder;
 | 
					        this.requestBuilder = requestBuilder;
 | 
				
			||||||
        this.ollamaRequestModel = ollamaRequestModel;
 | 
					        this.ollamaRequestModel = ollamaRequestModel;
 | 
				
			||||||
@@ -91,8 +90,8 @@ public class OllamaAsyncResultStreamer extends Thread {
 | 
				
			|||||||
                StringBuilder responseBuffer = new StringBuilder();
 | 
					                StringBuilder responseBuffer = new StringBuilder();
 | 
				
			||||||
                while ((line = reader.readLine()) != null) {
 | 
					                while ((line = reader.readLine()) != null) {
 | 
				
			||||||
                    if (statusCode == 404) {
 | 
					                    if (statusCode == 404) {
 | 
				
			||||||
                        OllamaErrorResponseModel ollamaResponseModel =
 | 
					                        OllamaErrorResponse ollamaResponseModel =
 | 
				
			||||||
                                Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
 | 
					                                Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
 | 
				
			||||||
                        stream.add(ollamaResponseModel.getError());
 | 
					                        stream.add(ollamaResponseModel.getError());
 | 
				
			||||||
                        responseBuffer.append(ollamaResponseModel.getError());
 | 
					                        responseBuffer.append(ollamaResponseModel.getError());
 | 
				
			||||||
                    } else {
 | 
					                    } else {
 | 
				
			||||||
@@ -1,11 +1,11 @@
 | 
				
			|||||||
package io.github.ollama4j.models;
 | 
					package io.github.ollama4j.models.response;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 | 
					import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 | 
				
			||||||
import lombok.Data;
 | 
					import lombok.Data;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@Data
 | 
					@Data
 | 
				
			||||||
@JsonIgnoreProperties(ignoreUnknown = true)
 | 
					@JsonIgnoreProperties(ignoreUnknown = true)
 | 
				
			||||||
public class OllamaErrorResponseModel {
 | 
					public class OllamaErrorResponse {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  private String error;
 | 
					  private String error;
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
@@ -1,4 +1,4 @@
 | 
				
			|||||||
package io.github.ollama4j.models;
 | 
					package io.github.ollama4j.models.response;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import static io.github.ollama4j.utils.Utils.getObjectMapper;
 | 
					import static io.github.ollama4j.utils.Utils.getObjectMapper;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -1,4 +1,4 @@
 | 
				
			|||||||
package io.github.ollama4j;
 | 
					package io.github.ollama4j.models.response;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import java.util.Iterator;
 | 
					import java.util.Iterator;
 | 
				
			||||||
import java.util.LinkedList;
 | 
					import java.util.LinkedList;
 | 
				
			||||||
@@ -1,6 +1,6 @@
 | 
				
			|||||||
package io.github.ollama4j.tools;
 | 
					package io.github.ollama4j.tools;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import io.github.ollama4j.models.OllamaResult;
 | 
					import io.github.ollama4j.models.response.OllamaResult;
 | 
				
			||||||
import lombok.AllArgsConstructor;
 | 
					import lombok.AllArgsConstructor;
 | 
				
			||||||
import lombok.Data;
 | 
					import lombok.Data;
 | 
				
			||||||
import lombok.NoArgsConstructor;
 | 
					import lombok.NoArgsConstructor;
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -2,11 +2,11 @@ package io.github.ollama4j.integrationtests;
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
					import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
				
			||||||
import io.github.ollama4j.models.ModelDetail;
 | 
					import io.github.ollama4j.models.response.ModelDetail;
 | 
				
			||||||
import io.github.ollama4j.models.OllamaResult;
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequest;
 | 
				
			||||||
 | 
					import io.github.ollama4j.models.response.OllamaResult;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 | 
					import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestModel;
 | 
					 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatResult;
 | 
					import io.github.ollama4j.models.chat.OllamaChatResult;
 | 
				
			||||||
import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestBuilder;
 | 
					import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestBuilder;
 | 
				
			||||||
import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
 | 
					import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
 | 
				
			||||||
@@ -177,7 +177,7 @@ class TestRealAPIs {
 | 
				
			|||||||
        testEndpointReachability();
 | 
					        testEndpointReachability();
 | 
				
			||||||
        try {
 | 
					        try {
 | 
				
			||||||
            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
 | 
					            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
 | 
				
			||||||
            OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
 | 
					            OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
 | 
				
			||||||
                    .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!")
 | 
					                    .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!")
 | 
				
			||||||
                    .withMessage(OllamaChatMessageRole.USER, "And what is the second larges city?")
 | 
					                    .withMessage(OllamaChatMessageRole.USER, "And what is the second larges city?")
 | 
				
			||||||
                    .build();
 | 
					                    .build();
 | 
				
			||||||
@@ -197,7 +197,7 @@ class TestRealAPIs {
 | 
				
			|||||||
        testEndpointReachability();
 | 
					        testEndpointReachability();
 | 
				
			||||||
        try {
 | 
					        try {
 | 
				
			||||||
            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
 | 
					            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
 | 
				
			||||||
            OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM,
 | 
					            OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM,
 | 
				
			||||||
                            "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
 | 
					                            "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
 | 
				
			||||||
                    .withMessage(OllamaChatMessageRole.USER,
 | 
					                    .withMessage(OllamaChatMessageRole.USER,
 | 
				
			||||||
                            "What is the capital of France? And what's France's connection with Mona Lisa?")
 | 
					                            "What is the capital of France? And what's France's connection with Mona Lisa?")
 | 
				
			||||||
@@ -219,7 +219,7 @@ class TestRealAPIs {
 | 
				
			|||||||
        testEndpointReachability();
 | 
					        testEndpointReachability();
 | 
				
			||||||
        try {
 | 
					        try {
 | 
				
			||||||
            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
 | 
					            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
 | 
				
			||||||
            OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
 | 
					            OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER,
 | 
				
			||||||
                            "What is the capital of France? And what's France's connection with Mona Lisa?")
 | 
					                            "What is the capital of France? And what's France's connection with Mona Lisa?")
 | 
				
			||||||
                    .build();
 | 
					                    .build();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -245,7 +245,7 @@ class TestRealAPIs {
 | 
				
			|||||||
        try {
 | 
					        try {
 | 
				
			||||||
            OllamaChatRequestBuilder builder =
 | 
					            OllamaChatRequestBuilder builder =
 | 
				
			||||||
                    OllamaChatRequestBuilder.getInstance(config.getImageModel());
 | 
					                    OllamaChatRequestBuilder.getInstance(config.getImageModel());
 | 
				
			||||||
            OllamaChatRequestModel requestModel =
 | 
					            OllamaChatRequest requestModel =
 | 
				
			||||||
                    builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
 | 
					                    builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
 | 
				
			||||||
                            List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
 | 
					                            List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -275,7 +275,7 @@ class TestRealAPIs {
 | 
				
			|||||||
        testEndpointReachability();
 | 
					        testEndpointReachability();
 | 
				
			||||||
        try {
 | 
					        try {
 | 
				
			||||||
            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel());
 | 
					            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel());
 | 
				
			||||||
            OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
 | 
					            OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
 | 
				
			||||||
                            "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")
 | 
					                            "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")
 | 
				
			||||||
                    .build();
 | 
					                    .build();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -2,9 +2,9 @@ package io.github.ollama4j.unittests;
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
import io.github.ollama4j.OllamaAPI;
 | 
					import io.github.ollama4j.OllamaAPI;
 | 
				
			||||||
import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
					import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
				
			||||||
import io.github.ollama4j.models.ModelDetail;
 | 
					import io.github.ollama4j.models.response.ModelDetail;
 | 
				
			||||||
import io.github.ollama4j.models.OllamaAsyncResultStreamer;
 | 
					import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
 | 
				
			||||||
import io.github.ollama4j.models.OllamaResult;
 | 
					import io.github.ollama4j.models.response.OllamaResult;
 | 
				
			||||||
import io.github.ollama4j.types.OllamaModelType;
 | 
					import io.github.ollama4j.types.OllamaModelType;
 | 
				
			||||||
import io.github.ollama4j.utils.OptionsBuilder;
 | 
					import io.github.ollama4j.utils.OptionsBuilder;
 | 
				
			||||||
import org.junit.jupiter.api.Test;
 | 
					import org.junit.jupiter.api.Test;
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -5,16 +5,16 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 | 
				
			|||||||
import java.io.File;
 | 
					import java.io.File;
 | 
				
			||||||
import java.util.List;
 | 
					import java.util.List;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequest;
 | 
				
			||||||
import org.json.JSONObject;
 | 
					import org.json.JSONObject;
 | 
				
			||||||
import org.junit.jupiter.api.BeforeEach;
 | 
					import org.junit.jupiter.api.BeforeEach;
 | 
				
			||||||
import org.junit.jupiter.api.Test;
 | 
					import org.junit.jupiter.api.Test;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 | 
					import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
					import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 | 
				
			||||||
import io.github.ollama4j.models.chat.OllamaChatRequestModel;
 | 
					 | 
				
			||||||
import io.github.ollama4j.utils.OptionsBuilder;
 | 
					import io.github.ollama4j.utils.OptionsBuilder;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
public class TestChatRequestSerialization extends AbstractSerializationTest<OllamaChatRequestModel> {
 | 
					public class TestChatRequestSerialization extends AbstractSerializationTest<OllamaChatRequest> {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    private OllamaChatRequestBuilder builder;
 | 
					    private OllamaChatRequestBuilder builder;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -25,32 +25,32 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
    @Test
 | 
					    @Test
 | 
				
			||||||
    public void testRequestOnlyMandatoryFields() {
 | 
					    public void testRequestOnlyMandatoryFields() {
 | 
				
			||||||
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build();
 | 
					        OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build();
 | 
				
			||||||
        String jsonRequest = serialize(req);
 | 
					        String jsonRequest = serialize(req);
 | 
				
			||||||
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req);
 | 
					        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @Test
 | 
					    @Test
 | 
				
			||||||
    public void testRequestMultipleMessages() {
 | 
					    public void testRequestMultipleMessages() {
 | 
				
			||||||
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt")
 | 
					        OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt")
 | 
				
			||||||
        .withMessage(OllamaChatMessageRole.USER, "Some prompt")
 | 
					        .withMessage(OllamaChatMessageRole.USER, "Some prompt")
 | 
				
			||||||
        .build();
 | 
					        .build();
 | 
				
			||||||
        String jsonRequest = serialize(req);
 | 
					        String jsonRequest = serialize(req);
 | 
				
			||||||
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req);
 | 
					        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @Test
 | 
					    @Test
 | 
				
			||||||
    public void testRequestWithMessageAndImage() {
 | 
					    public void testRequestWithMessageAndImage() {
 | 
				
			||||||
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
 | 
					        OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
 | 
				
			||||||
                List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
 | 
					                List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
 | 
				
			||||||
        String jsonRequest = serialize(req);
 | 
					        String jsonRequest = serialize(req);
 | 
				
			||||||
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req);
 | 
					        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @Test
 | 
					    @Test
 | 
				
			||||||
    public void testRequestWithOptions() {
 | 
					    public void testRequestWithOptions() {
 | 
				
			||||||
        OptionsBuilder b = new OptionsBuilder();
 | 
					        OptionsBuilder b = new OptionsBuilder();
 | 
				
			||||||
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
 | 
					        OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
 | 
				
			||||||
            .withOptions(b.setMirostat(1).build())
 | 
					            .withOptions(b.setMirostat(1).build())
 | 
				
			||||||
            .withOptions(b.setTemperature(1L).build())
 | 
					            .withOptions(b.setTemperature(1L).build())
 | 
				
			||||||
            .withOptions(b.setMirostatEta(1L).build())
 | 
					            .withOptions(b.setMirostatEta(1L).build())
 | 
				
			||||||
@@ -62,7 +62,7 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
 | 
				
			|||||||
            .build();
 | 
					            .build();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        String jsonRequest = serialize(req);
 | 
					        String jsonRequest = serialize(req);
 | 
				
			||||||
        OllamaChatRequestModel deserializeRequest = deserialize(jsonRequest, OllamaChatRequestModel.class);
 | 
					        OllamaChatRequest deserializeRequest = deserialize(jsonRequest, OllamaChatRequest.class);
 | 
				
			||||||
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
 | 
					        assertEqualsAfterUnmarshalling(deserializeRequest, req);
 | 
				
			||||||
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
 | 
					        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
 | 
				
			||||||
        assertEquals(1.0, deserializeRequest.getOptions().get("temperature"));
 | 
					        assertEquals(1.0, deserializeRequest.getOptions().get("temperature"));
 | 
				
			||||||
@@ -76,7 +76,7 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
    @Test
 | 
					    @Test
 | 
				
			||||||
    public void testWithJsonFormat() {
 | 
					    public void testWithJsonFormat() {
 | 
				
			||||||
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
 | 
					        OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
 | 
				
			||||||
                .withGetJsonResponse().build();
 | 
					                .withGetJsonResponse().build();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        String jsonRequest = serialize(req);
 | 
					        String jsonRequest = serialize(req);
 | 
				
			||||||
@@ -89,25 +89,25 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
    @Test
 | 
					    @Test
 | 
				
			||||||
    public void testWithTemplate() {
 | 
					    public void testWithTemplate() {
 | 
				
			||||||
        OllamaChatRequestModel req = builder.withTemplate("System Template")
 | 
					        OllamaChatRequest req = builder.withTemplate("System Template")
 | 
				
			||||||
            .build();
 | 
					            .build();
 | 
				
			||||||
        String jsonRequest = serialize(req);
 | 
					        String jsonRequest = serialize(req);
 | 
				
			||||||
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req);
 | 
					        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @Test
 | 
					    @Test
 | 
				
			||||||
    public void testWithStreaming() {
 | 
					    public void testWithStreaming() {
 | 
				
			||||||
        OllamaChatRequestModel req = builder.withStreaming().build();
 | 
					        OllamaChatRequest req = builder.withStreaming().build();
 | 
				
			||||||
        String jsonRequest = serialize(req);
 | 
					        String jsonRequest = serialize(req);
 | 
				
			||||||
        assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).isStream(), true);
 | 
					        assertEquals(deserialize(jsonRequest, OllamaChatRequest.class).isStream(), true);
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @Test
 | 
					    @Test
 | 
				
			||||||
    public void testWithKeepAlive() {
 | 
					    public void testWithKeepAlive() {
 | 
				
			||||||
        String expectedKeepAlive = "5m";
 | 
					        String expectedKeepAlive = "5m";
 | 
				
			||||||
        OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive)
 | 
					        OllamaChatRequest req = builder.withKeepAlive(expectedKeepAlive)
 | 
				
			||||||
            .build();
 | 
					            .build();
 | 
				
			||||||
        String jsonRequest = serialize(req);
 | 
					        String jsonRequest = serialize(req);
 | 
				
			||||||
        assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive);
 | 
					        assertEquals(deserialize(jsonRequest, OllamaChatRequest.class).getKeepAlive(), expectedKeepAlive);
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -2,16 +2,16 @@ package io.github.ollama4j.unittests.jackson;
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
import static org.junit.jupiter.api.Assertions.assertEquals;
 | 
					import static org.junit.jupiter.api.Assertions.assertEquals;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import io.github.ollama4j.models.generate.OllamaGenerateRequest;
 | 
				
			||||||
import org.json.JSONObject;
 | 
					import org.json.JSONObject;
 | 
				
			||||||
import org.junit.jupiter.api.BeforeEach;
 | 
					import org.junit.jupiter.api.BeforeEach;
 | 
				
			||||||
import org.junit.jupiter.api.Test;
 | 
					import org.junit.jupiter.api.Test;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
 | 
					import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
 | 
				
			||||||
import io.github.ollama4j.models.generate.OllamaGenerateRequestModel;
 | 
					 | 
				
			||||||
import io.github.ollama4j.utils.OptionsBuilder;
 | 
					import io.github.ollama4j.utils.OptionsBuilder;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
public class TestGenerateRequestSerialization extends AbstractSerializationTest<OllamaGenerateRequestModel> {
 | 
					public class TestGenerateRequestSerialization extends AbstractSerializationTest<OllamaGenerateRequest> {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    private OllamaGenerateRequestBuilder builder;
 | 
					    private OllamaGenerateRequestBuilder builder;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -22,27 +22,27 @@ public class TestGenerateRequestSerialization extends AbstractSerializationTest<
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
    @Test
 | 
					    @Test
 | 
				
			||||||
    public void testRequestOnlyMandatoryFields() {
 | 
					    public void testRequestOnlyMandatoryFields() {
 | 
				
			||||||
        OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build();
 | 
					        OllamaGenerateRequest req = builder.withPrompt("Some prompt").build();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        String jsonRequest = serialize(req);
 | 
					        String jsonRequest = serialize(req);
 | 
				
			||||||
        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequestModel.class), req);
 | 
					        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequest.class), req);
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @Test
 | 
					    @Test
 | 
				
			||||||
    public void testRequestWithOptions() {
 | 
					    public void testRequestWithOptions() {
 | 
				
			||||||
        OptionsBuilder b = new OptionsBuilder();
 | 
					        OptionsBuilder b = new OptionsBuilder();
 | 
				
			||||||
        OllamaGenerateRequestModel req =
 | 
					        OllamaGenerateRequest req =
 | 
				
			||||||
                builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build();
 | 
					                builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        String jsonRequest = serialize(req);
 | 
					        String jsonRequest = serialize(req);
 | 
				
			||||||
        OllamaGenerateRequestModel deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequestModel.class);
 | 
					        OllamaGenerateRequest deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequest.class);
 | 
				
			||||||
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
 | 
					        assertEqualsAfterUnmarshalling(deserializeRequest, req);
 | 
				
			||||||
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
 | 
					        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @Test
 | 
					    @Test
 | 
				
			||||||
    public void testWithJsonFormat() {
 | 
					    public void testWithJsonFormat() {
 | 
				
			||||||
        OllamaGenerateRequestModel req =
 | 
					        OllamaGenerateRequest req =
 | 
				
			||||||
                builder.withPrompt("Some prompt").withGetJsonResponse().build();
 | 
					                builder.withPrompt("Some prompt").withGetJsonResponse().build();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        String jsonRequest = serialize(req);
 | 
					        String jsonRequest = serialize(req);
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,6 +1,6 @@
 | 
				
			|||||||
package io.github.ollama4j.unittests.jackson;
 | 
					package io.github.ollama4j.unittests.jackson;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import io.github.ollama4j.models.Model;
 | 
					import io.github.ollama4j.models.response.Model;
 | 
				
			||||||
import org.junit.jupiter.api.Test;
 | 
					import org.junit.jupiter.api.Test;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
public class TestModelRequestSerialization extends AbstractSerializationTest<Model> {
 | 
					public class TestModelRequestSerialization extends AbstractSerializationTest<Model> {
 | 
				
			||||||
 
 | 
				
			|||||||
		Reference in New Issue
	
	Block a user