Mirror of https://github.com/amithkoujalgi/ollama4j.git (synced 2025-11-04 10:30:41 +01:00)

Compare commits

122 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 893e5dd763 | |
| | c520604f4b | |
| | a85c23d64a | |
| | d32a8b7d88 | |
| | 992625cf86 | |
| | bbebd26d07 | |
| | 3aa0fc77cb | |
| | 11a98a72a1 | |
| | 422601c0fc | |
| | 75e6576a13 | |
| | 51dd3f3e1e | |
| | 30250f79d9 | |
| | d4ee9ed051 | |
| | 4412ac683a | |
| | b5b1a26941 | |
| | a84230bbd1 | |
| | 00c9b16556 | |
| | 9a2194334f | |
| | f9cf11ecdf | |
| | 0af80865c3 | |
| | a304c01194 | |
| | 887708864e | |
| | 2f0c4fdcc9 | |
| | 73aabd7ca6 | |
| | 17ca2bdee3 | |
| | e43bd3acb4 | |
| | 0b041f4340 | |
| | 6c6062b757 | |
| | 68fd8b7cc8 | |
| | bb6f8aa343 | |
| | 12802be0bc | |
| | bd56ccfef7 | |
| | 51563f276f | |
| | 6e282124bf | |
| | 3ab9e4c283 | |
| | 2db6a22cc7 | |
| | cc69341620 | |
| | 4589a9032c | |
| | da273402b5 | |
| | cfa8aa14d7 | |
| | bc4e8303aa | |
| | f2f740a2a0 | |
| | 4cbb783a61 | |
| | 5c9e0b7d8a | |
| | 2f8577a24d | |
| | 02116b7025 | |
| | f3778f8786 | |
| | c6141634db | |
| | d9f98ad901 | |
| | 79d97445b8 | |
| | 1c40697c96 | |
| | f03026abb3 | |
| | 63a6e81ac2 | |
| | 76cad0f584 | |
| | bee2908d1e | |
| | 8a4c9fd969 | |
| | d470f940b0 | |
| | df402efaba | |
| | 677362abbf | |
| | 81689be194 | |
| | fd93036d08 | |
| | c9b05a725b | |
| | a4e1b4afe9 | |
| | 3d21813abb | |
| | 383d0f56ca | |
| | af1b213a76 | |
| | fed89a9643 | |
| | fd32aa33ff | |
| | b8a13e89b1 | |
| | c8f27edd6e | |
| | 5a936d8174 | |
| | 9b5ddbf4c4 | |
| | 7c233d5734 | |
| | e85aeae6e0 | |
| | a05052e095 | |
| | 10eb803e26 | |
| | bd2da8fdda | |
| | b0bb082bec | |
| | 81f564ef7f | |
| | 006b52f3db | |
| | 16634e60e4 | |
| | db8b73075b | |
| | dc9f79959a | |
| | 88f6d00763 | |
| | fd3a989a49 | |
| | 7580c6a549 | |
| | 9e6503d84b | |
| | ee21f7fdd8 | |
| | ecc295f484 | |
| | c528fef5fc | |
| | 38f1bda105 | |
| | d8a703503a | |
| | dd9ba7c937 | |
| | cf52c9610c | |
| | e8d709e99a | |
| | 51fbedad69 | |
| | 953605fa73 | |
| | 30bfdd9c6d | |
| | 91ee6cb4c1 | |
| | 8ef6fac28e | |
| | d9e3860123 | |
| | 515d1f0399 | |
| | be549430c5 | |
| | 4744315d45 | |
| | 8eea19a539 | |
| | b5801d84e0 | |
| | 165d04b1bb | |
| | 16d2160b52 | |
| | e39c47b8e1 | |
| | bb0785140b | |
| | e33ad1a1e3 | |
| | cd60c506cb | |
| | b55925df28 | |
| | 3a9b8c309d | |
| | bf07159522 | |
| | f8ca4d041d | |
| | 9c6a55f7b0 | |
| | 2866d83a2f | |
| | 45e5d07581 | |
| | 3a264cb6bb | |
| | 54edba144c | |
| | 9224d2da06 | |
**.gitea/workflows/publish.yaml** (new file, 32 lines)

```yaml
name: Build and Publish
on: push

jobs:
  build:
    runs-on: standard-22.04
    steps:
      - name: Check out
        uses: actions/checkout@v4

      - name: Set Up Java
        uses: actions/setup-java@v4
        with:
          distribution: 'temurin'
          java-version: '21'
          #cache: 'maven'
          #server-id: 'gitea'

      - name: Set up Maven
        uses: stCarolas/setup-maven@v5
        with:
          maven-version: 3.8.2

      - run: cat /root/.m2/toolchains.xml
      - run: cat /root/.m2/settings.xml

      - name: Build
        run: mvn -B package --file pom.xml

      - name: Publish
        run: mvn deploy
```

**.github/workflows/gh-mvn-publish.yml** (new file, 58 lines, vendored)

```yaml
name: Release Artifacts to GitHub Maven Packages

on:
  release:
    types: [ created ]

jobs:
  build:

    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          java-version: '17'
          distribution: 'temurin'
          server-id: github
          settings-path: ${{ github.workspace }}

      - name: maven-settings-xml-action
        uses: whelk-io/maven-settings-xml-action@v22
        with:
          servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]'

      - name: Find and Replace
        uses: jacobtomlinson/gha-find-replace@v3
        with:
          find: "ollama4j-revision"
          replace: ${{ github.ref_name }}
          regex: false

      - name: Find and Replace
        uses: jacobtomlinson/gha-find-replace@v3
        with:
          find: "mvn-repo-id"
          replace: github
          regex: false

      - name: Import GPG key
        uses: crazy-max/ghaction-import-gpg@v6
        with:
          gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
          passphrase: ${{ secrets.GPG_PASSPHRASE }}
      - name: List keys
        run: gpg -K

      - name: Build with Maven
        run: mvn --file pom.xml -U clean package -Punit-tests

      - name: Publish to GitHub Packages Apache Maven
        run: mvn deploy -Punit-tests -s $GITHUB_WORKSPACE/settings.xml -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=github -Drepo.user=${{ secrets.GH_MVN_USER }} -Drepo.pass=${{ secrets.GH_MVN_PASS }} -DaltDeploymentRepository=github::default::https://maven.pkg.github.com/ollama4j/ollama4j
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```

**.github/workflows/maven-publish.yml** (121 lines changed, vendored)

```diff
@@ -1,68 +1,95 @@
 # This workflow will build a package using Maven and then publish it to GitHub packages when a release is created
 # For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path

-name: Test and Publish Package
+name: Release Artifacts to Maven Central

-#on:
-#  release:
-#    types: [ "created" ]

 on:
-  push:
-    branches: [ "main" ]
-  workflow_dispatch:
+  release:
+    types: [ created ]

+#on:
+#  pull_request:
+#    types: [ opened, reopened ]
+#    branches: [ "main" ]

 jobs:
   build:

     runs-on: ubuntu-latest

     permissions:
       contents: write
       packages: write

     steps:
       - uses: actions/checkout@v3
-      - name: Set up JDK 11
+
+      - name: Set up JDK 17
         uses: actions/setup-java@v3
         with:
-          java-version: '11'
-          distribution: 'adopt-hotspot'
+          java-version: '17'
+          distribution: 'temurin'
           server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
           settings-path: ${{ github.workspace }} # location for the settings.xml file
-      - name: Build with Maven
-        run: mvn --file pom.xml -U clean package -Punit-tests
-      - name: Set up Apache Maven Central (Overwrite settings.xml)
-        uses: actions/setup-java@v3
-        with: # running setup-java again overwrites the settings.xml
-          java-version: '11'
-          distribution: 'adopt-hotspot'
-          cache: 'maven'
-          server-id: ossrh
-          server-username: MAVEN_USERNAME
-          server-password: MAVEN_PASSWORD
-          gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
-          gpg-passphrase: MAVEN_GPG_PASSPHRASE
-      - name: Set up Maven cache
-        uses: actions/cache@v3
+
+      - name: maven-settings-xml-action
+        uses: whelk-io/maven-settings-xml-action@v22
         with:
-          path: ~/.m2/repository
-          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
-          restore-keys: |
-            ${{ runner.os }}-maven-
-      - name: Build
-        run: mvn -B -ntp clean install
-      - name: Upload coverage reports to Codecov
-        uses: codecov/codecov-action@v3
+          servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]'
+
+      - name: Import GPG key
+        uses: crazy-max/ghaction-import-gpg@v6
+        with:
+          gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
+          passphrase: ${{ secrets.GPG_PASSPHRASE }}
+      - name: List keys
+        run: gpg -K
+
+      - name: Find and Replace
+        uses: jacobtomlinson/gha-find-replace@v3
+        with:
+          find: "ollama4j-revision"
+          replace: ${{ github.ref_name }}
+          regex: false
+
+      - name: Find and Replace
+        uses: jacobtomlinson/gha-find-replace@v3
+        with:
+          find: "mvn-repo-id"
+          replace: central
+          regex: false
+
+      - name: Publish to Maven Central
+        run: mvn deploy -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=central -Drepo.user=${{ secrets.MVN_USER }} -Drepo.pass=${{ secrets.MVN_PASS }}
+
+      - name: Upload Release Asset - JAR
+        uses: actions/upload-release-asset@v1
         env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
-      - name: Publish to GitHub Packages Apache Maven
-        #        if: >
-        #          github.event_name != 'pull_request' &&
-        #          github.ref_name == 'main' &&
-        #          contains(github.event.head_commit.message, 'release')
-        run: |
-          git config --global user.email "koujalgi.amith@gmail.com"
-          git config --global user.name "amithkoujalgi"
-          mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ github.event.release.upload_url }}
+          asset_path: target/ollama4j-${{ github.ref_name }}.jar
+          asset_name: ollama4j-${{ github.ref_name }}.jar
+          asset_content_type: application/x-jar
+
+      - name: Upload Release Asset - Javadoc JAR
+        uses: actions/upload-release-asset@v1
         env:
-          MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
-          MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
-          MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ github.event.release.upload_url }}
+          asset_path: target/ollama4j-${{ github.ref_name }}-javadoc.jar
+          asset_name: ollama4j-${{ github.ref_name }}-javadoc.jar
+          asset_content_type: application/x-jar
+
+      - name: Upload Release Asset - Sources JAR
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ github.event.release.upload_url }}
+          asset_path: target/ollama4j-${{ github.ref_name }}-sources.jar
+          asset_name: ollama4j-${{ github.ref_name }}-sources.jar
+          asset_content_type: application/x-jar
```

**.github/workflows/publish-docs.yml** (12 lines changed, vendored)

```diff
@@ -2,9 +2,8 @@
 name: Deploy Docs to GH Pages

 on:
-  # Runs on pushes targeting the default branch
-  push:
-    branches: [ "main" ]
+  release:
+    types: [ created ]

   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:
@@ -47,6 +46,13 @@ jobs:
       - run: cd docs && npm ci
       - run: cd docs && npm run build

+      - name: Find and Replace
+        uses: jacobtomlinson/gha-find-replace@v3
+        with:
+          find: "ollama4j-revision"
+          replace: ${{ github.ref_name }}
+          regex: false
+
       - name: Build with Maven
         run: mvn --file pom.xml -U clean package && cp -r ./target/apidocs/. ./docs/build/apidocs
```

**.github/workflows/publish-javadoc.yml** (deleted, 52 lines, vendored). The entire file was removed:

```yaml
# Simple workflow for deploying static content to GitHub Pages
name: Deploy Javadoc content to Pages

on:
  # Runs on pushes targeting the default branch
  push:
    branches: [ "none" ]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write
  packages: write
# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  # Single deploy job since we're just deploying
  deploy:
    runs-on: ubuntu-latest

    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 11
        uses: actions/setup-java@v3
        with:
          java-version: '11'
          distribution: 'adopt-hotspot'
          server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
          settings-path: ${{ github.workspace }} # location for the settings.xml file
      - name: Build with Maven
        run: mvn --file pom.xml -U clean package
      - name: Setup Pages
        uses: actions/configure-pages@v3
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v2
        with:
          # Upload entire repository
          path: './target/apidocs/.'
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v2
```

**.gitignore** (4 lines changed, vendored)

```diff
@@ -37,6 +37,8 @@ build/
 ### Mac OS ###
 .DS_Store
 /.idea/
-/src/main/java/io/github/amithkoujalgi/ollama4j/core/localtests/
 pom.xml.*
 release.properties
+!.idea/icon.svg
+
+src/main/java/io/github/ollama4j/localtests
```

**.idea/icon.svg** (new file, generated, 18 lines, 6.1 KiB): adds the project's SVG icon, an XML header plus vector `<path>` artwork of the Ollama4j logo on a 1478x2048 viewBox rendered at 1280x1280.

**CODE_OF_CONDUCT.md** (new file, 128 lines)

```markdown
# Contributor Covenant Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our
community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
  overall community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or
  advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
  address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.

Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
koujalgi.amith@gmail.com.
All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the
reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series
of actions.

**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within
the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.

Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.
```

**Makefile** (4 lines changed)

```diff
@@ -1,10 +1,10 @@
 build:
 	mvn -B clean install

-ut:
+unit-tests:
 	mvn clean test -Punit-tests

-it:
+integration-tests:
 	mvn clean verify -Pintegration-tests

 doxygen:
```

**README.md** (289 lines changed)

```diff
@@ -1,31 +1,53 @@
 ### Ollama4j

-<img src='https://raw.githubusercontent.com/amithkoujalgi/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon">
+<p align="center">
+  <img src='https://raw.githubusercontent.com/ollama4j/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon">
+</p>

 A Java library (wrapper/binding) for [Ollama](https://ollama.ai/) server.

-Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/).
+Find more details on the [website](https://ollama4j.github.io/ollama4j/).

-[](https://codecov.io/gh/amithkoujalgi/ollama4j)
+<div align="center">
+
+[//]: # ()
+
+[//]: # ()
+
+[//]: # ()
+
+[//]: # ()
+
+[//]: # ()
+
+[//]: # ()
+
+[](https://codecov.io/gh/ollama4j/ollama4j)
+
+</div>
+
+[//]: # ()
+
+[//]: # ()

 ## Table of Contents

 - [How does it work?](#how-does-it-work)
 - [Requirements](#requirements)
 - [Installation](#installation)
-- [API Spec](#api-spec)
+- [API Spec](https://ollama4j.github.io/ollama4j/category/apis---model-management)
-- [Demo APIs](#try-out-the-apis-with-ollama-server)
+- [Javadoc](https://ollama4j.github.io/ollama4j/apidocs/)
 - [Development](#development)
 - [Contributions](#get-involved)
 - [References](#references)
```

````diff
@@ -46,44 +68,181 @@ Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/).

 #### Requirements

-[![][ollama-shield]][ollama] **Or** [![][ollama-docker-shield]][ollama-docker]
-
-[ollama]: https://ollama.ai/
-
-[ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=just-the-message&labelColor=gray
-
-[ollama-docker]: https://hub.docker.com/r/ollama/ollama
-
-[ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=just-the-message&labelColor=gray
-
-#### Installation
+<a href="https://ollama.com/" target="_blank">
+  <img src="https://img.shields.io/badge/v0.3.0-green.svg?style=for-the-badge&labelColor=gray&label=Ollama&color=blue" alt=""/>
+</a>
+
+<table>
+<tr>
+<td>
+
+<a href="https://ollama.ai/" target="_blank">Local Installation</a>
+
+</td>
+
+<td>
+
+<a href="https://hub.docker.com/r/ollama/ollama" target="_blank">Docker Installation</a>
+
+</td>
+</tr>
+<tr>
+<td>
+
+<a href="https://ollama.com/download/Ollama-darwin.zip" target="_blank">Download for macOS</a>
+
+<a href="https://ollama.com/download/OllamaSetup.exe" target="_blank">Download for Windows</a>
+
+Install on Linux
+
+```shell
+curl -fsSL https://ollama.com/install.sh | sh
+```
+
+</td>
+<td>
+
+CPU only
+
+```shell
+docker run -d -p 11434:11434 \
+  -v ollama:/root/.ollama \
+  --name ollama \
+  ollama/ollama
+```
+
+NVIDIA GPU
+
+```shell
+docker run -d -p 11434:11434 \
+  --gpus=all \
+  -v ollama:/root/.ollama \
+  --name ollama \
+  ollama/ollama
+```
+
+</td>
+</tr>
+</table>
+
+## Installation
+
+> [!NOTE]
+> We are now publishing the artifacts to both Maven Central and GitHub package repositories.
+>
+> Track the releases [here](https://github.com/ollama4j/ollama4j/releases) and update the dependency version
+> according to your requirements.
+
+### For Maven
+
+#### Using [Maven Central](https://central.sonatype.com/)
+
+[![][ollama4j-mvn-releases-shield]][ollama4j-mvn-releases-link]
+
+[ollama4j-mvn-releases-link]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j/overview
+
+[ollama4j-mvn-releases-shield]: https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20Maven%20Central

 In your Maven project, add this dependency:

 ```xml
+
 <dependency>
-    <groupId>io.github.amithkoujalgi</groupId>
+    <groupId>io.github.ollama4j</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.57</version>
+    <version>1.0.79</version>
 </dependency>
 ```

-Latest release:
-
-[![][lib-shield]][lib]
-
-[lib]: https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j
+#### Using GitHub's Maven Package Repository
+
+[![][ollama4j-releases-shield]][ollama4j-releases-link]
+
+[ollama4j-releases-link]: https://github.com/ollama4j/ollama4j/releases
+
+[ollama4j-releases-shield]: https://img.shields.io/github/v/release/ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20GitHub%20Packages
+
+1. Add `GitHub Maven Packages` repository to your project's `pom.xml` or your `settings.xml`:
+
+```xml
+
+<repositories>
+    <repository>
+        <id>github</id>
+        <name>GitHub Apache Maven Packages</name>
+        <url>https://maven.pkg.github.com/ollama4j/ollama4j</url>
+        <releases>
+            <enabled>true</enabled>
+        </releases>
+        <snapshots>
+            <enabled>true</enabled>
+        </snapshots>
+    </repository>
+</repositories>
+```
+
+2. Add `GitHub` server to settings.xml. (Usually available at ~/.m2/settings.xml)
+
+```xml
+
+<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
+                      http://maven.apache.org/xsd/settings-1.0.0.xsd">
+    <servers>
+        <server>
+            <id>github</id>
+            <username>YOUR-USERNAME</username>
+            <password>YOUR-TOKEN</password>
+        </server>
+    </servers>
+</settings>
+```
+
+3. In your Maven project, add this dependency:
+
+```xml
+
+<dependency>
+    <groupId>io.github.ollama4j</groupId>
+    <artifactId>ollama4j</artifactId>
+    <version>1.0.79</version>
+</dependency>
+```
+
+### For Gradle
+
+1. Add the dependency
+
+```groovy
+dependencies {
+    implementation 'io.github.ollama4j:ollama4j:1.0.79'
+}
+```
+
+[//]: # (Latest release:)
+
+[//]: # ()
+
+[//]: # ()
+
+[//]: # ()
+
+[//]: # ([![][lib-shield]][lib])
+
+[lib]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j

 [lib-shield]: https://img.shields.io/badge/ollama4j-get_latest_version-blue.svg?style=just-the-message&labelColor=gray

 #### API Spec

-Find the full API specifications on the [website](https://amithkoujalgi.github.io/ollama4j/).
+> [!TIP]
+> Find the full API specifications on the [website](https://ollama4j.github.io/ollama4j/).

 #### Development

````

@@ -96,19 +255,18 @@ make build
 | 
				
			|||||||
Run unit tests:

```shell
make unit-tests
```

Run integration tests:

```shell
make integration-tests
```

#### Releases

Newer artifacts are published via the GitHub Actions CI workflow whenever a new release is created from the `main` branch.

#### Who's using Ollama4j?

@@ -119,45 +277,60 @@ Actions CI workflow.

- `ollama-translator`: A Minecraft 1.20.6 Spigot plugin that makes it easy to break language barriers by using Ollama on the
  server to translate all messages into a specific target language.
    - https://github.com/liebki/ollama-translator
- `Ollama4j Web UI`: A web UI for Ollama written in Java using Spring Boot, Vaadin, and
  Ollama4j. https://github.com/ollama4j/ollama4j-web-ui

#### Traction

[](https://star-history.com/#ollama4j/ollama4j&Date)

### Areas of improvement

- [x] Use Java naming conventions for the attributes in the request/response models instead of the
  snake-case conventions (possibly with Jackson-mapper's `@JsonProperty`)
- [x] Fix deprecated HTTP client code
- [x] Set up logging
- [x] Use Lombok
- [x] Update request body creation with Java objects
- [ ] Async APIs for images
- [ ] Add custom headers to requests
- [x] Add additional params for `ask` APIs such as:
    - [x] `options`: additional model parameters for the Modelfile, such as `temperature` -
      supported [params](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values)
    - [x] `system`: system prompt (overrides what is defined in the Modelfile)
    - [x] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile)
    - [x] `context`: the context parameter returned from a previous request, which can be used to keep a
      short conversational memory
    - [x] `stream`: add support for streaming responses from the model
- [ ] Add test cases
- [ ] Handle exceptions better (maybe throw more appropriate exceptions)

### Get Involved

<div align="center">

<a href=""></a>
<a href=""></a>
<a href=""></a>
<a href=""></a>
<a href=""></a>

</div>

[//]: # ()

[//]: # ()

[//]: # ()

[//]: # ()

[//]: # ()

Contributions are most welcome! Whether it's reporting a bug, proposing an enhancement, or helping
with code - any sort of contribution is much appreciated.

### References

- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)

### Credits

The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/)
project.

**Thanks to the amazing contributors**

<p align="center">
  <a href="https://github.com/ollama4j/ollama4j/graphs/contributors">
    <img src="https://contrib.rocks/image?repo=ollama4j/ollama4j" alt=""/>
  </a>
</p>

### Appreciate my work?

<p align="center">
  <a href="https://www.buymeacoffee.com/amithkoujalgi" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a>
</p>

@@ -11,7 +11,7 @@ Hey there, my fellow Java Developers! 🚀

I am glad to announce the release of Ollama4j, a library that unites Ollama (an LLM manager and runner) and your Java
applications! 🌐🚀

👉 GitHub Repository: Ollama4j on GitHub (https://github.com/ollama4j/ollama4j)

🌟 Key Features:

@@ -58,9 +58,9 @@ elevate your projects.

I look forward to seeing the incredible applications/projects you'll build with Ollama4j! 🌟

Find the full API spec here: https://ollama4j.github.io/ollama4j/

Find the Javadoc here: https://ollama4j.github.io/ollama4j/apidocs/

Ollama4j Docs is powered by [Docusaurus](https://docusaurus.io).

@@ -10,6 +10,8 @@ Ollama server would be setup behind a gateway/reverse proxy with basic auth.

After configuring basic authentication, all subsequent requests will include the Basic Auth header.

```java
import io.github.ollama4j.OllamaAPI;

public class Main {

    public static void main(String[] args) {
        // ...
```
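
The hunk above only shows the added import; a fuller sketch of the same idea follows. It assumes the client exposes a `setBasicAuth(username, password)` setter (as this page describes) and uses a placeholder host and made-up credentials - check the Javadoc of your ollama4j version if the method name differs.

```java
import io.github.ollama4j.OllamaAPI;

public class Main {

    public static void main(String[] args) throws Exception {
        // Placeholder host and credentials - replace with your own values.
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        // Assumption: setBasicAuth(username, password) configures the client so that
        // all subsequent requests carry the Basic Auth header.
        ollamaAPI.setBasicAuth("ollama-user", "s3cr3t-p4ssw0rd");

        System.out.println("Is Ollama server reachable: " + ollamaAPI.ping());
    }
}
```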

@@ -31,13 +31,14 @@ Link to [source](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md

Also, see how to set those Ollama parameters using the `OptionsBuilder`
from the [javadoc](https://ollama4j.github.io/ollama4j/apidocs/io/github/ollama4j/ollama4j/core/utils/OptionsBuilder.html).

## Build an empty `Options` object

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.OptionsBuilder;

public class Main {
    // ...
```

@@ -55,8 +56,8 @@ public class Main {

## Build the `Options` object with values

```java
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.OptionsBuilder;

public class Main {
    // ...
```
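
Since both hunks above are truncated, here is a hedged end-to-end sketch of building `Options` objects. The empty builder (`new OptionsBuilder().build()`) appears verbatim in other pages of these docs; the individual setter names (`setTemperature`, `setTopK`, `setTopP`) are assumptions and may differ slightly between versions - check the `OptionsBuilder` Javadoc.

```java
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.OptionsBuilder;

public class Main {

    public static void main(String[] args) {
        // An empty Options object - all model parameters keep their defaults.
        Options defaults = new OptionsBuilder().build();

        // Options with a few values set. The setter names here are assumptions;
        // verify them against the OptionsBuilder Javadoc of your ollama4j version.
        Options custom = new OptionsBuilder()
                .setTemperature(0.8f)
                .setTopK(40)
                .setTopP(0.9f)
                .build();

        // The resulting Options instance is what the generate APIs accept,
        // e.g. generateWithImageFiles(model, prompt, images, custom).
        System.out.println(defaults + " / " + custom);
    }
}
```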

@@ -7,6 +7,8 @@ sidebar_position: 3

This API lets you check the reachability of the Ollama server.

```java
import io.github.ollama4j.OllamaAPI;

public class Main {

    public static void main(String[] args) {
        // ...
```
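
For completeness, a minimal sketch of the full ping call; the host value is a placeholder for your own server:

```java
import io.github.ollama4j.OllamaAPI;

public class Main {

    public static void main(String[] args) throws Exception {
        // Assumption: a local Ollama server.
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        // true when the server responds to the ping request
        boolean isReachable = ollamaAPI.ping();
        System.out.println("Is Ollama server reachable: " + isReachable);
    }
}
```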
							
								
								
									
docs/docs/apis-extras/ps.md (new file, 30 lines)

@@ -0,0 +1,30 @@

---
sidebar_position: 4
---

# PS

This API provides a list of running models and details about each model currently loaded into memory.

This API corresponds to the [PS](https://github.com/ollama/ollama/blob/main/docs/api.md#list-running-models) API.

```java
package io.github.ollama4j.localtests;

import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.ps.ModelsProcessResponse;

import java.io.IOException;

public class Main {

    public static void main(String[] args) throws Exception {

        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");

        ModelsProcessResponse response = ollamaAPI.ps();

        System.out.println(response);
    }
}
```

@@ -7,6 +7,8 @@ sidebar_position: 2

This API lets you set the request timeout for the Ollama client.

```java
import io.github.ollama4j.OllamaAPI;

public class Main {

  public static void main(String[] args) {
      // ...
```
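
A short sketch of the complete call: `setRequestTimeoutSeconds` is used the same way in the async example later in these docs, and the timeout value below is arbitrary.

```java
import io.github.ollama4j.OllamaAPI;

public class Main {

  public static void main(String[] args) {
      String host = "http://localhost:11434/";
      OllamaAPI ollamaAPI = new OllamaAPI(host);

      // Requests taking longer than this many seconds will time out.
      ollamaAPI.setRequestTimeoutSeconds(10);
  }
}
```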

@@ -9,6 +9,8 @@ This API lets you set the verbosity of the Ollama client.

## Try asking a question about the model.

```java
import io.github.ollama4j.OllamaAPI;

public class Main {

    public static void main(String[] args) {
        // ...
```
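
A minimal sketch for toggling verbosity; the `setVerbose(boolean)` setter name is an assumption based on this page's description and may differ in your version of the library.

```java
import io.github.ollama4j.OllamaAPI;

public class Main {

    public static void main(String[] args) {
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        // Assumption: setVerbose(true) makes the client log request/response details.
        ollamaAPI.setVerbose(true);
    }
}
```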

@@ -10,6 +10,13 @@ information using the history of already asked questions and the respective answ

## Create a new conversation and use chat history to augment follow up questions

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.types.OllamaModelType;

public class Main {

    public static void main(String[] args) {
@@ -20,7 +27,7 @@ public class Main {
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);

        // create first user question
        OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
                .build();

        // start conversation with model
        // ...
```

@@ -75,9 +82,44 @@ You will get a response similar to:

```
]
```

## Conversational loop

```java
public class Main {

    public static void main(String[] args) {

        OllamaAPI ollamaAPI = new OllamaAPI();
        ollamaAPI.setRequestTimeoutSeconds(60);

        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("<your-model>");

        OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "<your-first-message>").build();
        OllamaChatResult initialChatResult = ollamaAPI.chat(requestModel);
        System.out.println(initialChatResult.getResponse());

        List<OllamaChatMessage> history = initialChatResult.getChatHistory();

        while (true) {
            OllamaChatResult chatResult = ollamaAPI.chat(builder.withMessages(history).withMessage(OllamaChatMessageRole.USER, "<your-new-message>").build());
            System.out.println(chatResult.getResponse());
            history = chatResult.getChatHistory();
        }
    }
}
```

## Create a conversation where the answer is streamed

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.models.generate.OllamaStreamHandler;

public class Main {

    public static void main(String[] args) {
@@ -86,7 +128,7 @@ public class Main {

        OllamaAPI ollamaAPI = new OllamaAPI(host);
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
        OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER,
                        "What is the capital of France? And what's France's connection with Mona Lisa?")
                .build();
        // ...
```

@@ -113,7 +155,13 @@ You will get a response similar to:

## Use a simple Console Output Stream Handler

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.impl.ConsoleOutputStreamHandler;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.types.OllamaModelType;

public class Main {
    public static void main(String[] args) throws Exception {
@@ -121,7 +169,7 @@ public class Main {
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
        OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
                .build();
        OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
        ollamaAPI.chat(requestModel, streamHandler);
        // ...
```

@@ -132,6 +180,14 @@ public class Main {

## Create a new conversation with individual system prompt

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.types.OllamaModelType;

public class Main {

    public static void main(String[] args) {
@@ -142,7 +198,7 @@ public class Main {
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);

        // create request with system-prompt (overriding the model defaults) and user question
        OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
                .withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
                .build();
        // ...
```

@@ -162,6 +218,16 @@ You will get a response similar to:

## Create a conversation about an image (requires model with image recognition skills)

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.types.OllamaModelType;

import java.io.File;
import java.util.List;

public class Main {

    public static void main(String[] args) {
@@ -172,9 +238,10 @@ public class Main {
        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA);

        // Load image from file and attach it to the user message (alternatively, images can also be added via URL)
        OllamaChatRequest requestModel =
                builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
                        List.of(
                                new File("/path/to/image"))).build();

        OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
        System.out.println("First answer: " + chatResult.getResponse());
        // ...
```

@@ -5,38 +5,44 @@ sidebar_position: 2

# Generate - Async

This API lets you ask questions to the LLMs in an asynchronous way.
This is particularly helpful when you want to issue a generate request to the LLM and collect the response in the
background (such as on a separate thread) without blocking your code until the response arrives from the model.

This API corresponds to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
import io.github.ollama4j.types.OllamaModelType;

public class Main {

    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);
        ollamaAPI.setRequestTimeoutSeconds(60);
        String prompt = "List all cricket world cup teams of 2019.";
        OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync(OllamaModelType.LLAMA3, prompt, false);

        // Set the poll interval according to your needs:
        // the smaller the poll interval, the more frequently you receive the tokens.
        int pollIntervalMilliseconds = 1000;

        while (true) {
            String tokens = streamer.getStream().poll();
            System.out.print(tokens);
            if (!streamer.isAlive()) {
                break;
            }
            Thread.sleep(pollIntervalMilliseconds);
        }

        System.out.println("\n------------------------");
        System.out.println("Complete Response:");
        System.out.println("------------------------");

        System.out.println(streamer.getCompleteResponse());
    }
}
```

@@ -12,6 +12,10 @@ Parameters:

- `prompt`: text to generate embeddings for

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;

import java.util.List;

public class Main {

    public static void main(String[] args) {
        // ...
```
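
The embedding call itself is not shown in the hunk above; here is a hedged sketch of the full flow, assuming a `generateEmbeddings(model, prompt)` method that returns the embedding vector as a list of doubles (check your version's Javadoc for the exact signature).

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;

import java.util.List;

public class Main {

    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);

        // Assumption: generateEmbeddings(model, prompt) returns the embedding vector.
        List<Double> embeddings = ollamaAPI.generateEmbeddings(OllamaModelType.LLAMA2,
                "Here is an article about llamas...");

        System.out.println("Embedding size: " + embeddings.size());
    }
}
```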

@@ -1,12 +1,12 @@

---
sidebar_position: 4
---

# Generate - With Image Files

This API lets you ask questions along with image files to the LLMs.
This API corresponds to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.

:::note

@@ -22,6 +22,14 @@ If you have this image downloaded and you pass the path to the downloaded image

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;

import java.io.File;
import java.util.List;

public class Main {

    public static void main(String[] args) {
@@ -32,7 +40,9 @@ public class Main {
        OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA,
                "What's in this image?",
                List.of(
                        new File("/path/to/image")),
                new OptionsBuilder().build()
        );
        System.out.println(result.getResponse());
    }
}
```

@@ -1,12 +1,12 @@

---
sidebar_position: 5
---

# Generate - With Image URLs

This API lets you ask questions along with image URLs to the LLMs.
This API corresponds to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.

:::note

@@ -22,6 +22,13 @@ Passing the link of this image the following code:

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;

import java.util.List;

public class Main {

    public static void main(String[] args) {
@@ -32,7 +39,9 @@ public class Main {
        OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA,
                "What's in this image?",
                List.of(
                        "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
                new OptionsBuilder().build()
        );
        System.out.println(result.getResponse());
    }
}
```
							
								
								
									
docs/docs/apis-generate/generate-with-tools.md (new file, 372 lines)

@@ -0,0 +1,372 @@

---
sidebar_position: 3
---

# Generate - With Tools

This API lets you perform [function calling](https://docs.mistral.ai/capabilities/function_calling/) using LLMs in a
synchronous way.
This API corresponds to
the [generate](https://github.com/ollama/ollama/blob/main/docs/api.md#request-raw-mode) API with `raw` mode.

:::note

This is only an experimental implementation and has a very basic design.

Currently, it is built and tested for [Mistral's latest model](https://ollama.com/library/mistral) only. We could redesign
this in the future if tooling is supported for more models with a generic interaction standard from Ollama.

:::

### Function Calling/Tools

Assume you want to call a method in your code based on the response generated from the model.
For instance, let's say that based on a user's question, you'd want to identify a transaction and get the details of the
transaction from your database and respond to the user with the transaction details.

You could do that with ease with the `function calling` capabilities of the models by registering your `tools`.

### Create Functions

We can create static functions as our tools.

This function takes the arguments `location` and `fuelType`, performs an operation with these arguments, and returns
the fuel price.

```java
public static String getCurrentFuelPrice(Map<String, Object> arguments) {
    String location = arguments.get("location").toString();
    String fuelType = arguments.get("fuelType").toString();
    return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
}
```

This function takes the argument `city`, performs an operation with the argument, and returns the weather for that
location.

```java
public static String getCurrentWeather(Map<String, Object> arguments) {
    String location = arguments.get("city").toString();
    return "Currently " + location + "'s weather is nice.";
}
```

Another way to create our tools is by creating classes that implement `ToolFunction`.

This function takes the argument `employee-name`, performs an operation with the argument, and returns employee
details.

```java
class DBQueryFunction implements ToolFunction {
    @Override
    public Object apply(Map<String, Object> arguments) {
        // perform DB operations here
        return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
    }
}
```

### Define Tool Specifications

Let's define a sample tool specification called **Fuel Price Tool** for getting the current fuel price.

- Specify the function `name`, `description`, and `required` properties (`location` and `fuelType`).
- Associate the `getCurrentFuelPrice` function you defined earlier with `SampleTools::getCurrentFuelPrice`.

```java
Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
        .functionName("current-fuel-price")
        .functionDescription("Get current fuel price")
        .properties(
                new Tools.PropsBuilder()
                        .withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
                        .withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
                        .build()
        )
        .toolDefinition(SampleTools::getCurrentFuelPrice)
        .build();
```

Let's also define a sample tool specification called **Weather Tool** for getting the current weather.

- Specify the function `name`, `description`, and `required` property (`city`).
- Associate the `getCurrentWeather` function you defined earlier with `SampleTools::getCurrentWeather`.

```java
Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
        .functionName("current-weather")
        .functionDescription("Get current weather")
        .properties(
                new Tools.PropsBuilder()
                        .withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
                        .build()
        )
        .toolDefinition(SampleTools::getCurrentWeather)
        .build();
```

Let's also define a sample tool specification called **DBQueryFunction** for getting employee details from the database.

- Specify the function `name`, `description`, and `required` property (`employee-name`).
- Associate the `DBQueryFunction` tool function you defined earlier via `new DBQueryFunction()`.

```java
Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
        .functionName("get-employee-details")
        .functionDescription("Get employee details from the database")
        .properties(
                new Tools.PropsBuilder()
                        .withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
                        .withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
                        .withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
                        .build()
        )
        .toolDefinition(new DBQueryFunction())
        .build();
```

### Register the Tools

Register the defined tools (`fuel price`, `weather`, and `employee details`) with the OllamaAPI.

```java
ollamaAPI.registerTool(fuelPriceToolSpecification);
ollamaAPI.registerTool(weatherToolSpecification);
ollamaAPI.registerTool(databaseQueryToolSpecification);
```

### Create prompt with Tools

`Prompt 1`: Create a prompt asking for the petrol price in Bengaluru using the defined fuel price and weather tools.

```java
String prompt1 = new Tools.PromptBuilder()
                .withToolSpecification(fuelPriceToolSpecification)
                .withToolSpecification(weatherToolSpecification)
                .withPrompt("What is the petrol price in Bengaluru?")
                .build();
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, new OptionsBuilder().build());
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
    System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
}
```

Now, fire away your question to the model.

You will get a response similar to:

::::tip[LLM Response]

[Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L

::::

`Prompt 2`: Create a prompt asking for the current weather in Bengaluru using the same tools.

```java
String prompt2 = new Tools.PromptBuilder()
                .withToolSpecification(fuelPriceToolSpecification)
                .withToolSpecification(weatherToolSpecification)
                .withPrompt("What is the current weather in Bengaluru?")
                .build();
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, new OptionsBuilder().build());
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
    System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
}
```

Again, fire away your question to the model.

You will get a response similar to:

::::tip[LLM Response]

[Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice.

::::

`Prompt 3`: Create a prompt asking for employee details using the defined database fetcher tool.

```java
String prompt3 = new Tools.PromptBuilder()
                .withToolSpecification(fuelPriceToolSpecification)
                .withToolSpecification(weatherToolSpecification)
                .withToolSpecification(databaseQueryToolSpecification)
                .withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
                .build();
OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt3, new OptionsBuilder().build());
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
    System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
}
```

Again, fire away your question to the model.

You will get a response similar to:

::::tip[LLM Response]

[Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name:
Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`

::::

### Full Example

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.exceptions.ToolInvocationException;
import io.github.ollama4j.tools.OllamaToolsResult;
import io.github.ollama4j.tools.ToolFunction;
import io.github.ollama4j.tools.Tools;
import io.github.ollama4j.utils.OptionsBuilder;

import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import java.util.UUID;

public class FunctionCallingWithMistralExample {
    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);
        ollamaAPI.setRequestTimeoutSeconds(60);

        String model = "mistral";

        Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
                .functionName("current-fuel-price")
                .functionDescription("Get current fuel price")
                .properties(
                        new Tools.PropsBuilder()
                                .withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
                                .withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build())
                                .build()
                )
                .toolDefinition(SampleTools::getCurrentFuelPrice)
                .build();

        Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
                .functionName("current-weather")
                .functionDescription("Get current weather")
                .properties(
                        new Tools.PropsBuilder()
                                .withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build())
                                .build()
                )
                .toolDefinition(SampleTools::getCurrentWeather)
                .build();

        Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
                .functionName("get-employee-details")
                .functionDescription("Get employee details from the database")
                .properties(
                        new Tools.PropsBuilder()
                                .withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
                                .withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
                                .withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
                                .build()
                )
                .toolDefinition(new DBQueryFunction())
                .build();

        ollamaAPI.registerTool(fuelPriceToolSpecification);
        ollamaAPI.registerTool(weatherToolSpecification);
        ollamaAPI.registerTool(databaseQueryToolSpecification);
					        ollamaAPI.registerTool(databaseQueryToolSpecification);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        String prompt1 = new Tools.PromptBuilder()
 | 
				
			||||||
 | 
					                .withToolSpecification(fuelPriceToolSpecification)
 | 
				
			||||||
 | 
					                .withToolSpecification(weatherToolSpecification)
 | 
				
			||||||
 | 
					                .withPrompt("What is the petrol price in Bengaluru?")
 | 
				
			||||||
 | 
					                .build();
 | 
				
			||||||
 | 
					        ask(ollamaAPI, model, prompt1);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        String prompt2 = new Tools.PromptBuilder()
 | 
				
			||||||
 | 
					                .withToolSpecification(fuelPriceToolSpecification)
 | 
				
			||||||
 | 
					                .withToolSpecification(weatherToolSpecification)
 | 
				
			||||||
 | 
					                .withPrompt("What is the current weather in Bengaluru?")
 | 
				
			||||||
 | 
					                .build();
 | 
				
			||||||
 | 
					        ask(ollamaAPI, model, prompt2);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        String prompt3 = new Tools.PromptBuilder()
 | 
				
			||||||
 | 
					                .withToolSpecification(fuelPriceToolSpecification)
 | 
				
			||||||
 | 
					                .withToolSpecification(weatherToolSpecification)
 | 
				
			||||||
 | 
					                .withToolSpecification(databaseQueryToolSpecification)
 | 
				
			||||||
 | 
					                .withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
 | 
				
			||||||
 | 
					                .build();
 | 
				
			||||||
 | 
					        ask(ollamaAPI, model, prompt3);
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException {
 | 
				
			||||||
 | 
					        OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, new OptionsBuilder().build());
 | 
				
			||||||
 | 
					        for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
 | 
				
			||||||
 | 
					            System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
 | 
				
			||||||
 | 
					        }
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					class SampleTools {
 | 
				
			||||||
 | 
					    public static String getCurrentFuelPrice(Map<String, Object> arguments) {
 | 
				
			||||||
 | 
					        // Get details from fuel price API
 | 
				
			||||||
 | 
					        String location = arguments.get("location").toString();
 | 
				
			||||||
 | 
					        String fuelType = arguments.get("fuelType").toString();
 | 
				
			||||||
 | 
					        return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    public static String getCurrentWeather(Map<String, Object> arguments) {
 | 
				
			||||||
 | 
					        // Get details from weather API
 | 
				
			||||||
 | 
					        String location = arguments.get("city").toString();
 | 
				
			||||||
 | 
					        return "Currently " + location + "'s weather is nice.";
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					class DBQueryFunction implements ToolFunction {
 | 
				
			||||||
 | 
					    @Override
 | 
				
			||||||
 | 
					    public Object apply(Map<String, Object> arguments) {
 | 
				
			||||||
 | 
					        // perform DB operations here
 | 
				
			||||||
 | 
					        return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					```
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					Run this full example and you will get a response similar to:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					::::tip[LLM Response]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					[Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					[Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					[Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name:
 | 
				
			||||||
 | 
					Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					::::
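
Since a registered tool is ultimately just a `ToolFunction` over a map of arguments, you can also exercise an implementation directly, without involving the model at all. Below is a minimal sketch using the `DBQueryFunction` from the full example above; the map keys simply mirror the property names registered in the tool specification.

```java
import java.util.HashMap;
import java.util.Map;

public class DBQueryFunctionSmokeTest {
    public static void main(String[] args) {
        // Build the argument map the model would normally supply via a tool call.
        Map<String, Object> arguments = new HashMap<>();
        arguments.put("employee-name", "Rahul Kumar");
        arguments.put("employee-address", "King St, Hyderabad, India");
        arguments.put("employee-phone", "9876543210");

        // Invoke the tool directly; DBQueryFunction is the class from the full example above.
        Object result = new DBQueryFunction().apply(arguments);
        System.out.println(result);
    }
}
```

This is handy for checking the formatting of a tool's result before wiring it into `generateWithTools`.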
### Potential Improvements

Instead of explicitly registering `ollamaAPI.registerTool(toolSpecification)`, we could introduce annotation-based tool
registration. For example:

```java
@ToolSpec(name = "current-fuel-price", desc = "Get current fuel price")
public String getCurrentFuelPrice(Map<String, Object> arguments) {
    String location = arguments.get("location").toString();
    String fuelType = arguments.get("fuelType").toString();
    return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
}
```

Instead of passing a map of args `Map<String, Object> arguments` to the tool functions, we could support passing
specific args separately with their data types. For example:

```java
public String getCurrentFuelPrice(String location, String fuelType) {
    return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
}
```

Another improvement would be to update the async/chat APIs with support for tool-based generation, as sketched below.
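
To make that last item concrete, here is a rough, purely illustrative sketch of the shape such an API could take. `OllamaChatWithTools` and its method are invented names for this proposal and are not part of ollama4j today.

```java
import java.util.Map;

// Hypothetical shape for tool support in the chat API (NOT part of ollama4j today).
// The idea: a chat turn may trigger registered tools; the implementation would run the
// matching ToolFunction and feed its result back into the conversation before answering.
interface OllamaChatWithTools {

    // model: the chat model to use; userMessage: the latest user turn;
    // options: extra model parameters, e.g. built via an options builder.
    String chatWithTools(String model, String userMessage, Map<String, Object> options);
}
```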
@@ -5,17 +5,22 @@ sidebar_position: 1
 # Generate - Sync
 
 This API lets you ask questions to the LLMs in a synchronous way.
-These APIs correlate to
-the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs.
+This API corresponds to
+the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
 
 Use the `OptionBuilder` to build the `Options` object
 with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
 Refer
-to [this](/docs/apis-extras/options-builder).
+to [this](/apis-extras/options-builder).
 
 ## Try asking a question about the model.
 
 ```java
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.models.response.OllamaResult;
+import io.github.ollama4j.types.OllamaModelType;
+import io.github.ollama4j.utils.OptionsBuilder;
+
 public class Main {
 
     public static void main(String[] args) {
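
(Aside: the hunk above points readers at the `OptionsBuilder` for extra generation parameters. Below is a minimal sketch of passing such options to `generate(...)`. The four-argument signature with the `raw` flag is taken from the prompt-builder change later in this compare, while `setTemperature(...)` is assumed from the options-builder docs page rather than from this diff.)

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.utils.OptionsBuilder;

public class GenerateWithOptions {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Same flag as in the prompt-builder change further down in this compare.
        boolean raw = false;

        // setTemperature(...) is an assumed OptionsBuilder setter; swap in whichever
        // extra parameters your model needs.
        OllamaResult result = ollamaAPI.generate(
                "mistral",
                "Why is the sky blue?",
                raw,
                new OptionsBuilder().setTemperature(0.8f).build());

        System.out.println(result.getResponse());
    }
}
```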
@@ -44,6 +49,11 @@ You will get a response similar to:
 ## Try asking a question, receiving the answer streamed
 
 ```java
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.models.response.OllamaResult;
+import io.github.ollama4j.models.generate.OllamaStreamHandler;
+import io.github.ollama4j.utils.OptionsBuilder;
+
 public class Main {
 
     public static void main(String[] args) {
@@ -61,10 +71,11 @@ public class Main {
                 "What is the capital of France? And what's France's connection with Mona Lisa?",
                 new OptionsBuilder().build(), streamHandler);
+
-        System.out.println("Full response: " +result.getResponse());
+        System.out.println("Full response: " + result.getResponse());
     }
 }
 ```
 
 You will get a response similar to:
 
 > The
@@ -79,6 +90,11 @@ You will get a response similar to:
 ## Try asking a question from general topics.
 
 ```java
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.models.response.OllamaResult;
+import io.github.ollama4j.types.OllamaModelType;
+import io.github.ollama4j.utils.OptionsBuilder;
+
 public class Main {
 
     public static void main(String[] args) {
@@ -122,6 +138,12 @@ You'd then get a response from the model:
 ## Try asking for a Database query for your data schema.
 
 ```java
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.models.response.OllamaResult;
+import io.github.ollama4j.types.OllamaModelType;
+import io.github.ollama4j.utils.OptionsBuilder;
+import io.github.ollama4j.utils.SamplePrompts;
+
 public class Main {
 
     public static void main(String[] args) {
@@ -140,7 +162,7 @@ public class Main {
 ```
 
 _Note: Here I've used
-a [sample prompt](https://github.com/amithkoujalgi/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
+a [sample prompt](https://github.com/ollama4j/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
 containing a database schema from within this library for demonstration purposes._
 
 You'd then get a response from the model:
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 5
+sidebar_position: 6
 ---
 
 # Prompt Builder
@@ -8,13 +8,13 @@ This is designed for prompt engineering. It allows you to easily build the promp
 inferences.
 
 ```java
-import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
-import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
-import io.github.amithkoujalgi.ollama4j.core.utils.PromptBuilder;
-
-public class AskPhi {
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.models.response.OllamaResult;
+import io.github.ollama4j.types.OllamaModelType;
+import io.github.ollama4j.utils.OptionsBuilder;
+import io.github.ollama4j.utils.PromptBuilder;
+
+public class Main {
     public static void main(String[] args) throws Exception {
 
         String host = "http://localhost:11434/";
@@ -42,7 +42,8 @@ public class AskPhi {
                         .addSeparator()
                         .add("How do I read a file in Go and print its contents to stdout?");
 
-        OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build());
+        boolean raw = false;
+        OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), raw, new OptionsBuilder().build());
         System.out.println(response.getResponse());
     }
 }
@@ -9,6 +9,8 @@ This API lets you create a custom model on the Ollama server.
 ### Create a model from an existing Modelfile in the Ollama server
 
 ```java title="CreateModel.java"
+import io.github.ollama4j.OllamaAPI;
+
 public class CreateModel {
 
     public static void main(String[] args) {
@@ -7,6 +7,8 @@ sidebar_position: 5
 This API lets you create a delete a model from the Ollama server.
 
 ```java title="DeleteModel.java"
+import io.github.ollama4j.OllamaAPI;
+
 public class Main {
 
     public static void main(String[] args) {
@@ -7,6 +7,10 @@ sidebar_position: 3
 This API lets you get the details of a model on the Ollama server.
 
 ```java title="GetModelDetails.java"
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.models.response.ModelDetail;
+import io.github.ollama4j.types.OllamaModelType;
+
 public class Main {
 
     public static void main(String[] args) {
@@ -7,6 +7,11 @@ sidebar_position: 1
 This API lets you list available models on the Ollama server.
 
 ```java title="ListModels.java"
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.models.response.Model;
+
+import java.util.List;
+
 public class ListModels {
 
     public static void main(String[] args) {
@@ -7,6 +7,9 @@ sidebar_position: 2
 This API lets you pull a model on the Ollama server.
 
 ```java title="PullModel.java"
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.types.OllamaModelType;
+
 public class Main {
 
     public static void main(String[] args) {
@@ -78,13 +78,13 @@ Add the dependency to your project's `pom.xml`.
 ```xml
 
 <dependency>
-    <groupId>io.github.amithkoujalgi</groupId>
+    <groupId>io.github.ollama4j</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.27</version>
+    <version>1.0.78</version>
 </dependency>
 ```
 
-Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j).
+Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j).
 
 You might want to include an implementation of [SL4J](https://www.slf4j.org/) logger in your `pom.xml` file. For
 example,
@@ -116,6 +116,26 @@ or use other suitable implementations.
 Create a new Java class in your project and add this code.
 
 ```java
+import io.github.ollama4j.OllamaAPI;
+
+public class OllamaAPITest {
+
+    public static void main(String[] args) {
+        OllamaAPI ollamaAPI = new OllamaAPI();
+
+        boolean isOllamaServerReachable = ollamaAPI.ping();
+
+        System.out.println("Is Ollama server running: " + isOllamaServerReachable);
+    }
+}
+```
+This uses the default Ollama host as `http://localhost:11434`.
+
+Specify a different Ollama host that you want to connect to.
+
+```java
+import io.github.ollama4j.OllamaAPI;
+
 public class OllamaAPITest {
 
     public static void main(String[] args) {
@@ -127,7 +147,7 @@ public class OllamaAPITest {
 
         boolean isOllamaServerReachable = ollamaAPI.ping();
 
-        System.out.println("Is Ollama server alive: " + isOllamaServerReachable);
+        System.out.println("Is Ollama server running: " + isOllamaServerReachable);
     }
 }
 ```
@@ -20,7 +20,7 @@ const config = {
 
     // GitHub pages deployment config.
     // If you aren't using GitHub pages, you don't need these.
-    organizationName: 'amithkoujalgi', // Usually your GitHub org/user name.
+    organizationName: 'ollama4j', // Usually your GitHub org/user name.
     projectName: 'ollama4j', // Usually your repo name.
 
     onBrokenLinks: 'throw',
@@ -40,22 +40,28 @@ const config = {
             /** @type {import('@docusaurus/preset-classic').Options} */
             ({
                 docs: {
+                    path: 'docs',
+                    routeBasePath: '', // change this to any URL route you'd want. For example: `home` - if you want /home/intro.
                     sidebarPath: './sidebars.js',
                     // Please change this to your repo.
                     // Remove this to remove the "edit this page" links.
                     editUrl:
-                        'https://github.com/amithkoujalgi/ollama4j/blob/main/docs',
+                        'https://github.com/ollama4j/ollama4j/blob/main/docs',
                 },
                 blog: {
                     showReadingTime: true,
                     // Please change this to your repo.
                     // Remove this to remove the "edit this page" links.
                     editUrl:
-                        'https://github.com/amithkoujalgi/ollama4j/blob/main/docs',
+                        'https://github.com/ollama4j/ollama4j/blob/main/docs',
                 },
                 theme: {
                     customCss: './src/css/custom.css',
                 },
+                gtag: {
+                    trackingID: 'G-G7FLH6FNDC',
+                    anonymizeIP: false,
+                },
             }),
         ],
     ],
@@ -78,11 +84,11 @@ const config = {
                         position: 'left',
                         label: 'Docs',
                     },
-                    {to: 'https://amithkoujalgi.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
-                    {to: 'https://amithkoujalgi.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'},
+                    {to: 'https://ollama4j.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'},
+                    {to: 'https://ollama4j.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'},
                     {to: '/blog', label: 'Blog', position: 'left'},
                     {
-                        href: 'https://github.com/amithkoujalgi/ollama4j',
+                        href: 'https://github.com/ollama4j/ollama4j',
                         label: 'GitHub',
                         position: 'right',
                     },
@@ -96,7 +102,7 @@ const config = {
                         items: [
                             {
                                 label: 'Tutorial',
-                                to: '/docs/intro',
+                                to: '/intro',
                             },
                         ],
                     },
@@ -122,7 +128,7 @@ const config = {
                             },
                             {
                                 label: 'GitHub',
-                                href: 'https://github.com/amithkoujalgi/ollama4j',
+                                href: 'https://github.com/ollama4j/ollama4j',
                             },
                         ],
                     },
docs/package-lock.json (generated, 1948 lines changed): file diff suppressed because it is too large.
@@ -14,9 +14,10 @@
     "write-heading-ids": "docusaurus write-heading-ids"
   },
   "dependencies": {
-    "@docusaurus/core": "3.0.1",
-    "@docusaurus/preset-classic": "3.0.1",
-    "@docusaurus/theme-mermaid": "^3.0.1",
+    "@docusaurus/core": "^3.4.0",
+    "@docusaurus/plugin-google-gtag": "^3.4.0",
+    "@docusaurus/preset-classic": "^3.4.0",
+    "@docusaurus/theme-mermaid": "^3.4.0",
     "@mdx-js/react": "^3.0.0",
     "clsx": "^2.0.0",
     "prism-react-renderer": "^2.3.0",
@@ -24,8 +25,8 @@
     "react-dom": "^18.0.0"
   },
   "devDependencies": {
-    "@docusaurus/module-type-aliases": "3.0.1",
-    "@docusaurus/types": "3.0.1"
+    "@docusaurus/module-type-aliases": "^3.4.0",
+    "@docusaurus/types": "^3.4.0"
   },
   "browserslist": {
     "production": [
@@ -19,7 +19,7 @@ function HomepageHeader() {
             <div className={styles.buttons}>
                 <Link
                     className="button button--secondary button--lg"
-                    to="/docs/intro">
+                    to="/intro">
                     Getting Started
                 </Link>
             </div>
pom.xml (107 lines changed):
@@ -1,14 +1,16 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
-    <groupId>io.github.amithkoujalgi</groupId>
+    <groupId>io.github.ollama4j</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.70</version>
+    <version>ollama4j-revision</version>
 
     <name>Ollama4j</name>
     <description>Java library for interacting with Ollama API.</description>
-    <url>https://github.com/amithkoujalgi/ollama4j</url>
+    <url>https://github.com/ollama4j/ollama4j</url>
+    <packaging>jar</packaging>
 
     <properties>
         <maven.compiler.source>11</maven.compiler.source>
@@ -31,15 +33,15 @@
     <licenses>
         <license>
             <name>MIT License</name>
-            <url>https://raw.githubusercontent.com/amithkoujalgi/ollama4j/main/LICENSE</url>
+            <url>https://raw.githubusercontent.com/ollama4j/ollama4j/main/LICENSE</url>
         </license>
     </licenses>
 
     <scm>
-        <connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
-        <developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
-        <url>https://github.com/amithkoujalgi/ollama4j</url>
-        <tag>v1.0.70</tag>
+        <connection>scm:git:git@github.com:ollama4j/ollama4j.git</connection>
+        <developerConnection>scm:git:https://github.com/ollama4j/ollama4j.git</developerConnection>
+        <url>https://github.com/ollama4j/ollama4j</url>
+        <tag>ollama4j-revision</tag>
     </scm>
 
     <build>
@@ -70,27 +72,7 @@
                     </execution>
                 </executions>
             </plugin>
-            <!--            <plugin>-->
-            <!--                <groupId>org.apache.maven.plugins</groupId>-->
-            <!--                <artifactId>maven-gpg-plugin</artifactId>-->
-            <!--                <version>1.5</version>-->
-            <!--                <executions>-->
-            <!--                    <execution>-->
-            <!--                        <id>sign-artifacts</id>-->
-            <!--                        <phase>verify</phase>-->
-            <!--                        <goals>-->
-            <!--                            <goal>sign</goal>-->
-            <!--                        </goals>-->
-            <!--                        <configuration>-->
-            <!--                            <!– This is necessary for gpg to not try to use the pinentry programs –>-->
-            <!--                            <gpgArguments>-->
-            <!--                                <arg>--pinentry-mode</arg>-->
-            <!--                                <arg>loopback</arg>-->
-            <!--                            </gpgArguments>-->
-            <!--                        </configuration>-->
-            <!--                    </execution>-->
-            <!--                </executions>-->
-            <!--            </plugin>-->
+
             <!-- Surefire Plugin for Unit Tests -->
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
@@ -127,18 +109,33 @@
                     </execution>
                 </executions>
             </plugin>
 
+
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-release-plugin</artifactId>
-                <version>3.0.1</version>
-                <configuration>
-                    <!--                    <goals>install</goals>-->
-                    <tagNameFormat>v@{project.version}</tagNameFormat>
-                </configuration>
+                <artifactId>maven-gpg-plugin</artifactId>
+                <version>1.5</version>
+                <executions>
+                    <execution>
+                        <id>sign-artifacts</id>
+                        <phase>verify</phase>
+                        <goals>
+                            <goal>sign</goal>
+                        </goals>
+                    </execution>
+                </executions>
             </plugin>
 
         </plugins>
     </build>
 
+    <repositories>
+        <repository>
+            <id>gitea</id>
+            <url>https://gitea.seeseepuff.be/api/packages/seeseemelk/maven</url>
+        </repository>
+    </repositories>
+
     <dependencies>
         <dependency>
             <groupId>org.projectlombok</groupId>
@@ -159,7 +156,7 @@
         <dependency>
             <groupId>ch.qos.logback</groupId>
             <artifactId>logback-classic</artifactId>
-            <version>1.4.12</version>
+            <version>1.5.6</version>
             <scope>test</scope>
         </dependency>
         <dependency>
@@ -189,16 +186,42 @@
 
     <distributionManagement>
         <snapshotRepository>
-            <id>ossrh</id>
-            <url>https://s01.oss.sonatype.org/content/repositories/snapshots</url>
+            <id>gitea</id>
+            <url>https://gitea.seeseepuff.be/api/packages/seeseemelk/maven</url>
         </snapshotRepository>
         <repository>
-            <id>ossrh</id>
-            <url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2</url>
+            <id>gitea</id>
+            <url>https://gitea.seeseepuff.be/api/packages/seeseemelk/maven</url>
         </repository>
     </distributionManagement>
 
     <profiles>
+        <profile>
+            <id>ossrh</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+            </activation>
+            <properties>
+                <gpg.executable>gpg2</gpg.executable>
+                <test.env>unit</test.env>
+                <skipUnitTests>false</skipUnitTests>
+                <skipIntegrationTests>true</skipIntegrationTests>
+            </properties>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.sonatype.central</groupId>
+                        <artifactId>central-publishing-maven-plugin</artifactId>
+                        <version>0.5.0</version>
+                        <extensions>true</extensions>
+                        <configuration>
+                            <publishingServerId>mvn-repo-id</publishingServerId>
+                            <autoPublish>true</autoPublish>
+                        </configuration>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
         <profile>
             <id>unit-tests</id>
             <properties>
@@ -207,7 +230,7 @@
                 <skipIntegrationTests>true</skipIntegrationTests>
             </properties>
             <activation>
-                <activeByDefault>true</activeByDefault>
+                <activeByDefault>false</activeByDefault>
             </activation>
             <build>
                 <plugins>
@@ -1,7 +0,0 @@
-package io.github.amithkoujalgi.ollama4j.core;
-
-import java.util.function.Consumer;
-
-public interface OllamaStreamHandler extends Consumer<String>{
-    void accept(String message);
-}
@@ -1,143 +0,0 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
-
-import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
-import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
-import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
-import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.net.http.HttpClient;
-import java.net.http.HttpRequest;
-import java.net.http.HttpResponse;
-import java.nio.charset.StandardCharsets;
-import java.time.Duration;
-import java.util.LinkedList;
-import java.util.Queue;
-import lombok.Data;
-import lombok.EqualsAndHashCode;
-import lombok.Getter;
-
-@Data
-@EqualsAndHashCode(callSuper = true)
-@SuppressWarnings("unused")
-public class OllamaAsyncResultCallback extends Thread {
-  private final HttpRequest.Builder requestBuilder;
-  private final OllamaGenerateRequestModel ollamaRequestModel;
-  private final Queue<String> queue = new LinkedList<>();
-  private String result;
-  private boolean isDone;
-
-  /**
-   * -- GETTER -- Returns the status of the request. Indicates if the request was successful or a
-   * failure. If the request was a failure, the `getResponse()` method will return the error
-   * message.
-   */
-  @Getter private boolean succeeded;
-
-  private long requestTimeoutSeconds;
-
-  /**
-   * -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama
-   * server.
-   */
-  @Getter private int httpStatusCode;
-
-  /** -- GETTER -- Returns the response time in milliseconds. */
-  @Getter private long responseTime = 0;
-
-  public OllamaAsyncResultCallback(
-      HttpRequest.Builder requestBuilder,
-      OllamaGenerateRequestModel ollamaRequestModel,
-      long requestTimeoutSeconds) {
-    this.requestBuilder = requestBuilder;
-    this.ollamaRequestModel = ollamaRequestModel;
-    this.isDone = false;
-    this.result = "";
-    this.queue.add("");
-    this.requestTimeoutSeconds = requestTimeoutSeconds;
-  }
-
-  @Override
-  public void run() {
-    HttpClient httpClient = HttpClient.newHttpClient();
-    try {
-      long startTime = System.currentTimeMillis();
-      HttpRequest request =
-          requestBuilder
-              .POST(
-                  HttpRequest.BodyPublishers.ofString(
-                      Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)))
-              .header("Content-Type", "application/json")
-              .timeout(Duration.ofSeconds(requestTimeoutSeconds))
-              .build();
-      HttpResponse<InputStream> response =
-          httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
-      int statusCode = response.statusCode();
-      this.httpStatusCode = statusCode;
-
-      InputStream responseBodyStream = response.body();
-      try (BufferedReader reader =
-          new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
-        String line;
-        StringBuilder responseBuffer = new StringBuilder();
-        while ((line = reader.readLine()) != null) {
-          if (statusCode == 404) {
-            OllamaErrorResponseModel ollamaResponseModel =
-                Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
-            queue.add(ollamaResponseModel.getError());
-            responseBuffer.append(ollamaResponseModel.getError());
-          } else {
-            OllamaGenerateResponseModel ollamaResponseModel =
-                Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
-            queue.add(ollamaResponseModel.getResponse());
-            if (!ollamaResponseModel.isDone()) {
-              responseBuffer.append(ollamaResponseModel.getResponse());
-            }
-          }
-        }
-
-        this.isDone = true;
-        this.succeeded = true;
-        this.result = responseBuffer.toString();
-        long endTime = System.currentTimeMillis();
-        responseTime = endTime - startTime;
-      }
-      if (statusCode != 200) {
-        throw new OllamaBaseException(this.result);
-      }
-    } catch (IOException | InterruptedException | OllamaBaseException e) {
-      this.isDone = true;
-      this.succeeded = false;
-      this.result = "[FAILED] " + e.getMessage();
-    }
-  }
-
-  /**
-   * Returns the status of the thread. This does not indicate that the request was successful or a
-   * failure, rather it is just a status flag to indicate if the thread is active or ended.
-   *
-   * @return boolean - status
-   */
-  public boolean isComplete() {
-    return isDone;
-  }
-
-  /**
-   * Returns the final completion/response when the execution completes. Does not return intermediate results.
-   *
-   * @return String completion/response text
-   */
-  public String getResponse() {
-    return result;
-  }
-
-  public Queue<String> getStream() {
-    return queue;
-  }
-
-  public void setRequestTimeoutSeconds(long requestTimeoutSeconds) {
-    this.requestTimeoutSeconds = requestTimeoutSeconds;
-  }
-}
@@ -1,46 +0,0 @@
-package io.github.amithkoujalgi.ollama4j.core.models.generate;
-
-
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
-import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
-
-import java.util.List;
-
-import lombok.Getter;
-import lombok.Setter;
-
-@Getter
-@Setter
-public class OllamaGenerateRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody{
-
-  private String prompt;
-  private List<String> images;
-
-  private String system;
-  private String context;
-  private boolean raw;
-
-  public OllamaGenerateRequestModel() {
-  }
-
-  public OllamaGenerateRequestModel(String model, String prompt) {
-    this.model = model;
-    this.prompt = prompt;
-  }
-
-  public OllamaGenerateRequestModel(String model, String prompt, List<String> images) {
-    this.model = model;
-    this.prompt = prompt;
-    this.images = images;
-  }
-
-    @Override
-  public boolean equals(Object o) {
-    if (!(o instanceof OllamaGenerateRequestModel)) {
-      return false;
-    }
-
-    return this.toString().equals(o.toString());
-  }
-
-}
@@ -1,54 +0,0 @@
-package io.github.amithkoujalgi.ollama4j.core.models.request;
-
-import java.io.IOException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
-import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
-import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
-import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
-import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver;
-import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
-import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
-
-public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{
-
-    private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class);
-
-    private OllamaGenerateStreamObserver streamObserver;
-
-    public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
-        super(host, basicAuth, requestTimeoutSeconds, verbose);
-    }
-
-    @Override
-    protected String getEndpointSuffix() {
-        return "/api/generate";
-    }
-
-    @Override
-    protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
-                try {
-                    OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
-                    responseBuffer.append(ollamaResponseModel.getResponse());
-                    if(streamObserver != null) {
-                        streamObserver.notify(ollamaResponseModel);
-                    }
-                    return ollamaResponseModel.isDone();
-                } catch (JsonProcessingException e) {
-                    LOG.error("Error parsing the Ollama chat response!",e);
-                    return true;
-                }
-    }
-
-    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
-        throws OllamaBaseException, IOException, InterruptedException {
-    streamObserver = new OllamaGenerateStreamObserver(streamHandler);
-    return super.callSync(body);
-    }
-
-
-}
@@ -1,17 +1,23 @@
-package io.github.amithkoujalgi.ollama4j.core;
+package io.github.ollama4j;
 
-import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
-import io.github.amithkoujalgi.ollama4j.core.models.*;
-import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage;
-import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
-import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
-import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
-import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel;
-import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
-import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
-import io.github.amithkoujalgi.ollama4j.core.models.request.*;
-import io.github.amithkoujalgi.ollama4j.core.utils.Options;
-import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+import io.github.ollama4j.exceptions.OllamaBaseException;
+import io.github.ollama4j.exceptions.ToolInvocationException;
+import io.github.ollama4j.exceptions.ToolNotFoundException;
+import io.github.ollama4j.models.chat.OllamaChatMessage;
+import io.github.ollama4j.models.chat.OllamaChatRequest;
+import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
+import io.github.ollama4j.models.chat.OllamaChatResult;
+import io.github.ollama4j.models.embeddings.OllamaEmbeddingResponseModel;
+import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
+import io.github.ollama4j.models.generate.OllamaGenerateRequest;
+import io.github.ollama4j.models.generate.OllamaStreamHandler;
+import io.github.ollama4j.models.ps.ModelsProcessResponse;
+import io.github.ollama4j.models.request.*;
+import io.github.ollama4j.models.response.*;
+import io.github.ollama4j.tools.*;
+import io.github.ollama4j.utils.Options;
+import io.github.ollama4j.utils.Utils;
+import lombok.Setter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -25,9 +31,7 @@ import java.net.http.HttpResponse;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.time.Duration;
-import java.util.ArrayList;
-import java.util.Base64;
-import java.util.List;
+import java.util.*;
 
 /**
  * The base Ollama API class.
@@ -37,12 +41,31 @@ public class OllamaAPI {
 
     private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class);
     private final String host;
+    /**
+     * -- SETTER --
+     * Set request timeout in seconds. Default is 3 seconds.
+     */
+    @Setter
     private long requestTimeoutSeconds = 10;
+    /**
+     * -- SETTER --
+     * Set/unset logging of responses
+     */
+    @Setter
     private boolean verbose = true;
     private BasicAuth basicAuth;
 
+    private final ToolRegistry toolRegistry = new ToolRegistry();
+
     /**
-     * Instantiates the Ollama API.
+     * Instantiates the Ollama API with default Ollama host: <a href="http://localhost:11434">http://localhost:11434</a>
+     **/
+    public OllamaAPI() {
+        this.host = "http://localhost:11434";
+    }
+
+    /**
+     * Instantiates the Ollama API with specified Ollama host address.
      *
      * @param host the host address of Ollama server
      */
@@ -54,24 +77,6 @@ public class OllamaAPI {
         }
     }
 
-    /**
-     * Set request timeout in seconds. Default is 3 seconds.
-     *
-     * @param requestTimeoutSeconds the request timeout in seconds
-     */
-    public void setRequestTimeoutSeconds(long requestTimeoutSeconds) {
-        this.requestTimeoutSeconds = requestTimeoutSeconds;
-    }
-
-    /**
-     * Set/unset logging of responses
-     *
-     * @param verbose true/false
-     */
-    public void setVerbose(boolean verbose) {
-        this.verbose = verbose;
-    }
-
     /**
      * Set basic authentication for accessing Ollama server that's behind a reverse-proxy/gateway.
      *
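Note on the two hunks above: the explicit setRequestTimeoutSeconds and setVerbose methods were removed because the fields are now annotated with Lombok's @Setter, so setters with the same names and signatures are still generated. A minimal sketch of unchanged caller code (the values 60 and false are arbitrary examples):

    OllamaAPI ollamaAPI = new OllamaAPI();       // new no-arg constructor defaults to http://localhost:11434
    ollamaAPI.setRequestTimeoutSeconds(60);      // previously an explicit setter, now generated by @Setter
    ollamaAPI.setVerbose(false);                 // likewise generated by @Setter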
@@ -113,6 +118,37 @@ public class OllamaAPI {
         return statusCode == 200;
     }
 
+    /**
+     * Provides a list of running models and details about each model currently loaded into memory.
+     *
+     * @return ModelsProcessResponse
+     */
+    public ModelsProcessResponse ps() throws IOException, InterruptedException, OllamaBaseException {
+        String url = this.host + "/api/ps";
+        HttpClient httpClient = HttpClient.newHttpClient();
+        HttpRequest httpRequest = null;
+        try {
+            httpRequest =
+                    getRequestBuilderDefault(new URI(url))
+                            .header("Accept", "application/json")
+                            .header("Content-type", "application/json")
+                            .GET()
+                            .build();
+        } catch (URISyntaxException e) {
+            throw new RuntimeException(e);
+        }
+        HttpResponse<String> response = null;
+        response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
+        int statusCode = response.statusCode();
+        String responseString = response.body();
+        if (statusCode == 200) {
+            return Utils.getObjectMapper()
+                    .readValue(responseString, ModelsProcessResponse.class);
+        } else {
+            throw new OllamaBaseException(statusCode + " - " + responseString);
+        }
+    }
+
     /**
      * List available models from Ollama server.
      *
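A minimal usage sketch for the ps() method added above. It wraps GET /api/ps and throws OllamaBaseException on any non-200 status; the accessors of ModelsProcessResponse are not shown in this diff, so the example only prints the object:

    OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
    ModelsProcessResponse runningModels = ollamaAPI.ps();   // models currently loaded into memory
    System.out.println(runningModels);                      // field-level getters are outside this diff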
@@ -339,6 +375,7 @@
         }
     }
 
+
     /**
      * Generate response for a question to a model running on Ollama server. This is a sync/blocking
      * call.
@@ -351,23 +388,67 @@
      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
      * @return OllamaResult that includes response text and time taken for response
      */
-    public OllamaResult generate(String model, String prompt, Options options, OllamaStreamHandler streamHandler)
+    public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler)
             throws OllamaBaseException, IOException, InterruptedException {
-        OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
+        OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt);
+        ollamaRequestModel.setRaw(raw);
         ollamaRequestModel.setOptions(options.getOptionsMap());
         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
     }
 
     /**
-     * Convenience method to call Ollama API without streaming responses.
+     * Generates response using the specified AI model and prompt (in blocking mode).
      * <p>
-     * Uses {@link #generate(String, String, Options, OllamaStreamHandler)}
+     * Uses {@link #generate(String, String, boolean, Options, OllamaStreamHandler)}
+     *
+     * @param model   The name or identifier of the AI model to use for generating the response.
+     * @param prompt  The input text or prompt to provide to the AI model.
+     * @param raw     In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the raw parameter to disable templating. Also note that raw mode will not return a context.
+     * @param options Additional options or configurations to use when generating the response.
+     * @return {@link OllamaResult}
      */
-    public OllamaResult generate(String model, String prompt, Options options)
+    public OllamaResult generate(String model, String prompt, boolean raw, Options options)
             throws OllamaBaseException, IOException, InterruptedException {
-        return generate(model, prompt, options, null);
+        return generate(model, prompt, raw, options, null);
     }
 
+
+    /**
+     * Generates response using the specified AI model and prompt (in blocking mode), and then invokes a set of tools
+     * on the generated response.
+     *
+     * @param model   The name or identifier of the AI model to use for generating the response.
+     * @param prompt  The input text or prompt to provide to the AI model.
+     * @param options Additional options or configurations to use when generating the response.
+     * @return {@link OllamaToolsResult} An OllamaToolsResult object containing the response from the AI model and the results of invoking the tools on that output.
+     * @throws OllamaBaseException  If there is an error related to the Ollama API or service.
+     * @throws IOException          If there is an error related to input/output operations.
+     * @throws InterruptedException If the method is interrupted while waiting for the AI model
+     *                              to generate the response or for the tools to be invoked.
+     */
+    public OllamaToolsResult generateWithTools(String model, String prompt, Options options)
+            throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException {
+        boolean raw = true;
+        OllamaToolsResult toolResult = new OllamaToolsResult();
+        Map<ToolFunctionCallSpec, Object> toolResults = new HashMap<>();
+
+        OllamaResult result = generate(model, prompt, raw, options, null);
+        toolResult.setModelResult(result);
+
+        String toolsResponse = result.getResponse();
+        if (toolsResponse.contains("[TOOL_CALLS]")) {
+            toolsResponse = toolsResponse.replace("[TOOL_CALLS]", "");
+        }
+
+        List<ToolFunctionCallSpec> toolFunctionCallSpecs = Utils.getObjectMapper().readValue(toolsResponse, Utils.getObjectMapper().getTypeFactory().constructCollectionType(List.class, ToolFunctionCallSpec.class));
+        for (ToolFunctionCallSpec toolFunctionCallSpec : toolFunctionCallSpecs) {
+            toolResults.put(toolFunctionCallSpec, invokeTool(toolFunctionCallSpec));
+        }
+        toolResult.setToolResults(toolResults);
+        return toolResult;
+    }
+
+
     /**
      * Generate response for a question to a model running on Ollama server and get a callback handle
      * that can be used to check for status and get the response from the model later. This would be
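Existing callers of the blocking generate(...) methods must now pass the additional raw flag introduced above. A short sketch, assuming a model named "llama3" is pulled locally and that the library's OptionsBuilder is used to build the Options argument:

    OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
    // raw = false keeps the model's prompt template; raw = true sends the prompt verbatim (no context is returned)
    OllamaResult result = ollamaAPI.generate("llama3", "Why is the sky blue?", false,
            new OptionsBuilder().build());
    System.out.println(result.getResponse());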
@@ -377,15 +458,15 @@
      * @param prompt the prompt/question text
      * @return the ollama async result callback handle
      */
-    public OllamaAsyncResultCallback generateAsync(String model, String prompt) {
-        OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
+    public OllamaAsyncResultStreamer generateAsync(String model, String prompt, boolean raw) {
+        OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt);
+        ollamaRequestModel.setRaw(raw);
         URI uri = URI.create(this.host + "/api/generate");
-        OllamaAsyncResultCallback ollamaAsyncResultCallback =
-                new OllamaAsyncResultCallback(
+        OllamaAsyncResultStreamer ollamaAsyncResultStreamer =
+                new OllamaAsyncResultStreamer(
                         getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds);
-        ollamaAsyncResultCallback.start();
-        return ollamaAsyncResultCallback;
+        ollamaAsyncResultStreamer.start();
+        return ollamaAsyncResultStreamer;
     }
 
     /**
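generateAsync(...) now returns an OllamaAsyncResultStreamer and takes the raw flag. The streamer's accessors are not part of this hunk; the polling loop below is a hypothetical sketch modeled on the surface of the removed OllamaAsyncResultCallback (isComplete(), getStream(), getResponse()), so verify the method names against the new class before relying on it:

    OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
    OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync("llama3", "List the oceans.", false);
    while (!streamer.isComplete()) {                 // assumed accessor, mirrors the deleted callback class
        String chunk = streamer.getStream().poll();  // assumed queue of streamed chunks; may be null between chunks
        if (chunk != null) {
            System.out.print(chunk);
        }
        Thread.sleep(100);                           // assumes the enclosing method declares InterruptedException
    }
    System.out.println(streamer.getResponse());      // assumed final aggregated response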
@@ -408,7 +489,7 @@
         for (File imageFile : imageFiles) {
             images.add(encodeFileToBase64(imageFile));
         }
-        OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
+        OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images);
         ollamaRequestModel.setOptions(options.getOptionsMap());
         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
     }
@@ -444,7 +525,7 @@
         for (String imageURL : imageURLs) {
             images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL)));
         }
-        OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
+        OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images);
         ollamaRequestModel.setOptions(options.getOptionsMap());
         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
     }
@@ -478,33 +559,33 @@
     }
 
     /**
-     * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
+     * Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}.
      * <p>
      * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
      *
      * @param request request object to be sent to the server
-     * @return
+     * @return {@link OllamaChatResult}
      * @throws OllamaBaseException  any response code than 200 has been returned
      * @throws IOException          in case the responseStream can not be read
      * @throws InterruptedException in case the server is not reachable or network issues happen
      */
-    public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException {
+    public OllamaChatResult chat(OllamaChatRequest request) throws OllamaBaseException, IOException, InterruptedException {
         return chat(request, null);
     }
 
     /**
-     * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
+     * Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}.
      * <p>
      * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
     *
     * @param request       request object to be sent to the server
     * @param streamHandler callback handler to handle the last message from stream (caution: all previous messages from stream will be concatenated)
-     * @return
+     * @return {@link OllamaChatResult}
      * @throws OllamaBaseException  any response code than 200 has been returned
      * @throws IOException          in case the responseStream can not be read
      * @throws InterruptedException in case the server is not reachable or network issues happen
      */
-    public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
+    public OllamaChatResult chat(OllamaChatRequest request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
         OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
         OllamaResult result;
         if (streamHandler != null) {
@@ -516,6 +597,10 @@
         return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages());
     }
 
+    public void registerTool(Tools.ToolSpecification toolSpecification) {
+        toolRegistry.addFunction(toolSpecification.getFunctionName(), toolSpecification.getToolDefinition());
+    }
+
     // technical private methods //
 
     private static String encodeFileToBase64(File file) throws IOException {
@@ -527,7 +612,7 @@
     }
 
     private OllamaResult generateSyncForOllamaRequestModel(
-            OllamaGenerateRequestModel ollamaRequestModel, OllamaStreamHandler streamHandler)
+            OllamaGenerateRequest ollamaRequestModel, OllamaStreamHandler streamHandler)
             throws OllamaBaseException, IOException, InterruptedException {
         OllamaGenerateEndpointCaller requestCaller =
                 new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
@@ -576,4 +661,22 @@
     private boolean isBasicAuthCredentialsSet() {
         return basicAuth != null;
     }
 
+
+    private Object invokeTool(ToolFunctionCallSpec toolFunctionCallSpec) throws ToolInvocationException {
+        try {
+            String methodName = toolFunctionCallSpec.getName();
+            Map<String, Object> arguments = toolFunctionCallSpec.getArguments();
+            ToolFunction function = toolRegistry.getFunction(methodName);
+            if (verbose) {
+                logger.debug("Invoking function {} with arguments {}", methodName, arguments);
+            }
+            if (function == null) {
+                throw new ToolNotFoundException("No such tool: " + methodName);
+            }
+            return function.apply(arguments);
+        } catch (Exception e) {
+            throw new ToolInvocationException("Failed to invoke tool: " + toolFunctionCallSpec.getName(), e);
+        }
+    }
 }
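The tool plumbing added above (registerTool, the ToolRegistry, and invokeTool) resolves a registered ToolFunction by name and calls apply(arguments) on it with the arguments parsed from the model's [TOOL_CALLS] output. Only getFunctionName(), getToolDefinition(), and ToolFunction.apply(Map) are visible in this diff, so the builder-style construction below is an illustrative assumption rather than the library's confirmed API:

    // Hypothetical wiring of a tool; adjust to the actual Tools.ToolSpecification shape.
    OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
    Tools.ToolSpecification weatherTool = Tools.ToolSpecification.builder()
            .functionName("current-weather")
            .toolDefinition(arguments -> "It is sunny in " + arguments.get("city"))  // ToolFunction.apply(Map)
            .build();
    ollamaAPI.registerTool(weatherTool);

    OllamaToolsResult toolsResult = ollamaAPI.generateWithTools("mistral",
            "What is the weather in Bengaluru?", new OptionsBuilder().build());
    System.out.println(toolsResult.getToolResults());  // getter assumed from the setToolResults call above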
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.exceptions;
+package io.github.ollama4j.exceptions;
 
 public class OllamaBaseException extends Exception {
 
@@ -0,0 +1,8 @@
+package io.github.ollama4j.exceptions;
+
+public class ToolInvocationException extends Exception {
+
+    public ToolInvocationException(String s, Exception e) {
+        super(s, e);
+    }
+}
@@ -0,0 +1,8 @@
+package io.github.ollama4j.exceptions;
+
+public class ToolNotFoundException extends Exception {
+
+    public ToolNotFoundException(String s) {
+        super(s);
+    }
+}
@@ -1,6 +1,6 @@
-package io.github.amithkoujalgi.ollama4j.core.impl;
+package io.github.ollama4j.impl;
 
-import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
+import io.github.ollama4j.models.generate.OllamaStreamHandler;
 
 public class ConsoleOutputStreamHandler implements OllamaStreamHandler {
     private final StringBuffer response = new StringBuffer();
@@ -1,11 +1,11 @@
-package io.github.amithkoujalgi.ollama4j.core.models.chat;
+package io.github.ollama4j.models.chat;
 
-import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
+import static io.github.ollama4j.utils.Utils.getObjectMapper;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 
-import io.github.amithkoujalgi.ollama4j.core.utils.FileToBase64Serializer;
+import io.github.ollama4j.utils.FileToBase64Serializer;
 
 import java.util.List;
 import lombok.AllArgsConstructor;
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.models.chat;
+package io.github.ollama4j.models.chat;
 
 import com.fasterxml.jackson.annotation.JsonValue;
 
@@ -1,8 +1,9 @@
-package io.github.amithkoujalgi.ollama4j.core.models.chat;
+package io.github.ollama4j.models.chat;
 
 import java.util.List;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
-import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
+
+import io.github.ollama4j.models.request.OllamaCommonRequest;
+import io.github.ollama4j.utils.OllamaRequestBody;
 
 import lombok.Getter;
 import lombok.Setter;
@@ -16,20 +17,20 @@ import lombok.Setter;
  */
 @Getter
 @Setter
-public class OllamaChatRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody {
+public class OllamaChatRequest extends OllamaCommonRequest implements OllamaRequestBody {
 
   private List<OllamaChatMessage> messages;
 
-  public OllamaChatRequestModel() {}
+  public OllamaChatRequest() {}
 
-  public OllamaChatRequestModel(String model, List<OllamaChatMessage> messages) {
+  public OllamaChatRequest(String model, List<OllamaChatMessage> messages) {
     this.model = model;
     this.messages = messages;
   }
 
   @Override
   public boolean equals(Object o) {
-    if (!(o instanceof OllamaChatRequestModel)) {
+    if (!(o instanceof OllamaChatRequest)) {
       return false;
     }
 
@@ -1,4 +1,9 @@
-package io.github.amithkoujalgi.ollama4j.core.models.chat;
+package io.github.ollama4j.models.chat;
 
+import io.github.ollama4j.utils.Options;
+import io.github.ollama4j.utils.Utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.File;
 import java.io.IOException;
@@ -8,101 +13,92 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.stream.Collectors;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import io.github.amithkoujalgi.ollama4j.core.utils.Options;
-import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
-
 /**
- * Helper class for creating {@link OllamaChatRequestModel} objects using the builder-pattern.
+ * Helper class for creating {@link OllamaChatRequest} objects using the builder-pattern.
  */
 public class OllamaChatRequestBuilder {
 
     private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class);
 
-    private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages){
-        request = new OllamaChatRequestModel(model, messages);
+    private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages) {
+        request = new OllamaChatRequest(model, messages);
     }
 
-    private OllamaChatRequestModel request;
+    private OllamaChatRequest request;
 
-    public static OllamaChatRequestBuilder getInstance(String model){
+    public static OllamaChatRequestBuilder getInstance(String model) {
         return new OllamaChatRequestBuilder(model, new ArrayList<>());
     }
 
-    public OllamaChatRequestModel build(){
+    public OllamaChatRequest build() {
        return request;
     }
 
-    public void reset(){
-        request = new OllamaChatRequestModel(request.getModel(), new ArrayList<>());
+    public void reset() {
+        request = new OllamaChatRequest(request.getModel(), new ArrayList<>());
     }
 
-    public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images){
+    public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images) {
        List<OllamaChatMessage> messages = this.request.getMessages();
 
        List<byte[]> binaryImages = images.stream().map(file -> {
            try {
                return Files.readAllBytes(file.toPath());
            } catch (IOException e) {
-                LOG.warn(String.format("File '%s' could not be accessed, will not add to message!",file.toPath()), e);
+                LOG.warn(String.format("File '%s' could not be accessed, will not add to message!", file.toPath()), e);
                return new byte[0];
            }
        }).collect(Collectors.toList());
 
-        messages.add(new OllamaChatMessage(role,content,binaryImages));
+        messages.add(new OllamaChatMessage(role, content, binaryImages));
        return this;
     }
 
-    public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, String... imageUrls){
+    public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, String... imageUrls) {
        List<OllamaChatMessage> messages = this.request.getMessages();
        List<byte[]> binaryImages = null;
-        if(imageUrls.length>0){
+        if (imageUrls.length > 0) {
            binaryImages = new ArrayList<>();
            for (String imageUrl : imageUrls) {
-                try{
+                try {
                    binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl));
-                }
-                    catch (URISyntaxException e){
-                        LOG.warn(String.format("URL '%s' could not be accessed, will not add to message!",imageUrl), e);
-                }
-                catch (IOException e){
-                    LOG.warn(String.format("Content of URL '%s' could not be read, will not add to message!",imageUrl), e);
+                } catch (URISyntaxException e) {
+                    LOG.warn(String.format("URL '%s' could not be accessed, will not add to message!", imageUrl), e);
+                } catch (IOException e) {
+                    LOG.warn(String.format("Content of URL '%s' could not be read, will not add to message!", imageUrl), e);
                }
            }
        }
 
-        messages.add(new OllamaChatMessage(role,content,binaryImages));
+        messages.add(new OllamaChatMessage(role, content, binaryImages));
        return this;
     }
 
-    public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages){
-        this.request.getMessages().addAll(messages);
-        return this;
+    public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages) {
+        return new OllamaChatRequestBuilder(request.getModel(), messages);
     }
 
-    public OllamaChatRequestBuilder withOptions(Options options){
+    public OllamaChatRequestBuilder withOptions(Options options) {
        this.request.setOptions(options.getOptionsMap());
        return this;
     }
 
-    public OllamaChatRequestBuilder withGetJsonResponse(){
+    public OllamaChatRequestBuilder withGetJsonResponse() {
        this.request.setReturnFormatJson(true);
        return this;
     }
 
-    public OllamaChatRequestBuilder withTemplate(String template){
+    public OllamaChatRequestBuilder withTemplate(String template) {
        this.request.setTemplate(template);
        return this;
     }
 
-    public OllamaChatRequestBuilder withStreaming(){
+    public OllamaChatRequestBuilder withStreaming() {
        this.request.setStream(true);
        return this;
     }
 
-    public OllamaChatRequestBuilder withKeepAlive(String keepAlive){
+    public OllamaChatRequestBuilder withKeepAlive(String keepAlive) {
        this.request.setKeepAlive(keepAlive);
        return this;
     }
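A short usage sketch for the renamed chat types, combining the builder methods shown above (getInstance, withMessage, build) with the chat(request, streamHandler) overload from the OllamaAPI diff. The model name and the USER role constant are illustrative, and the lambda assumes OllamaStreamHandler is a single-method String consumer, as its accept(String) usage elsewhere in this diff suggests:

    OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
    OllamaChatRequest chatRequest = OllamaChatRequestBuilder.getInstance("llama3")
            .withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
            .build();

    // Blocking call; the handler receives streamed message content as it arrives.
    OllamaChatResult chatResult = ollamaAPI.chat(chatRequest, token -> System.out.print(token));
    System.out.println(chatResult.getChatHistory());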
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.models.chat;
+package io.github.ollama4j.models.chat;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 import lombok.Data;
@@ -1,8 +1,8 @@
-package io.github.amithkoujalgi.ollama4j.core.models.chat;
+package io.github.ollama4j.models.chat;
 
 import java.util.List;
 
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
+import io.github.ollama4j.models.response.OllamaResult;
 
 /**
  * Specific chat-API result that contains the chat history sent to the model and appends the answer as {@link OllamaChatResult} given by the
@@ -16,17 +16,11 @@ public class OllamaChatResult extends OllamaResult{
             List<OllamaChatMessage> chatHistory) {
         super(response, responseTime, httpStatusCode);
         this.chatHistory = chatHistory;
-        appendAnswerToChatHistory(response);
     }
 
     public List<OllamaChatMessage> getChatHistory() {
         return chatHistory;
     }
 
-    private void appendAnswerToChatHistory(String answer){
-        OllamaChatMessage assistantMessage = new OllamaChatMessage(OllamaChatMessageRole.ASSISTANT, answer);
-        this.chatHistory.add(assistantMessage);
-    }
-
 
 }
@@ -1,30 +1,27 @@
-package io.github.amithkoujalgi.ollama4j.core.models.chat;
+package io.github.ollama4j.models.chat;

+import io.github.ollama4j.models.generate.OllamaStreamHandler;
+
 import java.util.ArrayList;
 import java.util.List;

-import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
-
 public class OllamaChatStreamObserver {

     private OllamaStreamHandler streamHandler;

     private List<OllamaChatResponseModel> responseParts = new ArrayList<>();

-    private String message = "";
-
     public OllamaChatStreamObserver(OllamaStreamHandler streamHandler) {
         this.streamHandler = streamHandler;
     }

-    public void notify(OllamaChatResponseModel currentResponsePart){
+    public void notify(OllamaChatResponseModel currentResponsePart) {
         responseParts.add(currentResponsePart);
         handleCurrentResponsePart(currentResponsePart);
     }

-    protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart){
-        message = message + currentResponsePart.getMessage().getContent();
-        streamHandler.accept(message);
+    protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart) {
+        streamHandler.accept(currentResponsePart.getMessage().getContent());
     }

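With the change above, the chat stream observer hands each handler call only the content of the current response chunk instead of re-sending the whole accumulated message. A minimal sketch (not part of the diff; the accumulating StringBuilder is illustrative) of a handler written against the new behaviour:

import io.github.ollama4j.models.generate.OllamaStreamHandler;

public class TokenPrintingHandler implements OllamaStreamHandler {
    // accept() now receives only the latest chunk, so callers that want the
    // full message accumulate it themselves.
    private final StringBuilder fullMessage = new StringBuilder();

    @Override
    public void accept(String token) {
        fullMessage.append(token);
        System.out.print(token);
    }

    public String getFullMessage() {
        return fullMessage.toString();
    }
}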
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
+package io.github.ollama4j.models.embeddings;

 import com.fasterxml.jackson.annotation.JsonProperty;

@@ -1,6 +1,6 @@
-package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
+package io.github.ollama4j.models.embeddings;

-import io.github.amithkoujalgi.ollama4j.core.utils.Options;
+import io.github.ollama4j.utils.Options;

 public class OllamaEmbeddingsRequestBuilder {

@@ -1,6 +1,6 @@
-package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
+package io.github.ollama4j.models.embeddings;

-import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
+import static io.github.ollama4j.utils.Utils.getObjectMapper;
 import java.util.Map;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
@@ -0,0 +1,46 @@
+package io.github.ollama4j.models.generate;
+
+import io.github.ollama4j.models.request.OllamaCommonRequest;
+import io.github.ollama4j.utils.OllamaRequestBody;
+
+import java.util.List;
+
+import lombok.Getter;
+import lombok.Setter;
+
+@Getter
+@Setter
+public class OllamaGenerateRequest extends OllamaCommonRequest implements OllamaRequestBody{
+
+  private String prompt;
+  private List<String> images;
+
+  private String system;
+  private String context;
+  private boolean raw;
+
+  public OllamaGenerateRequest() {
+  }
+
+  public OllamaGenerateRequest(String model, String prompt) {
+    this.model = model;
+    this.prompt = prompt;
+  }
+
+  public OllamaGenerateRequest(String model, String prompt, List<String> images) {
+    this.model = model;
+    this.prompt = prompt;
+    this.images = images;
+  }
+
+    @Override
+  public boolean equals(Object o) {
+    if (!(o instanceof OllamaGenerateRequest)) {
+      return false;
+    }
+
+    return this.toString().equals(o.toString());
+  }
+
+}
@@ -1,24 +1,24 @@
-package io.github.amithkoujalgi.ollama4j.core.models.generate;
+package io.github.ollama4j.models.generate;

-import io.github.amithkoujalgi.ollama4j.core.utils.Options;
+import io.github.ollama4j.utils.Options;

 /**
- * Helper class for creating {@link io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel} 
+ * Helper class for creating {@link OllamaGenerateRequest}
  * objects using the builder-pattern.
  */
 public class OllamaGenerateRequestBuilder {

     private OllamaGenerateRequestBuilder(String model, String prompt){
-        request = new OllamaGenerateRequestModel(model, prompt);
+        request = new OllamaGenerateRequest(model, prompt);
     }

-    private OllamaGenerateRequestModel request;
+    private OllamaGenerateRequest request;

     public static OllamaGenerateRequestBuilder getInstance(String model){
         return new OllamaGenerateRequestBuilder(model,"");
     }

-    public OllamaGenerateRequestModel build(){
+    public OllamaGenerateRequest build(){
         return request;
     }

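The renamed builder now produces the new OllamaGenerateRequest type shown above. A minimal usage sketch, limited to the constructors and Lombok-generated setters visible in this diff (model name and prompt are placeholders):

import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;

public class GenerateRequestExample {
    public static void main(String[] args) {
        // Constructor added by the new OllamaGenerateRequest class.
        OllamaGenerateRequest direct = new OllamaGenerateRequest("llama3", "Why is the sky blue?");
        direct.setRaw(false); // @Setter-generated setter for the new 'raw' field

        // The builder now returns the same type.
        OllamaGenerateRequest fromBuilder = OllamaGenerateRequestBuilder
                .getInstance("llama3") // starts a request with an empty prompt
                .build();
        fromBuilder.setPrompt("Why is the sky blue?");
    }
}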
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.models.generate;
+package io.github.ollama4j.models.generate;

 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
@@ -1,10 +1,8 @@
-package io.github.amithkoujalgi.ollama4j.core.models.generate;
+package io.github.ollama4j.models.generate;

 import java.util.ArrayList;
 import java.util.List;

-import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
-
 public class OllamaGenerateStreamObserver {

     private OllamaStreamHandler streamHandler;
@@ -17,12 +15,12 @@ public class OllamaGenerateStreamObserver {
         this.streamHandler = streamHandler;
     }

-    public void notify(OllamaGenerateResponseModel currentResponsePart){
+    public void notify(OllamaGenerateResponseModel currentResponsePart) {
         responseParts.add(currentResponsePart);
         handleCurrentResponsePart(currentResponsePart);
     }

-    protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart){
+    protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart) {
         message = message + currentResponsePart.getResponse();
         streamHandler.accept(message);
     }
@@ -0,0 +1,7 @@
+package io.github.ollama4j.models.generate;
+
+import java.util.function.Consumer;
+
+public interface OllamaStreamHandler extends Consumer<String> {
+    void accept(String message);
+}
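Because the new OllamaStreamHandler extends Consumer<String> and declares a single abstract accept(String) method, it is a functional interface and can be supplied as a lambda or method reference. A small illustrative sketch:

import io.github.ollama4j.models.generate.OllamaStreamHandler;

public class StreamHandlerExample {
    public static void main(String[] args) {
        // Lambda form: print each streamed chunk as it arrives.
        OllamaStreamHandler printChunks = chunk -> System.out.print(chunk);

        // Method-reference form works as well.
        OllamaStreamHandler printLines = System.out::println;

        printChunks.accept("partial ");
        printLines.accept("response");
    }
}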
@@ -0,0 +1,63 @@
+package io.github.ollama4j.models.ps;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+import java.util.List;
+
+@Data
+@NoArgsConstructor
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class ModelsProcessResponse {
+    @JsonProperty("models")
+    private List<ModelProcess> models;
+
+    @Data
+    @NoArgsConstructor
+    public static class ModelProcess {
+        @JsonProperty("name")
+        private String name;
+
+        @JsonProperty("model")
+        private String model;
+
+        @JsonProperty("size")
+        private long size;
+
+        @JsonProperty("digest")
+        private String digest;
+
+        @JsonProperty("details")
+        private ModelDetails details;
+
+        @JsonProperty("expires_at")
+        private String expiresAt; // Consider using LocalDateTime if you need to process date/time
+
+        @JsonProperty("size_vram")
+        private long sizeVram;
+    }
+
+    @Data
+    @NoArgsConstructor
+    public static class ModelDetails {
+        @JsonProperty("parent_model")
+        private String parentModel;
+
+        @JsonProperty("format")
+        private String format;
+
+        @JsonProperty("family")
+        private String family;
+
+        @JsonProperty("families")
+        private List<String> families;
+
+        @JsonProperty("parameter_size")
+        private String parameterSize;
+
+        @JsonProperty("quantization_level")
+        private String quantizationLevel;
+    }
+}
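ModelsProcessResponse mirrors the JSON returned by Ollama's endpoint for listing running models. A minimal deserialization sketch, assuming a plain Jackson ObjectMapper and an abbreviated payload whose sample values are invented for illustration; only field names that match the @JsonProperty mappings above are used:

import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.ollama4j.models.ps.ModelsProcessResponse;

public class PsResponseExample {
    public static void main(String[] args) throws Exception {
        String json = "{\"models\":[{\"name\":\"llama3:latest\",\"model\":\"llama3:latest\","
                + "\"size\":4661224676,\"digest\":\"abc123\",\"size_vram\":4661224676,"
                + "\"details\":{\"format\":\"gguf\",\"family\":\"llama\","
                + "\"parameter_size\":\"8B\",\"quantization_level\":\"Q4_0\"}}]}";

        // Lombok's @Data generates the getters used below.
        ModelsProcessResponse response = new ObjectMapper().readValue(json, ModelsProcessResponse.class);
        response.getModels().forEach(m ->
                System.out.println(m.getName() + " -> " + m.getDetails().getParameterSize()));
    }
}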
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
+package io.github.ollama4j.models.request;

 import lombok.AllArgsConstructor;
 import lombok.Data;
@@ -1,6 +1,6 @@
-package io.github.amithkoujalgi.ollama4j.core.models.request;
+package io.github.ollama4j.models.request;

-import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
+import static io.github.ollama4j.utils.Utils.getObjectMapper;

 import com.fasterxml.jackson.core.JsonProcessingException;
 import lombok.AllArgsConstructor;
@@ -1,6 +1,6 @@
-package io.github.amithkoujalgi.ollama4j.core.models.request;
+package io.github.ollama4j.models.request;

-import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
+import static io.github.ollama4j.utils.Utils.getObjectMapper;

 import com.fasterxml.jackson.core.JsonProcessingException;
 import lombok.AllArgsConstructor;
@@ -1,6 +1,6 @@
-package io.github.amithkoujalgi.ollama4j.core.models.request;
+package io.github.ollama4j.models.request;

-import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
+import static io.github.ollama4j.utils.Utils.getObjectMapper;

 import com.fasterxml.jackson.core.JsonProcessingException;
 import lombok.AllArgsConstructor;
@@ -1,14 +1,13 @@
-package io.github.amithkoujalgi.ollama4j.core.models.request;
+package io.github.ollama4j.models.request;

 import com.fasterxml.jackson.core.JsonProcessingException;
-import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
-import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
-import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
-import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel;
-import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver;
-import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
-import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+import io.github.ollama4j.exceptions.OllamaBaseException;
+import io.github.ollama4j.models.response.OllamaResult;
+import io.github.ollama4j.models.chat.OllamaChatResponseModel;
+import io.github.ollama4j.models.chat.OllamaChatStreamObserver;
+import io.github.ollama4j.models.generate.OllamaStreamHandler;
+import io.github.ollama4j.utils.OllamaRequestBody;
+import io.github.ollama4j.utils.Utils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
+package io.github.ollama4j.models.request;

 import java.util.Map;
 import com.fasterxml.jackson.annotation.JsonInclude;
@@ -6,13 +6,13 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;

-import io.github.amithkoujalgi.ollama4j.core.utils.BooleanToJsonFormatFlagSerializer;
-import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+import io.github.ollama4j.utils.BooleanToJsonFormatFlagSerializer;
+import io.github.ollama4j.utils.Utils;
 import lombok.Data;

 @Data
 @JsonInclude(JsonInclude.Include.NON_NULL)
-public abstract class OllamaCommonRequestModel {
+public abstract class OllamaCommonRequest {
   
   protected String model;  
   @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)
@@ -1,12 +1,11 @@
-package io.github.amithkoujalgi.ollama4j.core.models.request;
+package io.github.ollama4j.models.request;

-import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
-import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
-import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
-import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
-import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.exceptions.OllamaBaseException;
+import io.github.ollama4j.models.response.OllamaErrorResponse;
+import io.github.ollama4j.models.response.OllamaResult;
+import io.github.ollama4j.utils.OllamaRequestBody;
+import io.github.ollama4j.utils.Utils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -78,19 +77,19 @@ public abstract class OllamaEndpointCaller {
             while ((line = reader.readLine()) != null) {
                 if (statusCode == 404) {
                     LOG.warn("Status code: 404 (Not Found)");
-                    OllamaErrorResponseModel ollamaResponseModel =
-                            Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class);
+                    OllamaErrorResponse ollamaResponseModel =
+                            Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
                     responseBuffer.append(ollamaResponseModel.getError());
                 } else if (statusCode == 401) {
                     LOG.warn("Status code: 401 (Unauthorized)");
-                    OllamaErrorResponseModel ollamaResponseModel =
-                            Utils.getObjectMapper()
-                                    .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class);
+                    OllamaErrorResponse ollamaResponseModel =
+                            Utils.getObjectMapper()
+                                    .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponse.class);
                     responseBuffer.append(ollamaResponseModel.getError());
                 } else if (statusCode == 400) {
                     LOG.warn("Status code: 400 (Bad Request)");
-                    OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line,
-                            OllamaErrorResponseModel.class);
+                    OllamaErrorResponse ollamaResponseModel = Utils.getObjectMapper().readValue(line,
+                            OllamaErrorResponse.class);
                     responseBuffer.append(ollamaResponseModel.getError());
                 } else {
                     boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);
@@ -0,0 +1,51 @@
+package io.github.ollama4j.models.request;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import io.github.ollama4j.exceptions.OllamaBaseException;
+import io.github.ollama4j.models.response.OllamaResult;
+import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
+import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
+import io.github.ollama4j.models.generate.OllamaStreamHandler;
+import io.github.ollama4j.utils.OllamaRequestBody;
+import io.github.ollama4j.utils.Utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+
+public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {
+
+    private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class);
+
+    private OllamaGenerateStreamObserver streamObserver;
+
+    public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
+        super(host, basicAuth, requestTimeoutSeconds, verbose);
+    }
+
+    @Override
+    protected String getEndpointSuffix() {
+        return "/api/generate";
+    }
+
+    @Override
+    protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
+        try {
+            OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
+            responseBuffer.append(ollamaResponseModel.getResponse());
+            if (streamObserver != null) {
+                streamObserver.notify(ollamaResponseModel);
+            }
+            return ollamaResponseModel.isDone();
+        } catch (JsonProcessingException e) {
+            LOG.error("Error parsing the Ollama chat response!", e);
+            return true;
+        }
+    }
+
+    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
+            throws OllamaBaseException, IOException, InterruptedException {
+        streamObserver = new OllamaGenerateStreamObserver(streamHandler);
+        return super.callSync(body);
+    }
+}
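The new caller wires a stream handler into the shared endpoint-calling flow: parseResponseAndAddToBuffer() forwards every parsed chunk to an OllamaGenerateStreamObserver, and call() installs the observer before delegating to callSync(). A hedged usage sketch; the host, timeout and prompt are placeholders, and passing null for BasicAuth is assumed acceptable for an unauthenticated local server:

import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import io.github.ollama4j.models.request.OllamaGenerateEndpointCaller;
import io.github.ollama4j.models.response.OllamaResult;

public class GenerateCallerExample {
    public static void main(String[] args) throws Exception {
        OllamaGenerateEndpointCaller caller =
                new OllamaGenerateEndpointCaller("http://localhost:11434", null, 60, true);

        OllamaGenerateRequest request = new OllamaGenerateRequest("llama3", "Why is the sky blue?");
        request.setStream(true); // setter inherited from OllamaCommonRequest

        // The handler is invoked through OllamaGenerateStreamObserver as chunks arrive.
        OllamaResult result = caller.call(request, System.out::println);
        System.out.println(result);
    }
}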
@@ -1,6 +1,7 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
+package io.github.ollama4j.models.response;

 import java.util.List;

 import lombok.Data;

 @Data
@@ -1,10 +1,10 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
+package io.github.ollama4j.models.response;

-import java.time.LocalDateTime;
+import java.time.OffsetDateTime;

 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
-import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+import io.github.ollama4j.utils.Utils;
 import lombok.Data;

 @Data
@@ -13,9 +13,9 @@ public class Model {
   private String name;
   private String model;
   @JsonProperty("modified_at")
-  private LocalDateTime modifiedAt;
+  private OffsetDateTime modifiedAt;
   @JsonProperty("expires_at")
-  private LocalDateTime expiresAt;
+  private OffsetDateTime expiresAt;
   private String digest;
   private long size;
   @JsonProperty("details")
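Switching modified_at and expires_at from LocalDateTime to OffsetDateTime fits timestamps that carry an explicit zone offset. A minimal parsing sketch, assuming a Jackson ObjectMapper with the JavaTimeModule registered (the library's own Utils.getObjectMapper() configuration is not shown in this hunk, and the timestamp value is invented for illustration):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;

import java.time.OffsetDateTime;

public class OffsetDateTimeExample {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper().registerModule(new JavaTimeModule());

        // A timestamp with an explicit offset, as a "modified_at" field might carry.
        String json = "\"2024-05-25T14:35:25.056188263-07:00\"";
        OffsetDateTime modifiedAt = mapper.readValue(json, OffsetDateTime.class);

        System.out.println(modifiedAt.getOffset()); // -07:00
    }
}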
@@ -1,9 +1,9 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
+package io.github.ollama4j.models.response;

 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
-import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+import io.github.ollama4j.utils.Utils;
 import lombok.Data;

 @Data
@@ -1,9 +1,9 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
+package io.github.ollama4j.models.response;

 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
-import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+import io.github.ollama4j.utils.Utils;
 import lombok.Data;

 @Data
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
+package io.github.ollama4j.models.response;

 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import lombok.Data;
@@ -0,0 +1,123 @@
+package io.github.ollama4j.models.response;
+
+import io.github.ollama4j.exceptions.OllamaBaseException;
+import io.github.ollama4j.models.generate.OllamaGenerateRequest;
+import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
+import io.github.ollama4j.utils.Utils;
+import lombok.Data;
+import lombok.EqualsAndHashCode;
+import lombok.Getter;
+import lombok.Setter;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+import java.nio.charset.StandardCharsets;
+import java.time.Duration;
+
+@Data
+@EqualsAndHashCode(callSuper = true)
+@SuppressWarnings("unused")
+public class OllamaAsyncResultStreamer extends Thread {
+    private final HttpRequest.Builder requestBuilder;
+    private final OllamaGenerateRequest ollamaRequestModel;
+    private final OllamaResultStream stream = new OllamaResultStream();
+    private String completeResponse;
+
+
+    /**
+     * -- GETTER -- Returns the status of the request. Indicates if the request was successful or a
+     * failure. If the request was a failure, the `getResponse()` method will return the error
+     * message.
+     */
+    @Getter
+    private boolean succeeded;
+
+    @Setter
+    private long requestTimeoutSeconds;
+
+    /**
+     * -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama
+     * server.
+     */
+    @Getter
+    private int httpStatusCode;
+
+    /**
+     * -- GETTER -- Returns the response time in milliseconds.
+     */
+    @Getter
+    private long responseTime = 0;
+
+    public OllamaAsyncResultStreamer(
+            HttpRequest.Builder requestBuilder,
+            OllamaGenerateRequest ollamaRequestModel,
+            long requestTimeoutSeconds) {
+        this.requestBuilder = requestBuilder;
+        this.ollamaRequestModel = ollamaRequestModel;
+        this.completeResponse = "";
+        this.stream.add("");
+        this.requestTimeoutSeconds = requestTimeoutSeconds;
+    }
+
+    @Override
+    public void run() {
+        ollamaRequestModel.setStream(true);
+        HttpClient httpClient = HttpClient.newHttpClient();
+        try {
+            long startTime = System.currentTimeMillis();
+            HttpRequest request =
+                    requestBuilder
+                            .POST(
+                                    HttpRequest.BodyPublishers.ofString(
+                                            Utils.getObjectMapper().writeValueAsString(ollamaRequestModel)))
+                            .header("Content-Type", "application/json")
+                            .timeout(Duration.ofSeconds(requestTimeoutSeconds))
+                            .build();
+            HttpResponse<InputStream> response =
+                    httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
+            int statusCode = response.statusCode();
+            this.httpStatusCode = statusCode;
+
+            InputStream responseBodyStream = response.body();
+            try (BufferedReader reader =
+                         new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
+                String line;
+                StringBuilder responseBuffer = new StringBuilder();
+                while ((line = reader.readLine()) != null) {
+                    if (statusCode == 404) {
+                        OllamaErrorResponse ollamaResponseModel =
+                                Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
+                        stream.add(ollamaResponseModel.getError());
+                        responseBuffer.append(ollamaResponseModel.getError());
+                    } else {
+                        OllamaGenerateResponseModel ollamaResponseModel =
+                                Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
+                        String res = ollamaResponseModel.getResponse();
+                        stream.add(res);
+                        if (!ollamaResponseModel.isDone()) {
+                            responseBuffer.append(res);
+                        }
+                    }
+                }
+
+                this.succeeded = true;
+                this.completeResponse = responseBuffer.toString();
+                long endTime = System.currentTimeMillis();
+                responseTime = endTime - startTime;
+            }
+            if (statusCode != 200) {
+                throw new OllamaBaseException(this.completeResponse);
+            }
+        } catch (IOException | InterruptedException | OllamaBaseException e) {
+            this.succeeded = false;
+            this.completeResponse = "[FAILED] " + e.getMessage();
+        }
+    }
+
+}
+
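OllamaAsyncResultStreamer is a Thread: run() posts the generate request, pushes each parsed chunk into its OllamaResultStream, and records the status code, timing and complete response. A hedged consumption sketch; the endpoint URL, model, prompt and polling interval are placeholders, and the request-builder setup is abbreviated:

import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;

import java.net.URI;
import java.net.http.HttpRequest;

public class AsyncStreamerExample {
    public static void main(String[] args) throws InterruptedException {
        HttpRequest.Builder requestBuilder =
                HttpRequest.newBuilder(URI.create("http://localhost:11434/api/generate"));
        OllamaGenerateRequest request = new OllamaGenerateRequest("llama3", "Why is the sky blue?");

        OllamaAsyncResultStreamer streamer =
                new OllamaAsyncResultStreamer(requestBuilder, request, 60);
        streamer.start(); // run() streams the response in the background

        // Poll while the thread is alive; OllamaResultStream.poll() drains queued tokens.
        while (streamer.isAlive()) {
            System.out.print(streamer.getStream().poll());
            Thread.sleep(100);
        }
        System.out.print(streamer.getStream().poll()); // drain any remaining tokens
        System.out.println("\nHTTP " + streamer.getHttpStatusCode());
    }
}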
@@ -1,11 +1,11 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
+package io.github.ollama4j.models.response;

 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import lombok.Data;

 @Data
 @JsonIgnoreProperties(ignoreUnknown = true)
-public class OllamaErrorResponseModel {
+public class OllamaErrorResponse {

   private String error;
 }
@@ -1,6 +1,6 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
+package io.github.ollama4j.models.response;

-import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
+import static io.github.ollama4j.utils.Utils.getObjectMapper;

 import com.fasterxml.jackson.core.JsonProcessingException;
 import lombok.Data;
@@ -0,0 +1,18 @@
+package io.github.ollama4j.models.response;
+
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.Queue;
+
+public class OllamaResultStream extends LinkedList<String> implements Queue<String> {
+    @Override
+    public String poll() {
+        StringBuilder tokens = new StringBuilder();
+        Iterator<String> iterator = this.listIterator();
+        while (iterator.hasNext()) {
+            tokens.append(iterator.next());
+            iterator.remove();
+        }
+        return tokens.toString();
+    }
+}
@@ -0,0 +1,35 @@
+package io.github.ollama4j.tools;
+
+import io.github.ollama4j.models.response.OllamaResult;
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+@Data
+@NoArgsConstructor
+@AllArgsConstructor
+public class OllamaToolsResult {
+    private OllamaResult modelResult;
+    private Map<ToolFunctionCallSpec, Object> toolResults;
+
+    public List<ToolResult> getToolResults() {
+        List<ToolResult> results = new ArrayList<>();
+        for (Map.Entry<ToolFunctionCallSpec, Object> r : this.toolResults.entrySet()) {
+            results.add(new ToolResult(r.getKey().getName(), r.getKey().getArguments(), r.getValue()));
+        }
+        return results;
+    }
+
+    @Data
+    @NoArgsConstructor
+    @AllArgsConstructor
+    public static class ToolResult {
+        private String functionName;
+        private Map<String, Object> functionArguments;
+        private Object result;
+    }
+}
							
								
								
									
src/main/java/io/github/ollama4j/tools/ToolFunction.java (new file, +8)
@@ -0,0 +1,8 @@
+package io.github.ollama4j.tools;
+
+import java.util.Map;
+
+@FunctionalInterface
+public interface ToolFunction {
+    Object apply(Map<String, Object> arguments);
+}
@@ -0,0 +1,16 @@
+package io.github.ollama4j.tools;
+
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+import java.util.Map;
+
+@Data
+@AllArgsConstructor
+@NoArgsConstructor
+public class ToolFunctionCallSpec {
+    private String name;
+    private Map<String, Object> arguments;
+}
+
src/main/java/io/github/ollama4j/tools/ToolRegistry.java (new file, +16)
@@ -0,0 +1,16 @@
+package io.github.ollama4j.tools;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class ToolRegistry {
+    private final Map<String, ToolFunction> functionMap = new HashMap<>();
+
+    public ToolFunction getFunction(String name) {
+        return functionMap.get(name);
+    }
+
+    public void addFunction(String name, ToolFunction function) {
+        functionMap.put(name, function);
+    }
+}
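ToolFunction is a functional interface that receives the tool's argument map, and ToolRegistry is a plain name-to-function map. A small sketch; the "get-current-weather" name, its argument and the returned string are invented for illustration:

import io.github.ollama4j.tools.ToolFunction;
import io.github.ollama4j.tools.ToolRegistry;

import java.util.Map;

public class ToolRegistryExample {
    public static void main(String[] args) {
        ToolRegistry registry = new ToolRegistry();

        // Register a tool as a lambda; arguments arrive as a Map<String, Object>.
        ToolFunction weatherTool = arguments -> "It is sunny in " + arguments.get("city");
        registry.addFunction("get-current-weather", weatherTool);

        // Look the function up by name and invoke it.
        Object result = registry.getFunction("get-current-weather")
                .apply(Map.of("city", "Bengaluru"));
        System.out.println(result);
    }
}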
							
								
								
									
src/main/java/io/github/ollama4j/tools/Tools.java (new file, +113)
@@ -0,0 +1,113 @@
+package io.github.ollama4j.tools;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import io.github.ollama4j.utils.Utils;
+import lombok.Builder;
+import lombok.Data;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class Tools {
+    @Data
+    @Builder
+    public static class ToolSpecification {
+        private String functionName;
+        private String functionDescription;
+        private Map<String, PromptFuncDefinition.Property> properties;
+        private ToolFunction toolDefinition;
+    }
+
+    @Data
+    @JsonIgnoreProperties(ignoreUnknown = true)
+    public static class PromptFuncDefinition {
+        private String type;
+        private PromptFuncSpec function;
+
+        @Data
+        public static class PromptFuncSpec {
+            private String name;
+            private String description;
+            private Parameters parameters;
+        }
+
+        @Data
+        public static class Parameters {
+            private String type;
+            private Map<String, Property> properties;
+            private List<String> required;
+        }
+
+        @Data
+        @Builder
+        public static class Property {
+            private String type;
+            private String description;
+            @JsonProperty("enum")
+            @JsonInclude(JsonInclude.Include.NON_NULL)
+            private List<String> enumValues;
+            @JsonIgnore
+            private boolean required;
+        }
+    }
+
+    public static class PropsBuilder {
+        private final Map<String, PromptFuncDefinition.Property> props = new HashMap<>();
+
+        public PropsBuilder withProperty(String key, PromptFuncDefinition.Property property) {
+            props.put(key, property);
+            return this;
+        }
+
+        public Map<String, PromptFuncDefinition.Property> build() {
+            return props;
+        }
+    }
+
+    public static class PromptBuilder {
+        private final List<PromptFuncDefinition> tools = new ArrayList<>();
+
+        private String promptText;
+
+        public String build() throws JsonProcessingException {
+            return "[AVAILABLE_TOOLS] " + Utils.getObjectMapper().writeValueAsString(tools) + "[/AVAILABLE_TOOLS][INST] " + promptText + " [/INST]";
+        }
+
+        public PromptBuilder withPrompt(String prompt) throws JsonProcessingException {
+            promptText = prompt;
+            return this;
+        }
+
+        public PromptBuilder withToolSpecification(ToolSpecification spec) {
+            PromptFuncDefinition def = new PromptFuncDefinition();
+            def.setType("function");
+
+            PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec();
+            functionDetail.setName(spec.getFunctionName());
+            functionDetail.setDescription(spec.getFunctionDescription());
+
+            PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters();
+            parameters.setType("object");
+            parameters.setProperties(spec.getProperties());
+
+            List<String> requiredValues = new ArrayList<>();
+            for (Map.Entry<String, PromptFuncDefinition.Property> p : spec.getProperties().entrySet()) {
+                if (p.getValue().isRequired()) {
+                    requiredValues.add(p.getKey());
+                }
+            }
+            parameters.setRequired(requiredValues);
+            functionDetail.setParameters(parameters);
+            def.setFunction(functionDetail);
+
+            tools.add(def);
+            return this;
+        }
+    }
+}
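Tools.PromptBuilder serializes the supplied tool specifications into an [AVAILABLE_TOOLS] ... [INST] ... prompt string. A hedged sketch of building such a prompt from a ToolSpecification; the function name, property names, descriptions and the toolDefinition lambda are the same invented weather example as above:

import io.github.ollama4j.tools.Tools;

public class ToolPromptExample {
    public static void main(String[] args) throws Exception {
        Tools.ToolSpecification weatherSpec = Tools.ToolSpecification.builder()
                .functionName("get-current-weather")
                .functionDescription("Gets the current weather for a city")
                .properties(new Tools.PropsBuilder()
                        .withProperty("city", Tools.PromptFuncDefinition.Property.builder()
                                .type("string")
                                .description("Name of the city")
                                .required(true)
                                .build())
                        .build())
                .toolDefinition(arguments -> "It is sunny in " + arguments.get("city"))
                .build();

        String prompt = new Tools.PromptBuilder()
                .withToolSpecification(weatherSpec)
                .withPrompt("What is the weather in Bengaluru?")
                .build();
        System.out.println(prompt);
    }
}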
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.types;
+package io.github.ollama4j.types;
 
 /**
  * A class to provide constants for all the supported models by Ollama.
@@ -9,6 +9,9 @@ package io.github.amithkoujalgi.ollama4j.core.types;
 @SuppressWarnings("ALL")
 public class OllamaModelType {
     public static final String GEMMA = "gemma";
+    public static final String GEMMA2 = "gemma2";
+
+
     public static final String LLAMA2 = "llama2";
     public static final String LLAMA3 = "llama3";
     public static final String MISTRAL = "mistral";
@@ -30,6 +33,8 @@ public class OllamaModelType {
     public static final String ZEPHYR = "zephyr";
     public static final String OPENHERMES = "openhermes";
     public static final String QWEN = "qwen";
+
+    public static final String QWEN2 = "qwen2";
     public static final String WIZARDCODER = "wizardcoder";
     public static final String LLAMA2_CHINESE = "llama2-chinese";
     public static final String TINYLLAMA = "tinyllama";
@@ -79,4 +84,5 @@ public class OllamaModelType {
     public static final String NOTUS = "notus";
     public static final String DUCKDB_NSQL = "duckdb-nsql";
     public static final String ALL_MINILM = "all-minilm";
+    public static final String CODESTRAL = "codestral";
 }
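For reference, a minimal sketch (not part of the diff) showing the newly added constants in use with the renamed packages from this change set; the generate(model, prompt, raw, options) signature mirrors the updated integration tests further down and is otherwise assumed:

    import io.github.ollama4j.OllamaAPI;
    import io.github.ollama4j.models.response.OllamaResult;
    import io.github.ollama4j.types.OllamaModelType;
    import io.github.ollama4j.utils.OptionsBuilder;

    public class ModelTypeExample {
        public static void main(String[] args) throws Exception {
            OllamaAPI api = new OllamaAPI("http://localhost:11434");
            // GEMMA2, QWEN2 and CODESTRAL are the constants added in this hunk.
            OllamaResult result = api.generate(
                    OllamaModelType.GEMMA2,
                    "Say hello in one short sentence.",
                    false,                              // raw flag, as used in the new tests below
                    new OptionsBuilder().build());
            System.out.println(result.getResponse());
        }
    }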
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.utils;
+package io.github.ollama4j.utils;
 
 import java.io.IOException;
 
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.utils;
+package io.github.ollama4j.utils;
 
 import java.io.IOException;
 import java.util.Base64;
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.utils;
+package io.github.ollama4j.utils;
 
 import java.net.http.HttpRequest.BodyPublisher;
 import java.net.http.HttpRequest.BodyPublishers;
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.utils;
+package io.github.ollama4j.utils;
 
 import java.util.Map;
 import lombok.Data;
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.utils;
+package io.github.ollama4j.utils;
 
 import java.util.HashMap;
 
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.utils;
+package io.github.ollama4j.utils;
 
 /**
  * The {@code PromptBuilder} class is used to construct prompt texts for language models (LLMs). It
@@ -1,6 +1,6 @@
-package io.github.amithkoujalgi.ollama4j.core.utils;
+package io.github.ollama4j.utils;
 
-import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
+import io.github.ollama4j.OllamaAPI;
 
 import java.io.InputStream;
 import java.util.Scanner;
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.utils;
+package io.github.ollama4j.utils;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
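The hunks above are a straight package relocation (io.github.amithkoujalgi.ollama4j.core.* becomes io.github.ollama4j.*), so caller code should normally only need its imports updated. A minimal before/after sketch, assuming the public API is otherwise unchanged:

    // Old coordinates (removed by this change set):
    //   import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
    //   import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
    // New coordinates:
    import io.github.ollama4j.OllamaAPI;
    import io.github.ollama4j.utils.OptionsBuilder;

    public class MigrationExample {
        public static void main(String[] args) {
            // The calls themselves are unchanged; only the package names differ.
            OllamaAPI api = new OllamaAPI("http://localhost:11434");
            api.setRequestTimeoutSeconds(120);
            System.out.println("Options built: " + new OptionsBuilder().setTemperature(0.9f).build());
        }
    }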
@@ -1,393 +0,0 @@
-package io.github.amithkoujalgi.ollama4j.integrationtests;
-
-import static org.junit.jupiter.api.Assertions.*;
-
-import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
-import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
-import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
-import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
-import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
-import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
-import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
-import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
-import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
-import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.ConnectException;
-import java.net.URISyntaxException;
-import java.net.http.HttpConnectTimeoutException;
-import java.util.List;
-import java.util.Objects;
-import java.util.Properties;
-import lombok.Data;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Order;
-import org.junit.jupiter.api.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-class TestRealAPIs {
-
-  private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class);
-
-  OllamaAPI ollamaAPI;
-  Config config;
-
-  private File getImageFileFromClasspath(String fileName) {
-    ClassLoader classLoader = getClass().getClassLoader();
-    return new File(Objects.requireNonNull(classLoader.getResource(fileName)).getFile());
-  }
-
-  @BeforeEach
-  void setUp() {
-    config = new Config();
-    ollamaAPI = new OllamaAPI(config.getOllamaURL());
-    ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds());
-  }
-
-  @Test
-  @Order(1)
-  void testWrongEndpoint() {
-    OllamaAPI ollamaAPI = new OllamaAPI("http://wrong-host:11434");
-    assertThrows(ConnectException.class, ollamaAPI::listModels);
-  }
-
-  @Test
-  @Order(1)
-  void testEndpointReachability() {
-    try {
-      assertNotNull(ollamaAPI.listModels());
-    } catch (HttpConnectTimeoutException e) {
-      fail(e.getMessage());
-    } catch (Exception e) {
-      fail(e);
-    }
-  }
-
-  @Test
-  @Order(2)
-  void testListModels() {
-    testEndpointReachability();
-    try {
-      assertNotNull(ollamaAPI.listModels());
-      ollamaAPI.listModels().forEach(System.out::println);
-    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
-      fail(e);
-    }
-  }
-
-  @Test
-  @Order(2)
-  void testPullModel() {
-    testEndpointReachability();
-    try {
-      ollamaAPI.pullModel(config.getModel());
-      boolean found =
-          ollamaAPI.listModels().stream()
-              .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel()));
-      assertTrue(found);
-    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
-      fail(e);
-    }
-  }
-
-  @Test
-  @Order(3)
-  void testListDtails() {
-    testEndpointReachability();
-    try {
-      ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel());
-      assertNotNull(modelDetails);
-      System.out.println(modelDetails);
-    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
-      fail(e);
-    }
-  }
-
-  @Test
-  @Order(3)
-  void testAskModelWithDefaultOptions() {
-    testEndpointReachability();
-    try {
-      OllamaResult result =
-          ollamaAPI.generate(
-              config.getModel(),
-              "What is the capital of France? And what's France's connection with Mona Lisa?",
-              new OptionsBuilder().build());
-      assertNotNull(result);
-      assertNotNull(result.getResponse());
-      assertFalse(result.getResponse().isEmpty());
-    } catch (IOException | OllamaBaseException | InterruptedException e) {
-      fail(e);
-    }
-  }
-
-  @Test
-  @Order(3)
-  void testAskModelWithDefaultOptionsStreamed() {
-    testEndpointReachability();
-    try {
-
-      StringBuffer sb = new StringBuffer("");
-
-      OllamaResult result = ollamaAPI.generate(config.getModel(),
-          "What is the capital of France? And what's France's connection with Mona Lisa?",
-          new OptionsBuilder().build(), (s) -> {
-            LOG.info(s);
-            String substring = s.substring(sb.toString().length(), s.length());
-            LOG.info(substring);
-            sb.append(substring);
-          });
-
-      assertNotNull(result);
-      assertNotNull(result.getResponse());
-      assertFalse(result.getResponse().isEmpty());
-      assertEquals(sb.toString().trim(), result.getResponse().trim());
-    } catch (IOException | OllamaBaseException | InterruptedException e) {
-      fail(e);
-    }
-  }
-
-  @Test
-  @Order(3)
-  void testAskModelWithOptions() {
-    testEndpointReachability();
-    try {
-      OllamaResult result =
-          ollamaAPI.generate(
-              config.getModel(),
-              "What is the capital of France? And what's France's connection with Mona Lisa?",
-              new OptionsBuilder().setTemperature(0.9f).build());
-      assertNotNull(result);
-      assertNotNull(result.getResponse());
-      assertFalse(result.getResponse().isEmpty());
-    } catch (IOException | OllamaBaseException | InterruptedException e) {
-      fail(e);
-    }
-  }
-
-  @Test
-  @Order(3)
-  void testChat() {
-    testEndpointReachability();
-    try {
-      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
-      OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
-             .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!")
-             .withMessage(OllamaChatMessageRole.USER,"And what is the second larges city?")
-             .build();
-
-      OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
-      assertNotNull(chatResult);
-      assertFalse(chatResult.getResponse().isBlank());
-      assertEquals(4,chatResult.getChatHistory().size());
-    } catch (IOException | OllamaBaseException | InterruptedException e) {
-      fail(e);
-    }
-  }
-
-  @Test
-  @Order(3)
-  void testChatWithSystemPrompt() {
-    testEndpointReachability();
-    try {
-      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
-      OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM,
-          "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
-          .withMessage(OllamaChatMessageRole.USER,
-              "What is the capital of France? And what's France's connection with Mona Lisa?")
-          .build();
-
-      OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
-      assertNotNull(chatResult);
-      assertFalse(chatResult.getResponse().isBlank());
-      assertTrue(chatResult.getResponse().startsWith("NI"));
-      assertEquals(3, chatResult.getChatHistory().size());
-    } catch (IOException | OllamaBaseException | InterruptedException e) {
-      fail(e);
-    }
-  }
-
-  @Test
-  @Order(3)
-  void testChatWithStream() {
-    testEndpointReachability();
-    try {
-      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
-      OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
-              "What is the capital of France? And what's France's connection with Mona Lisa?")
-          .build();
-
-      StringBuffer sb = new StringBuffer("");
-
-      OllamaChatResult chatResult = ollamaAPI.chat(requestModel,(s) -> {
-        LOG.info(s);
-        String substring = s.substring(sb.toString().length(), s.length());
-        LOG.info(substring);
-        sb.append(substring);
-      });
-      assertNotNull(chatResult);
-      assertEquals(sb.toString().trim(), chatResult.getResponse().trim());
-    } catch (IOException | OllamaBaseException | InterruptedException e) {
-      fail(e);
-    }
-  }
-
-  @Test
-  @Order(3)
-  void testChatWithImageFromFileWithHistoryRecognition() {
-    testEndpointReachability();
-    try {
-      OllamaChatRequestBuilder builder =
-          OllamaChatRequestBuilder.getInstance(config.getImageModel());
-      OllamaChatRequestModel requestModel =
-          builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
-              List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
-
-      OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
-      assertNotNull(chatResult);
-      assertNotNull(chatResult.getResponse());
-
-      builder.reset();
-
-      requestModel =
-          builder.withMessages(chatResult.getChatHistory())
-            .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();
-
-      chatResult = ollamaAPI.chat(requestModel);
-      assertNotNull(chatResult);
-      assertNotNull(chatResult.getResponse());
-
-
-    } catch (IOException | OllamaBaseException | InterruptedException e) {
-      fail(e);
-    }
-  }
-
-  @Test
-  @Order(3)
-  void testChatWithImageFromURL() {
-    testEndpointReachability();
-    try {
-      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel());
-      OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
-      "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")
-             .build();
-
-      OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
-      assertNotNull(chatResult);
-    } catch (IOException | OllamaBaseException | InterruptedException e) {
-      fail(e);
-    }
-  }
-
-  @Test
-  @Order(3)
-  void testAskModelWithOptionsAndImageFiles() {
-    testEndpointReachability();
-    File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
-    try {
-      OllamaResult result =
-          ollamaAPI.generateWithImageFiles(
-              config.getImageModel(),
-              "What is in this image?",
-              List.of(imageFile),
-              new OptionsBuilder().build());
-      assertNotNull(result);
-      assertNotNull(result.getResponse());
-      assertFalse(result.getResponse().isEmpty());
-    } catch (IOException | OllamaBaseException | InterruptedException e) {
-      fail(e);
-    }
-  }
-
-  @Test
-  @Order(3)
-  void testAskModelWithOptionsAndImageFilesStreamed() {
-    testEndpointReachability();
-    File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
-    try {
-      StringBuffer sb = new StringBuffer("");
-
-      OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(),
-          "What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> {
-            LOG.info(s);
-            String substring = s.substring(sb.toString().length(), s.length());
-            LOG.info(substring);
-            sb.append(substring);
-          });
-      assertNotNull(result);
-      assertNotNull(result.getResponse());
-      assertFalse(result.getResponse().isEmpty());
-      assertEquals(sb.toString().trim(), result.getResponse().trim());
-    } catch (IOException | OllamaBaseException | InterruptedException e) {
-      fail(e);
-    }
-  }
-
-  @Test
-  @Order(3)
-  void testAskModelWithOptionsAndImageURLs() {
-    testEndpointReachability();
-    try {
-      OllamaResult result =
-          ollamaAPI.generateWithImageURLs(
-              config.getImageModel(),
-              "What is in this image?",
-              List.of(
-                  "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
-              new OptionsBuilder().build());
-      assertNotNull(result);
-      assertNotNull(result.getResponse());
-      assertFalse(result.getResponse().isEmpty());
-    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
-      fail(e);
-    }
-  }
-
-  @Test
-  @Order(3)
-  public void testEmbedding() {
-    testEndpointReachability();
-    try {
-      OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder
-          .getInstance(config.getModel(), "What is the capital of France?").build();
-
-      List<Double> embeddings = ollamaAPI.generateEmbeddings(request);
-
-      assertNotNull(embeddings);
-      assertFalse(embeddings.isEmpty());
-    } catch (IOException | OllamaBaseException | InterruptedException e) {
-      fail(e);
-    }
-  }
-}
-
-@Data
-class Config {
-  private String ollamaURL;
-  private String model;
-  private String imageModel;
-  private int requestTimeoutSeconds;
-
-  public Config() {
-    Properties properties = new Properties();
-    try (InputStream input =
-        getClass().getClassLoader().getResourceAsStream("test-config.properties")) {
-      if (input == null) {
-        throw new RuntimeException("Sorry, unable to find test-config.properties");
-      }
-      properties.load(input);
-      this.ollamaURL = properties.getProperty("ollama.url");
-      this.model = properties.getProperty("ollama.model");
-      this.imageModel = properties.getProperty("ollama.model.image");
-      this.requestTimeoutSeconds =
-          Integer.parseInt(properties.getProperty("ollama.request-timeout-seconds"));
-    } catch (IOException e) {
-      throw new RuntimeException("Error loading properties", e);
-    }
-  }
-}
@@ -1,163 +0,0 @@
-package io.github.amithkoujalgi.ollama4j.unittests;
-
-import static org.mockito.Mockito.*;
-
-import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
-import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
-import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultCallback;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
-import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
-import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.Collections;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mockito;
-
-class TestMockedAPIs {
-  @Test
-  void testPullModel() {
-    OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
-    String model = OllamaModelType.LLAMA2;
-    try {
-      doNothing().when(ollamaAPI).pullModel(model);
-      ollamaAPI.pullModel(model);
-      verify(ollamaAPI, times(1)).pullModel(model);
-    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Test
-  void testListModels() {
-    OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
-    try {
-      when(ollamaAPI.listModels()).thenReturn(new ArrayList<>());
-      ollamaAPI.listModels();
-      verify(ollamaAPI, times(1)).listModels();
-    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Test
-  void testCreateModel() {
-    OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
-    String model = OllamaModelType.LLAMA2;
-    String modelFilePath = "FROM llama2\nSYSTEM You are mario from Super Mario Bros.";
-    try {
-      doNothing().when(ollamaAPI).createModelWithModelFileContents(model, modelFilePath);
-      ollamaAPI.createModelWithModelFileContents(model, modelFilePath);
-      verify(ollamaAPI, times(1)).createModelWithModelFileContents(model, modelFilePath);
-    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Test
-  void testDeleteModel() {
-    OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
-    String model = OllamaModelType.LLAMA2;
-    try {
-      doNothing().when(ollamaAPI).deleteModel(model, true);
-      ollamaAPI.deleteModel(model, true);
-      verify(ollamaAPI, times(1)).deleteModel(model, true);
-    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Test
-  void testGetModelDetails() {
-    OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
-    String model = OllamaModelType.LLAMA2;
-    try {
-      when(ollamaAPI.getModelDetails(model)).thenReturn(new ModelDetail());
-      ollamaAPI.getModelDetails(model);
-      verify(ollamaAPI, times(1)).getModelDetails(model);
-    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Test
-  void testGenerateEmbeddings() {
-    OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
-    String model = OllamaModelType.LLAMA2;
-    String prompt = "some prompt text";
-    try {
-      when(ollamaAPI.generateEmbeddings(model, prompt)).thenReturn(new ArrayList<>());
-      ollamaAPI.generateEmbeddings(model, prompt);
-      verify(ollamaAPI, times(1)).generateEmbeddings(model, prompt);
-    } catch (IOException | OllamaBaseException | InterruptedException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Test
-  void testAsk() {
-    OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
-    String model = OllamaModelType.LLAMA2;
-    String prompt = "some prompt text";
-    OptionsBuilder optionsBuilder = new OptionsBuilder();
-    try {
-      when(ollamaAPI.generate(model, prompt, optionsBuilder.build()))
-          .thenReturn(new OllamaResult("", 0, 200));
-      ollamaAPI.generate(model, prompt, optionsBuilder.build());
-      verify(ollamaAPI, times(1)).generate(model, prompt, optionsBuilder.build());
-    } catch (IOException | OllamaBaseException | InterruptedException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Test
-  void testAskWithImageFiles() {
-    OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
-    String model = OllamaModelType.LLAMA2;
-    String prompt = "some prompt text";
-    try {
-      when(ollamaAPI.generateWithImageFiles(
-              model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
-          .thenReturn(new OllamaResult("", 0, 200));
-      ollamaAPI.generateWithImageFiles(
-          model, prompt, Collections.emptyList(), new OptionsBuilder().build());
-      verify(ollamaAPI, times(1))
-          .generateWithImageFiles(
-              model, prompt, Collections.emptyList(), new OptionsBuilder().build());
-    } catch (IOException | OllamaBaseException | InterruptedException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Test
-  void testAskWithImageURLs() {
-    OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
-    String model = OllamaModelType.LLAMA2;
-    String prompt = "some prompt text";
-    try {
-      when(ollamaAPI.generateWithImageURLs(
-              model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
-          .thenReturn(new OllamaResult("", 0, 200));
-      ollamaAPI.generateWithImageURLs(
-          model, prompt, Collections.emptyList(), new OptionsBuilder().build());
-      verify(ollamaAPI, times(1))
-          .generateWithImageURLs(
-              model, prompt, Collections.emptyList(), new OptionsBuilder().build());
-    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Test
-  void testAskAsync() {
-    OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
-    String model = OllamaModelType.LLAMA2;
-    String prompt = "some prompt text";
-    when(ollamaAPI.generateAsync(model, prompt))
-        .thenReturn(new OllamaAsyncResultCallback(null, null, 3));
-    ollamaAPI.generateAsync(model, prompt);
-    verify(ollamaAPI, times(1)).generateAsync(model, prompt);
-  }
-}
@@ -0,0 +1,395 @@
+package io.github.ollama4j.integrationtests;
+
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.exceptions.OllamaBaseException;
+import io.github.ollama4j.models.response.ModelDetail;
+import io.github.ollama4j.models.chat.OllamaChatRequest;
+import io.github.ollama4j.models.response.OllamaResult;
+import io.github.ollama4j.models.chat.OllamaChatMessageRole;
+import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
+import io.github.ollama4j.models.chat.OllamaChatResult;
+import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestBuilder;
+import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
+import io.github.ollama4j.utils.OptionsBuilder;
+import lombok.Data;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Order;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.ConnectException;
+import java.net.URISyntaxException;
+import java.net.http.HttpConnectTimeoutException;
+import java.util.List;
+import java.util.Objects;
+import java.util.Properties;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+class TestRealAPIs {
+
+    private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class);
+
+    OllamaAPI ollamaAPI;
+    Config config;
+
+    private File getImageFileFromClasspath(String fileName) {
+        ClassLoader classLoader = getClass().getClassLoader();
+        return new File(Objects.requireNonNull(classLoader.getResource(fileName)).getFile());
+    }
+
+    @BeforeEach
+    void setUp() {
+        config = new Config();
+        ollamaAPI = new OllamaAPI(config.getOllamaURL());
+        ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds());
+    }
+
+    @Test
+    @Order(1)
+    void testWrongEndpoint() {
+        OllamaAPI ollamaAPI = new OllamaAPI("http://wrong-host:11434");
+        assertThrows(ConnectException.class, ollamaAPI::listModels);
+    }
+
+    @Test
+    @Order(1)
+    void testEndpointReachability() {
+        try {
+            assertNotNull(ollamaAPI.listModels());
+        } catch (HttpConnectTimeoutException e) {
+            fail(e.getMessage());
+        } catch (Exception e) {
+            fail(e);
+        }
+    }
+
+    @Test
+    @Order(2)
+    void testListModels() {
+        testEndpointReachability();
+        try {
+            assertNotNull(ollamaAPI.listModels());
+            ollamaAPI.listModels().forEach(System.out::println);
+        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
+            fail(e);
+        }
+    }
+
+    @Test
+    @Order(2)
+    void testPullModel() {
+        testEndpointReachability();
+        try {
+            ollamaAPI.pullModel(config.getModel());
+            boolean found =
+                    ollamaAPI.listModels().stream()
+                            .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel()));
+            assertTrue(found);
+        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
+            fail(e);
+        }
+    }
+
+    @Test
+    @Order(3)
+    void testListDtails() {
+        testEndpointReachability();
+        try {
+            ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel());
+            assertNotNull(modelDetails);
+            System.out.println(modelDetails);
+        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
+            fail(e);
+        }
+    }
+
+    @Test
+    @Order(3)
+    void testAskModelWithDefaultOptions() {
+        testEndpointReachability();
+        try {
+            OllamaResult result =
+                    ollamaAPI.generate(
+                            config.getModel(),
+                            "What is the capital of France? And what's France's connection with Mona Lisa?",
+                            false,
+                            new OptionsBuilder().build());
+            assertNotNull(result);
+            assertNotNull(result.getResponse());
+            assertFalse(result.getResponse().isEmpty());
+        } catch (IOException | OllamaBaseException | InterruptedException e) {
+            fail(e);
+        }
+    }
+
+    @Test
+    @Order(3)
+    void testAskModelWithDefaultOptionsStreamed() {
+        testEndpointReachability();
+        try {
+            StringBuffer sb = new StringBuffer("");
+            OllamaResult result = ollamaAPI.generate(config.getModel(),
+                    "What is the capital of France? And what's France's connection with Mona Lisa?",
+                    false,
+                    new OptionsBuilder().build(), (s) -> {
+                        LOG.info(s);
+                        String substring = s.substring(sb.toString().length(), s.length());
+                        LOG.info(substring);
+                        sb.append(substring);
+                    });
+
+            assertNotNull(result);
+            assertNotNull(result.getResponse());
+            assertFalse(result.getResponse().isEmpty());
+            assertEquals(sb.toString().trim(), result.getResponse().trim());
+        } catch (IOException | OllamaBaseException | InterruptedException e) {
+            fail(e);
+        }
+    }
+
+    @Test
+    @Order(3)
+    void testAskModelWithOptions() {
+        testEndpointReachability();
+        try {
+            OllamaResult result =
+                    ollamaAPI.generate(
+                            config.getModel(),
+                            "What is the capital of France? And what's France's connection with Mona Lisa?",
+                            true,
+                            new OptionsBuilder().setTemperature(0.9f).build());
+            assertNotNull(result);
+            assertNotNull(result.getResponse());
+            assertFalse(result.getResponse().isEmpty());
+        } catch (IOException | OllamaBaseException | InterruptedException e) {
+            fail(e);
+        }
+    }
+
+    @Test
+    @Order(3)
+    void testChat() {
+        testEndpointReachability();
+        try {
+            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
+            OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
+                    .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!")
+                    .withMessage(OllamaChatMessageRole.USER, "And what is the second larges city?")
+                    .build();
+
+            OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
+            assertNotNull(chatResult);
+            assertFalse(chatResult.getResponse().isBlank());
+            assertEquals(4, chatResult.getChatHistory().size());
+        } catch (IOException | OllamaBaseException | InterruptedException e) {
+            fail(e);
+        }
+    }
+
+    @Test
+    @Order(3)
+    void testChatWithSystemPrompt() {
+        testEndpointReachability();
+        try {
+            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
+            OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM,
+                            "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
+                    .withMessage(OllamaChatMessageRole.USER,
+                            "What is the capital of France? And what's France's connection with Mona Lisa?")
+                    .build();
+
+            OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
+            assertNotNull(chatResult);
+            assertFalse(chatResult.getResponse().isBlank());
+            assertTrue(chatResult.getResponse().startsWith("NI"));
+            assertEquals(3, chatResult.getChatHistory().size());
+        } catch (IOException | OllamaBaseException | InterruptedException e) {
+            fail(e);
+        }
+    }
+
+    @Test
+    @Order(3)
+    void testChatWithStream() {
+        testEndpointReachability();
+        try {
+            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
+            OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER,
+                            "What is the capital of France? And what's France's connection with Mona Lisa?")
+                    .build();
+
+            StringBuffer sb = new StringBuffer("");
+
+            OllamaChatResult chatResult = ollamaAPI.chat(requestModel, (s) -> {
+                LOG.info(s);
+                String substring = s.substring(sb.toString().length(), s.length());
+                LOG.info(substring);
+                sb.append(substring);
+            });
+            assertNotNull(chatResult);
+            assertEquals(sb.toString().trim(), chatResult.getResponse().trim());
+        } catch (IOException | OllamaBaseException | InterruptedException e) {
+            fail(e);
+        }
+    }
+
+    @Test
+    @Order(3)
+    void testChatWithImageFromFileWithHistoryRecognition() {
+        testEndpointReachability();
+        try {
+            OllamaChatRequestBuilder builder =
+                    OllamaChatRequestBuilder.getInstance(config.getImageModel());
+            OllamaChatRequest requestModel =
+                    builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
+                            List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build();
+
+            OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
+            assertNotNull(chatResult);
+            assertNotNull(chatResult.getResponse());
+
+            builder.reset();
+
+            requestModel =
+                    builder.withMessages(chatResult.getChatHistory())
+                            .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();
+
+            chatResult = ollamaAPI.chat(requestModel);
+            assertNotNull(chatResult);
+            assertNotNull(chatResult.getResponse());
+
+
+        } catch (IOException | OllamaBaseException | InterruptedException e) {
+            fail(e);
+        }
+    }
+
+    @Test
+    @Order(3)
+    void testChatWithImageFromURL() {
+        testEndpointReachability();
+        try {
+            OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel());
+            OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
+                            "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")
+                    .build();
+
+            OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
+            assertNotNull(chatResult);
+        } catch (IOException | OllamaBaseException | InterruptedException e) {
+            fail(e);
+        }
+    }
+
+    @Test
+    @Order(3)
+    void testAskModelWithOptionsAndImageFiles() {
+        testEndpointReachability();
+        File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
+        try {
+            OllamaResult result =
+                    ollamaAPI.generateWithImageFiles(
+                            config.getImageModel(),
+                            "What is in this image?",
+                            List.of(imageFile),
+                            new OptionsBuilder().build());
+            assertNotNull(result);
+            assertNotNull(result.getResponse());
+            assertFalse(result.getResponse().isEmpty());
+        } catch (IOException | OllamaBaseException | InterruptedException e) {
+            fail(e);
					        }
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    @Test
 | 
				
			||||||
 | 
					    @Order(3)
 | 
				
			||||||
 | 
					    void testAskModelWithOptionsAndImageFilesStreamed() {
 | 
				
			||||||
 | 
					        testEndpointReachability();
 | 
				
			||||||
 | 
					        File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
 | 
				
			||||||
 | 
					        try {
 | 
				
			||||||
 | 
					            StringBuffer sb = new StringBuffer("");
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(),
 | 
				
			||||||
 | 
					                    "What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> {
 | 
				
			||||||
 | 
					                        LOG.info(s);
 | 
				
			||||||
 | 
					                        String substring = s.substring(sb.toString().length(), s.length());
 | 
				
			||||||
 | 
					                        LOG.info(substring);
 | 
				
			||||||
 | 
					                        sb.append(substring);
 | 
				
			||||||
 | 
					                    });
 | 
				
			||||||
 | 
					            assertNotNull(result);
 | 
				
			||||||
 | 
					            assertNotNull(result.getResponse());
 | 
				
			||||||
 | 
					            assertFalse(result.getResponse().isEmpty());
 | 
				
			||||||
 | 
					            assertEquals(sb.toString().trim(), result.getResponse().trim());
 | 
				
			||||||
 | 
					        } catch (IOException | OllamaBaseException | InterruptedException e) {
 | 
				
			||||||
 | 
					            fail(e);
 | 
				
			||||||
 | 
					        }
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    @Test
 | 
				
			||||||
 | 
					    @Order(3)
 | 
				
			||||||
 | 
					    void testAskModelWithOptionsAndImageURLs() {
 | 
				
			||||||
 | 
					        testEndpointReachability();
 | 
				
			||||||
 | 
					        try {
 | 
				
			||||||
 | 
					            OllamaResult result =
 | 
				
			||||||
 | 
					                    ollamaAPI.generateWithImageURLs(
 | 
				
			||||||
 | 
					                            config.getImageModel(),
 | 
				
			||||||
 | 
					                            "What is in this image?",
 | 
				
			||||||
 | 
					                            List.of(
 | 
				
			||||||
 | 
					                                    "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
 | 
				
			||||||
 | 
					                            new OptionsBuilder().build());
 | 
				
			||||||
 | 
					            assertNotNull(result);
 | 
				
			||||||
 | 
					            assertNotNull(result.getResponse());
 | 
				
			||||||
 | 
					            assertFalse(result.getResponse().isEmpty());
 | 
				
			||||||
 | 
					        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
 | 
				
			||||||
 | 
					            fail(e);
 | 
				
			||||||
 | 
					        }
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    @Test
 | 
				
			||||||
 | 
					    @Order(3)
 | 
				
			||||||
 | 
					    public void testEmbedding() {
 | 
				
			||||||
 | 
					        testEndpointReachability();
 | 
				
			||||||
 | 
					        try {
 | 
				
			||||||
 | 
					            OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder
 | 
				
			||||||
 | 
					                    .getInstance(config.getModel(), "What is the capital of France?").build();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            List<Double> embeddings = ollamaAPI.generateEmbeddings(request);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            assertNotNull(embeddings);
 | 
				
			||||||
 | 
					            assertFalse(embeddings.isEmpty());
 | 
				
			||||||
 | 
					        } catch (IOException | OllamaBaseException | InterruptedException e) {
 | 
				
			||||||
 | 
					            fail(e);
 | 
				
			||||||
 | 
					        }
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					@Data
 | 
				
			||||||
 | 
					class Config {
 | 
				
			||||||
 | 
					    private String ollamaURL;
 | 
				
			||||||
 | 
					    private String model;
 | 
				
			||||||
 | 
					    private String imageModel;
 | 
				
			||||||
 | 
					    private int requestTimeoutSeconds;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    public Config() {
 | 
				
			||||||
 | 
					        Properties properties = new Properties();
 | 
				
			||||||
 | 
					        try (InputStream input =
 | 
				
			||||||
 | 
					                     getClass().getClassLoader().getResourceAsStream("test-config.properties")) {
 | 
				
			||||||
 | 
					            if (input == null) {
 | 
				
			||||||
 | 
					                throw new RuntimeException("Sorry, unable to find test-config.properties");
 | 
				
			||||||
 | 
					            }
 | 
				
			||||||
 | 
					            properties.load(input);
 | 
				
			||||||
 | 
					            this.ollamaURL = properties.getProperty("ollama.url");
 | 
				
			||||||
 | 
					            this.model = properties.getProperty("ollama.model");
 | 
				
			||||||
 | 
					            this.imageModel = properties.getProperty("ollama.model.image");
 | 
				
			||||||
 | 
					            this.requestTimeoutSeconds =
 | 
				
			||||||
 | 
					                    Integer.parseInt(properties.getProperty("ollama.request-timeout-seconds"));
 | 
				
			||||||
 | 
					        } catch (IOException e) {
 | 
				
			||||||
 | 
					            throw new RuntimeException("Error loading properties", e);
 | 
				
			||||||
 | 
					        }
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
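For context, the following is a minimal sketch (not part of the diff) of how the Config class above is presumably wired into the ollamaAPI instance that these integration tests call. The setUp() method and field names are illustrative assumptions; Config and its Lombok-generated getters come from the code shown above, while OllamaAPI(String host) and setRequestTimeoutSeconds(...) are assumed from the ollama4j client API.

import io.github.ollama4j.OllamaAPI;

class IntegrationTestSetupSketch {
    static OllamaAPI ollamaAPI;
    static Config config;

    // Hypothetical one-time setup, e.g. run from a @BeforeAll method in the real test class.
    static void setUp() {
        config = new Config();                                   // loads test-config.properties
        ollamaAPI = new OllamaAPI(config.getOllamaURL());        // host taken from ollama.url
        ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds()); // ollama.request-timeout-seconds
    }
}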
							
								
								
									
src/test/java/io/github/ollama4j/unittests/TestMockedAPIs.java (new file, 164 lines)
@@ -0,0 +1,164 @@
package io.github.ollama4j.unittests;

import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.response.ModelDetail;
import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.types.OllamaModelType;
import io.github.ollama4j.utils.OptionsBuilder;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;

import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;

import static org.mockito.Mockito.*;

class TestMockedAPIs {
    @Test
    void testPullModel() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        try {
            doNothing().when(ollamaAPI).pullModel(model);
            ollamaAPI.pullModel(model);
            verify(ollamaAPI, times(1)).pullModel(model);
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testListModels() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        try {
            when(ollamaAPI.listModels()).thenReturn(new ArrayList<>());
            ollamaAPI.listModels();
            verify(ollamaAPI, times(1)).listModels();
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testCreateModel() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        String modelFilePath = "FROM llama2\nSYSTEM You are mario from Super Mario Bros.";
        try {
            doNothing().when(ollamaAPI).createModelWithModelFileContents(model, modelFilePath);
            ollamaAPI.createModelWithModelFileContents(model, modelFilePath);
            verify(ollamaAPI, times(1)).createModelWithModelFileContents(model, modelFilePath);
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testDeleteModel() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        try {
            doNothing().when(ollamaAPI).deleteModel(model, true);
            ollamaAPI.deleteModel(model, true);
            verify(ollamaAPI, times(1)).deleteModel(model, true);
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testGetModelDetails() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        try {
            when(ollamaAPI.getModelDetails(model)).thenReturn(new ModelDetail());
            ollamaAPI.getModelDetails(model);
            verify(ollamaAPI, times(1)).getModelDetails(model);
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testGenerateEmbeddings() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        String prompt = "some prompt text";
        try {
            when(ollamaAPI.generateEmbeddings(model, prompt)).thenReturn(new ArrayList<>());
            ollamaAPI.generateEmbeddings(model, prompt);
            verify(ollamaAPI, times(1)).generateEmbeddings(model, prompt);
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testAsk() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        String prompt = "some prompt text";
        OptionsBuilder optionsBuilder = new OptionsBuilder();
        try {
            when(ollamaAPI.generate(model, prompt, false, optionsBuilder.build()))
                    .thenReturn(new OllamaResult("", 0, 200));
            ollamaAPI.generate(model, prompt, false, optionsBuilder.build());
            verify(ollamaAPI, times(1)).generate(model, prompt, false, optionsBuilder.build());
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testAskWithImageFiles() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        String prompt = "some prompt text";
        try {
            when(ollamaAPI.generateWithImageFiles(
                    model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
                    .thenReturn(new OllamaResult("", 0, 200));
            ollamaAPI.generateWithImageFiles(
                    model, prompt, Collections.emptyList(), new OptionsBuilder().build());
            verify(ollamaAPI, times(1))
                    .generateWithImageFiles(
                            model, prompt, Collections.emptyList(), new OptionsBuilder().build());
        } catch (IOException | OllamaBaseException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testAskWithImageURLs() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        String prompt = "some prompt text";
        try {
            when(ollamaAPI.generateWithImageURLs(
                    model, prompt, Collections.emptyList(), new OptionsBuilder().build()))
                    .thenReturn(new OllamaResult("", 0, 200));
            ollamaAPI.generateWithImageURLs(
                    model, prompt, Collections.emptyList(), new OptionsBuilder().build());
            verify(ollamaAPI, times(1))
                    .generateWithImageURLs(
                            model, prompt, Collections.emptyList(), new OptionsBuilder().build());
        } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void testAskAsync() {
        OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
        String model = OllamaModelType.LLAMA2;
        String prompt = "some prompt text";
        when(ollamaAPI.generateAsync(model, prompt, false))
                .thenReturn(new OllamaAsyncResultStreamer(null, null, 3));
        ollamaAPI.generateAsync(model, prompt, false);
        verify(ollamaAPI, times(1)).generateAsync(model, prompt, false);
    }
}
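Every test in TestMockedAPIs follows the same Mockito stub-then-verify pattern against a mocked OllamaAPI. Below is a minimal, self-contained sketch of that pattern; the Greeter interface is purely hypothetical and used only for illustration.

import static org.mockito.Mockito.*;

// Hypothetical interface, used only to illustrate the stub-then-verify pattern
// that the tests above apply to a mocked OllamaAPI.
interface Greeter {
    String greet(String name);
}

class StubThenVerifySketch {
    void demo() {
        Greeter greeter = mock(Greeter.class);            // create the mock
        when(greeter.greet("world")).thenReturn("hello"); // stub the expected call
        greeter.greet("world");                           // exercise the mock
        verify(greeter, times(1)).greet("world");         // assert it was called exactly once
    }
}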
@@ -1,35 +1,35 @@
-package io.github.amithkoujalgi.ollama4j.unittests.jackson;
+package io.github.ollama4j.unittests.jackson;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.fail;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+import io.github.ollama4j.utils.Utils;
 
-public abstract class AbstractRequestSerializationTest<T> {
+public abstract class AbstractSerializationTest<T> {
 
     protected ObjectMapper mapper = Utils.getObjectMapper();
 
-    protected String serializeRequest(T req) {
+    protected String serialize(T obj) {
         try {
-            return mapper.writeValueAsString(req);
+            return mapper.writeValueAsString(obj);
         } catch (JsonProcessingException e) {
             fail("Could not serialize request!", e);
             return null;
         }
     }
 
-    protected T deserializeRequest(String jsonRequest, Class<T> requestClass) {
+    protected T deserialize(String jsonObject, Class<T> deserializationClass) {
         try {
-            return mapper.readValue(jsonRequest, requestClass);
+            return mapper.readValue(jsonObject, deserializationClass);
         } catch (JsonProcessingException e) {
-            fail("Could not deserialize jsonRequest!", e);
+            fail("Could not deserialize jsonObject!", e);
             return null;
         }
     }
 
-    protected void assertEqualsAfterUnmarshalling(T unmarshalledRequest,
+    protected void assertEqualsAfterUnmarshalling(T unmarshalledObject,
         T req) {
-        assertEquals(req, unmarshalledRequest);
+        assertEquals(req, unmarshalledObject);
     }
 }
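The renamed serialize and deserialize helpers above are thin wrappers around Jackson's ObjectMapper. The following standalone sketch (not part of the diff) shows the same round trip with a deliberately trivial POJO; the Point class is hypothetical, while ObjectMapper, writeValueAsString and readValue are the Jackson APIs the helper delegates to.

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JacksonRoundTripSketch {
    // Hypothetical POJO, used only for this illustration.
    public static class Point {
        public int x;
        public int y;
    }

    public static void main(String[] args) throws JsonProcessingException {
        ObjectMapper mapper = new ObjectMapper();
        Point original = new Point();
        original.x = 1;
        original.y = 2;
        String json = mapper.writeValueAsString(original);   // serialize  -> {"x":1,"y":2}
        Point back = mapper.readValue(json, Point.class);     // deserialize back into a Point
        System.out.println(json + " -> x=" + back.x + ", y=" + back.y);
    }
}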
@@ -1,20 +1,20 @@
-package io.github.amithkoujalgi.ollama4j.unittests.jackson;
+package io.github.ollama4j.unittests.jackson;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import java.io.File;
 import java.util.List;
 
+import io.github.ollama4j.models.chat.OllamaChatRequest;
 import org.json.JSONObject;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
-import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
+import io.github.ollama4j.models.chat.OllamaChatMessageRole;
-import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
+import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
-import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
+import io.github.ollama4j.utils.OptionsBuilder;
-import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
 
-public class TestChatRequestSerialization extends AbstractRequestSerializationTest<OllamaChatRequestModel>{
+public class TestChatRequestSerialization extends AbstractSerializationTest<OllamaChatRequest> {
 
     private OllamaChatRequestBuilder builder;
 
@@ -25,32 +25,32 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
 
     @Test
     public void testRequestOnlyMandatoryFields() {
-        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build();
+        OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build();
-        String jsonRequest = serializeRequest(req);
+        String jsonRequest = serialize(req);
-        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
+        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
     }
 
     @Test
     public void testRequestMultipleMessages() {
-        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt")
+        OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt")
         .withMessage(OllamaChatMessageRole.USER, "Some prompt")
         .build();
-        String jsonRequest = serializeRequest(req);
+        String jsonRequest = serialize(req);
-        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
+        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
     }
 
     @Test
     public void testRequestWithMessageAndImage() {
-        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
+        OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
                 List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
-        String jsonRequest = serializeRequest(req);
+        String jsonRequest = serialize(req);
-        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
+        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
     }
 
     @Test
     public void testRequestWithOptions() {
         OptionsBuilder b = new OptionsBuilder();
-        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
+        OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
             .withOptions(b.setMirostat(1).build())
             .withOptions(b.setTemperature(1L).build())
             .withOptions(b.setMirostatEta(1L).build())
@@ -61,8 +61,8 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
             .withOptions(b.setTopP(1).build())
             .build();
 
-        String jsonRequest = serializeRequest(req);
+        String jsonRequest = serialize(req);
-        OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaChatRequestModel.class);
+        OllamaChatRequest deserializeRequest = deserialize(jsonRequest, OllamaChatRequest.class);
         assertEqualsAfterUnmarshalling(deserializeRequest, req);
         assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
         assertEquals(1.0, deserializeRequest.getOptions().get("temperature"));
@@ -76,10 +76,10 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
 
     @Test
     public void testWithJsonFormat() {
-        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
+        OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
                 .withGetJsonResponse().build();
 
-        String jsonRequest = serializeRequest(req);
+        String jsonRequest = serialize(req);
         // no jackson deserialization as format property is not boolean ==> omit as deserialization
         // of request is never used in real code anyways
         JSONObject jsonObject = new JSONObject(jsonRequest);
@@ -89,25 +89,25 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe
 
     @Test
     public void testWithTemplate() {
-        OllamaChatRequestModel req = builder.withTemplate("System Template")
+        OllamaChatRequest req = builder.withTemplate("System Template")
             .build();
-        String jsonRequest = serializeRequest(req);
+        String jsonRequest = serialize(req);
-        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaChatRequestModel.class), req);
+        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequest.class), req);
     }
 
     @Test
     public void testWithStreaming() {
-        OllamaChatRequestModel req = builder.withStreaming().build();
+        OllamaChatRequest req = builder.withStreaming().build();
-        String jsonRequest = serializeRequest(req);
+        String jsonRequest = serialize(req);
-        assertEquals(deserializeRequest(jsonRequest, OllamaChatRequestModel.class).isStream(), true);
+        assertEquals(deserialize(jsonRequest, OllamaChatRequest.class).isStream(), true);
     }
 
     @Test
     public void testWithKeepAlive() {
         String expectedKeepAlive = "5m";
-        OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive)
+        OllamaChatRequest req = builder.withKeepAlive(expectedKeepAlive)
             .build();
-        String jsonRequest = serializeRequest(req);
+        String jsonRequest = serialize(req);
-        assertEquals(deserializeRequest(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive);
+        assertEquals(deserialize(jsonRequest, OllamaChatRequest.class).getKeepAlive(), expectedKeepAlive);
     }
 }
@@ -1,13 +1,13 @@
-package io.github.amithkoujalgi.ollama4j.unittests.jackson;
+package io.github.ollama4j.unittests.jackson;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
-import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
+import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
-import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
+import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestBuilder;
-import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
+import io.github.ollama4j.utils.OptionsBuilder;
 
-public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializationTest<OllamaEmbeddingsRequestModel>{
+public class TestEmbeddingsRequestSerialization extends AbstractSerializationTest<OllamaEmbeddingsRequestModel> {
 
         private OllamaEmbeddingsRequestBuilder builder;
 
@@ -19,8 +19,8 @@ public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializa
             @Test
     public void testRequestOnlyMandatoryFields() {
         OllamaEmbeddingsRequestModel req = builder.build();
-        String jsonRequest = serializeRequest(req);
+        String jsonRequest = serialize(req);
-        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class), req);
+        assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class), req);
     }
 
         @Test
@@ -29,8 +29,8 @@ public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializa
             OllamaEmbeddingsRequestModel req = builder
                     .withOptions(b.setMirostat(1).build()).build();
 
-            String jsonRequest = serializeRequest(req);
+            String jsonRequest = serialize(req);
-            OllamaEmbeddingsRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class);
+            OllamaEmbeddingsRequestModel deserializeRequest = deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class);
             assertEqualsAfterUnmarshalling(deserializeRequest, req);
             assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
         }
@@ -1,17 +1,17 @@
-package io.github.amithkoujalgi.ollama4j.unittests.jackson;
+package io.github.ollama4j.unittests.jackson;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
+import io.github.ollama4j.models.generate.OllamaGenerateRequest;
 import org.json.JSONObject;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
 
-import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestBuilder;
+import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
-import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
+import io.github.ollama4j.utils.OptionsBuilder;
-import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
 
-public class TestGenerateRequestSerialization extends AbstractRequestSerializationTest<OllamaGenerateRequestModel>{
+public class TestGenerateRequestSerialization extends AbstractSerializationTest<OllamaGenerateRequest> {
 
     private OllamaGenerateRequestBuilder builder;
 
@@ -22,30 +22,30 @@ public class TestGenerateRequestSerialization extends AbstractRequestSerializati
 
     @Test
     public void testRequestOnlyMandatoryFields() {
-        OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build();
+        OllamaGenerateRequest req = builder.withPrompt("Some prompt").build();
 
-        String jsonRequest = serializeRequest(req);
+        String jsonRequest = serialize(req);
-        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class), req);
+        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequest.class), req);
     }
 
     @Test
     public void testRequestWithOptions() {
         OptionsBuilder b = new OptionsBuilder();
-        OllamaGenerateRequestModel req =
+        OllamaGenerateRequest req =
                 builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build();
 
-        String jsonRequest = serializeRequest(req);
+        String jsonRequest = serialize(req);
-        OllamaGenerateRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class);
+        OllamaGenerateRequest deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequest.class);
         assertEqualsAfterUnmarshalling(deserializeRequest, req);
         assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
     }
 
     @Test
     public void testWithJsonFormat() {
-        OllamaGenerateRequestModel req =
+        OllamaGenerateRequest req =
                 builder.withPrompt("Some prompt").withGetJsonResponse().build();
 
-        String jsonRequest = serializeRequest(req);
+        String jsonRequest = serialize(req);
         // no jackson deserialization as format property is not boolean ==> omit as deserialization
         // of request is never used in real code anyways
         JSONObject jsonObject = new JSONObject(jsonRequest);
Some files were not shown because too many files have changed in this diff.