Mirror of https://github.com/amithkoujalgi/ollama4j.git (synced 2025-10-31 16:40:41 +01:00)

Compare commits (231 commits)
	| Author | SHA1 | Date | |
|---|---|---|---|
|   | 3ab9e4c283 | ||
|   | 2db6a22cc7 | ||
|   | cc69341620 | ||
|   | 4589a9032c | ||
|   | da273402b5 | ||
|   | cfa8aa14d7 | ||
|   | bc4e8303aa | ||
|   | f2f740a2a0 | ||
|   | 4cbb783a61 | ||
|   | 5c9e0b7d8a | ||
|   | 2f8577a24d | ||
|   | 02116b7025 | ||
|   | f3778f8786 | ||
|   | c6141634db | ||
|   | d9f98ad901 | ||
|   | 79d97445b8 | ||
|   | 1c40697c96 | ||
|   | f03026abb3 | ||
|   | 63a6e81ac2 | ||
|   | 76cad0f584 | ||
|   | bee2908d1e | ||
|   | 8a4c9fd969 | ||
|   | d470f940b0 | ||
|   | df402efaba | ||
|   | 677362abbf | ||
|   | 81689be194 | ||
|   | fd93036d08 | ||
|   | c9b05a725b | ||
|   | a4e1b4afe9 | ||
|   | 3d21813abb | ||
|   | 383d0f56ca | ||
|   | af1b213a76 | ||
|   | fed89a9643 | ||
|   | fd32aa33ff | ||
|   | b8a13e89b1 | ||
|   | c8f27edd6e | ||
|   | 5a936d8174 | ||
|   | 9b5ddbf4c4 | ||
|   | 7c233d5734 | ||
|   | e85aeae6e0 | ||
|   | a05052e095 | ||
|   | 10eb803e26 | ||
|   | bd2da8fdda | ||
|   | b0bb082bec | ||
|   | 81f564ef7f | ||
|   | 006b52f3db | ||
|   | 16634e60e4 | ||
|   | db8b73075b | ||
|   | dc9f79959a | ||
|   | 88f6d00763 | ||
|   | fd3a989a49 | ||
|   | 7580c6a549 | ||
|   | 9e6503d84b | ||
|   | ee21f7fdd8 | ||
|   | ecc295f484 | ||
|   | c528fef5fc | ||
|   | 38f1bda105 | ||
|   | d8a703503a | ||
|   | dd9ba7c937 | ||
|   | cf52c9610c | ||
|   | e8d709e99a | ||
|   | 51fbedad69 | ||
|   | 953605fa73 | ||
|   | 30bfdd9c6d | ||
|   | 91ee6cb4c1 | ||
|   | 8ef6fac28e | ||
|   | d9e3860123 | ||
|   | 515d1f0399 | ||
|   | be549430c5 | ||
|   | 4744315d45 | ||
|   | 8eea19a539 | ||
|   | b5801d84e0 | ||
|   | 165d04b1bb | ||
|   | 16d2160b52 | ||
|   | e39c47b8e1 | ||
|   | bb0785140b | ||
|   | e33ad1a1e3 | ||
|   | cd60c506cb | ||
|   | b55925df28 | ||
|   | 3a9b8c309d | ||
|   | bf07159522 | ||
|   | f8ca4d041d | ||
|   | 9c6a55f7b0 | ||
|   | 2866d83a2f | ||
|   | 45e5d07581 | ||
|   | 3a264cb6bb | ||
|   | e1b9d42771 | ||
|   | 1a086c37c0 | ||
|   | 54edba144c | ||
|   | 3ed3187ba9 | ||
|   | b7cd81a7f5 | ||
|   | e750c2d7f9 | ||
|   | 62f16131f3 | ||
|   | 2cbaf12d7c | ||
|   | e2d555d404 | ||
|   | c296b34174 | ||
|   | e8f99f28ec | ||
|   | 250b1abc79 | ||
|   | 42b15ad93f | ||
|   | 6f7a714bae | ||
|   | 92618e5084 | ||
|   | 391a9242c3 | ||
|   | e1b6dc3b54 | ||
|   | 04124cf978 | ||
|   | e4e717b747 | ||
|   | 10d2a8f5ff | ||
|   | 899fa38805 | ||
|   | 2df878c953 | ||
|   | 78a5eedc8f | ||
|   | 364f961ee2 | ||
|   | b21aa6add2 | ||
|   | ec4abd1c2d | ||
|   | 9900ae92fb | ||
|   | fa20daf6e5 | ||
|   | 44949c0559 | ||
|   | e88711a017 | ||
|   | 32169ded18 | ||
|   | 4b2d566fd9 | ||
|   | fb4b7a7ce5 | ||
|   | 18f27775b0 | ||
|   | cb462ad05a | ||
|   | 1eec22ca1a | ||
|   | c1f3c51f88 | ||
|   | 7dd556293f | ||
|   | ee50131ce4 | ||
|   | 2cd47dbfaa | ||
|   | e5296c1067 | ||
|   | 0f00f05e3d | ||
|   | 976a3b82e5 | ||
|   | ba26d620c4 | ||
|   | e45246a767 | ||
|   | 7336668f0c | ||
|   | 11701fb222 | ||
|   | b1ec12c4e9 | ||
|   | d0b0a0fc97 | ||
|   | 20774fca6b | ||
|   | 9c46b510d8 | ||
|   | 9d887b60a8 | ||
|   | 63d4de4e24 | ||
|   | 9224d2da06 | ||
|   | a10692e2f1 | ||
|   | b0c152a42e | ||
|   | f44767e023 | ||
|   | aadef0a57c | ||
|   | 777ee7ffe0 | ||
|   | dcf1d0bdbc | ||
|   | 13b7111a42 | ||
|   | 09442d37a3 | ||
|   | 1e66bdb07f | ||
|   | b423090db9 | ||
|   | a32d94efbf | ||
|   | 31f8302849 | ||
|   | 6487756764 | ||
|   | abb76ad867 | ||
|   | cf4e7a96e8 | ||
|   | 0f414f71a3 | ||
|   | 2b700fdad8 | ||
|   | 06c5daa253 | ||
|   | 91aab6cbd1 | ||
|   | f38a00ebdc | ||
|   | 0f73ea75ab | ||
|   | 8fe869afdb | ||
|   | 2d274c4f5b | ||
|   | 713a3239a4 | ||
|   | a9e7958d44 | ||
|   | f38e84053f | ||
|   | 7eb16b7ba0 | ||
|   | 5a3889d8ee | ||
|   | 2c52f4d0bb | ||
|   | 32c4231eb5 | ||
|   | e9621f054d | ||
|   | b41b62220c | ||
|   | c89440cbca | ||
|   | 1aeb555a53 | ||
|   | 9aff3ec5d9 | ||
|   | b4eaf0cfb5 | ||
|   | 199cb6082d | ||
|   | 37bfe26a6d | ||
|   | 3769386539 | ||
|   | 84a6e57f42 | ||
|   | 14d2474ee9 | ||
|   | ca613ed80a | ||
|   | bbcd458849 | ||
|   | bc885894f8 | ||
|   | bc83df6971 | ||
|   | 43f43c9f81 | ||
|   | 65f00defcf | ||
|   | d716b81342 | ||
|   | 272ba445f6 | ||
|   | d9816d8869 | ||
|   | 874736eb16 | ||
|   | 9c16ccbf81 | ||
|   | 40a3aa31dc | ||
|   | 90669b611b | ||
|   | f10c7ac725 | ||
|   | 38dca3cd0d | ||
|   | 44bb35b168 | ||
|   | 9832caf503 | ||
|   | 0c4e8e306e | ||
|   | 075416eb9c | ||
|   | 4260fbbc32 | ||
|   | 0bec697a86 | ||
|   | 4ca6eef8fd | ||
|   | a635dd9be2 | ||
|   | 14982011d9 | ||
|   | 65d852fdc9 | ||
|   | d483c23c81 | ||
|   | 273b1e47ca | ||
|   | 5c5cdba4cd | ||
|   | 24674ea483 | ||
|   | 5d3a975e4c | ||
|   | ad670c3c62 | ||
|   | f9063484f3 | ||
|   | 5e2a07ad41 | ||
|   | 00a3e51a93 | ||
|   | bc20468f28 | ||
|   | c7ac50a805 | ||
|   | f8cd7bc013 | ||
|   | 3469bf314b | ||
|   | 9636807819 | ||
|   | 455251d1d4 | ||
|   | ec00ffae7f | ||
|   | d969c7ad46 | ||
|   | 02bf769188 | ||
|   | 1c8a6b4f2a | ||
|   | 60fe5d6ffb | ||
|   | 327ae7437f | ||
|   | 795b9f2b9b | ||
|   | 54da069e68 | ||
|   | bfc5cebac1 | ||
|   | d46b1d48d8 | 
							
								
								
									
.github/workflows/gh-mvn-publish.yml (new file, vendored, 58 lines)
									
								
							| @@ -0,0 +1,58 @@ | |||||||
|  | name: Release Artifacts to GitHub Maven Packages | ||||||
|  |  | ||||||
|  | on: | ||||||
|  |   release: | ||||||
|  |     types: [ created ] | ||||||
|  |  | ||||||
|  | jobs: | ||||||
|  |   build: | ||||||
|  |  | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     permissions: | ||||||
|  |       contents: read | ||||||
|  |       packages: write | ||||||
|  |  | ||||||
|  |     steps: | ||||||
|  |       - uses: actions/checkout@v3 | ||||||
|  |       - name: Set up JDK 17 | ||||||
|  |         uses: actions/setup-java@v3 | ||||||
|  |         with: | ||||||
|  |           java-version: '17' | ||||||
|  |           distribution: 'temurin' | ||||||
|  |           server-id: github | ||||||
|  |           settings-path: ${{ github.workspace }} | ||||||
|  |  | ||||||
|  |       - name: maven-settings-xml-action | ||||||
|  |         uses: whelk-io/maven-settings-xml-action@v22 | ||||||
|  |         with: | ||||||
|  |           servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]' | ||||||
|  |  | ||||||
|  |       - name: Find and Replace | ||||||
|  |         uses: jacobtomlinson/gha-find-replace@v3 | ||||||
|  |         with: | ||||||
|  |           find: "ollama4j-revision" | ||||||
|  |           replace: ${{ github.ref_name }} | ||||||
|  |           regex: false | ||||||
|  |  | ||||||
|  |       - name: Find and Replace | ||||||
|  |         uses: jacobtomlinson/gha-find-replace@v3 | ||||||
|  |         with: | ||||||
|  |           find: "mvn-repo-id" | ||||||
|  |           replace: github | ||||||
|  |           regex: false | ||||||
|  |  | ||||||
|  |       - name: Import GPG key | ||||||
|  |         uses: crazy-max/ghaction-import-gpg@v6 | ||||||
|  |         with: | ||||||
|  |           gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} | ||||||
|  |           passphrase: ${{ secrets.GPG_PASSPHRASE }} | ||||||
|  |       - name: List keys | ||||||
|  |         run: gpg -K | ||||||
|  |  | ||||||
|  |       - name: Build with Maven | ||||||
|  |         run: mvn --file pom.xml -U clean package -Punit-tests | ||||||
|  |  | ||||||
|  |       - name: Publish to GitHub Packages Apache Maven | ||||||
|  |         run: mvn deploy -Punit-tests -s $GITHUB_WORKSPACE/settings.xml -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=github -Drepo.user=${{ secrets.GH_MVN_USER }} -Drepo.pass=${{ secrets.GH_MVN_PASS }} -DaltDeploymentRepository=github::default::https://maven.pkg.github.com/ollama4j/ollama4j | ||||||
|  |         env: | ||||||
|  |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||||
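The two Find and Replace steps in this workflow rewrite literal placeholder strings in the checked-out sources before the build and deploy steps run. The sketch below shows the kind of pom.xml fragments they would target; the placeholder names ("ollama4j-revision", "mvn-repo-id") and the package URL are taken from the workflow, while the surrounding POM structure is assumed rather than copied from the repository.

```xml
<!-- Sketch only: placeholder names and the package URL come from the workflow
     above; the surrounding POM structure is an assumption. -->
<project>
    <groupId>io.github.ollama4j</groupId>
    <artifactId>ollama4j</artifactId>
    <!-- "ollama4j-revision" is replaced with the release tag (github.ref_name) -->
    <version>ollama4j-revision</version>

    <distributionManagement>
        <repository>
            <!-- "mvn-repo-id" is replaced with "github", matching the server id
                 injected into settings.xml by the maven-settings-xml-action step -->
            <id>mvn-repo-id</id>
            <url>https://maven.pkg.github.com/ollama4j/ollama4j</url>
        </repository>
    </distributionManagement>
</project>
```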
							
								
								
									
.github/workflows/maven-publish.yml (vendored, 121 changed lines)
									
									
								
							| @@ -1,68 +1,95 @@ | |||||||
| # This workflow will build a package using Maven and then publish it to GitHub packages when a release is created | # This workflow will build a package using Maven and then publish it to GitHub packages when a release is created | ||||||
| # For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path | # For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path | ||||||
|  |  | ||||||
| name: Test and Publish Package | name: Release Artifacts to Maven Central | ||||||
|  |  | ||||||
| #on: |  | ||||||
| #  release: |  | ||||||
| #    types: [ "created" ] |  | ||||||
|  |  | ||||||
| on: | on: | ||||||
|   push: |   release: | ||||||
|     branches: [ "main" ] |     types: [ created ] | ||||||
|   workflow_dispatch: |  | ||||||
|  |  | ||||||
|  | #on: | ||||||
|  | #  pull_request: | ||||||
|  | #    types: [ opened, reopened ] | ||||||
|  | #    branches: [ "main" ] | ||||||
|  |  | ||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   build: |   build: | ||||||
|  |  | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|  |  | ||||||
|     permissions: |     permissions: | ||||||
|       contents: write |       contents: write | ||||||
|       packages: write |       packages: write | ||||||
|  |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v3 |       - uses: actions/checkout@v3 | ||||||
|       - name: Set up JDK 11 |  | ||||||
|  |       - name: Set up JDK 17 | ||||||
|         uses: actions/setup-java@v3 |         uses: actions/setup-java@v3 | ||||||
|         with: |         with: | ||||||
|           java-version: '11' |           java-version: '17' | ||||||
|           distribution: 'adopt-hotspot' |           distribution: 'temurin' | ||||||
|           server-id: github # Value of the distributionManagement/repository/id field of the pom.xml |           server-id: github # Value of the distributionManagement/repository/id field of the pom.xml | ||||||
|           settings-path: ${{ github.workspace }} # location for the settings.xml file |           settings-path: ${{ github.workspace }} # location for the settings.xml file | ||||||
|       - name: Build with Maven |  | ||||||
|         run: mvn --file pom.xml -U clean package -Punit-tests |       - name: maven-settings-xml-action | ||||||
|       - name: Set up Apache Maven Central (Overwrite settings.xml) |         uses: whelk-io/maven-settings-xml-action@v22 | ||||||
|         uses: actions/setup-java@v3 |  | ||||||
|         with: # running setup-java again overwrites the settings.xml |  | ||||||
|           java-version: '11' |  | ||||||
|           distribution: 'adopt-hotspot' |  | ||||||
|           cache: 'maven' |  | ||||||
|           server-id: ossrh |  | ||||||
|           server-username: MAVEN_USERNAME |  | ||||||
|           server-password: MAVEN_PASSWORD |  | ||||||
|           gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }} |  | ||||||
|           gpg-passphrase: MAVEN_GPG_PASSPHRASE |  | ||||||
|       - name: Set up Maven cache |  | ||||||
|         uses: actions/cache@v3 |  | ||||||
|         with: |         with: | ||||||
|           path: ~/.m2/repository |           servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]' | ||||||
|           key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} |  | ||||||
|           restore-keys: | |       - name: Import GPG key | ||||||
|             ${{ runner.os }}-maven- |         uses: crazy-max/ghaction-import-gpg@v6 | ||||||
|       - name: Build |         with: | ||||||
|         run: mvn -B -ntp clean install |           gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} | ||||||
|       - name: Upload coverage reports to Codecov |           passphrase: ${{ secrets.GPG_PASSPHRASE }} | ||||||
|         uses: codecov/codecov-action@v3 |       - name: List keys | ||||||
|  |         run: gpg -K | ||||||
|  |  | ||||||
|  |       - name: Find and Replace | ||||||
|  |         uses: jacobtomlinson/gha-find-replace@v3 | ||||||
|  |         with: | ||||||
|  |           find: "ollama4j-revision" | ||||||
|  |           replace: ${{ github.ref_name }} | ||||||
|  |           regex: false | ||||||
|  |  | ||||||
|  |       - name: Find and Replace | ||||||
|  |         uses: jacobtomlinson/gha-find-replace@v3 | ||||||
|  |         with: | ||||||
|  |           find: "mvn-repo-id" | ||||||
|  |           replace: central | ||||||
|  |           regex: false | ||||||
|  |  | ||||||
|  |       - name: Publish to Maven Central | ||||||
|  |         run: mvn deploy -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=central -Drepo.user=${{ secrets.MVN_USER }} -Drepo.pass=${{ secrets.MVN_PASS }} | ||||||
|  |  | ||||||
|  |       - name: Upload Release Asset - JAR | ||||||
|  |         uses: actions/upload-release-asset@v1 | ||||||
|         env: |         env: | ||||||
|           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||||
|       - name: Publish to GitHub Packages Apache Maven |         with: | ||||||
|         #        if: > |           upload_url: ${{ github.event.release.upload_url }} | ||||||
|         #          github.event_name != 'pull_request' && |           asset_path: target/ollama4j-${{ github.ref_name }}.jar | ||||||
|         #          github.ref_name == 'main' && |           asset_name: ollama4j-${{ github.ref_name }}.jar | ||||||
|         #          contains(github.event.head_commit.message, 'release') |           asset_content_type: application/x-jar | ||||||
|         run: | |  | ||||||
|           git config --global user.email "koujalgi.amith@gmail.com" |       - name: Upload Release Asset - Javadoc JAR | ||||||
|           git config --global user.name "amithkoujalgi" |         uses: actions/upload-release-asset@v1 | ||||||
|           mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform |  | ||||||
|         env: |         env: | ||||||
|           MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }} |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||||
|           MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }} |         with: | ||||||
|           MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} |           upload_url: ${{ github.event.release.upload_url }} | ||||||
|  |           asset_path: target/ollama4j-${{ github.ref_name }}-javadoc.jar | ||||||
|  |           asset_name: ollama4j-${{ github.ref_name }}-javadoc.jar | ||||||
|  |           asset_content_type: application/x-jar | ||||||
|  |  | ||||||
|  |       - name: Upload Release Asset - Sources JAR | ||||||
|  |         uses: actions/upload-release-asset@v1 | ||||||
|  |         env: | ||||||
|  |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||||
|  |         with: | ||||||
|  |           upload_url: ${{ github.event.release.upload_url }} | ||||||
|  |           asset_path: target/ollama4j-${{ github.ref_name }}-sources.jar | ||||||
|  |           asset_name: ollama4j-${{ github.ref_name }}-sources.jar | ||||||
|  |           asset_content_type: application/x-jar | ||||||
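The maven-settings-xml-action step in this workflow writes a Maven settings.xml whose server credentials are resolved from the `repo.id`, `repo.user`, and `repo.pass` properties passed on the `mvn deploy` command line. Roughly the file it would generate is sketched below; the server entry is taken verbatim from the workflow's `servers` input, while the exact output format of the action is assumed.

```xml
<!-- Approximate settings.xml written by whelk-io/maven-settings-xml-action for the
     servers input used above; the exact output format is assumed. -->
<settings>
    <servers>
        <server>
            <!-- ${repo.id}, ${repo.user} and ${repo.pass} resolve from the
                 -Drepo.id / -Drepo.user / -Drepo.pass flags passed to "mvn deploy" -->
            <id>${repo.id}</id>
            <username>${repo.user}</username>
            <password>${repo.pass}</password>
        </server>
    </servers>
</settings>
```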
							
								
								
									
.github/workflows/publish-docs.yml (vendored, 12 changed lines)
									
									
								
							| @@ -2,9 +2,8 @@ | |||||||
| name: Deploy Docs to GH Pages | name: Deploy Docs to GH Pages | ||||||
|  |  | ||||||
| on: | on: | ||||||
|   # Runs on pushes targeting the default branch |   release: | ||||||
|   push: |     types: [ created ] | ||||||
|     branches: [ "main" ] |  | ||||||
|  |  | ||||||
|   # Allows you to run this workflow manually from the Actions tab |   # Allows you to run this workflow manually from the Actions tab | ||||||
|   workflow_dispatch: |   workflow_dispatch: | ||||||
| @@ -47,6 +46,13 @@ jobs: | |||||||
|       - run: cd docs && npm ci |       - run: cd docs && npm ci | ||||||
|       - run: cd docs && npm run build |       - run: cd docs && npm run build | ||||||
|  |  | ||||||
|  |       - name: Find and Replace | ||||||
|  |         uses: jacobtomlinson/gha-find-replace@v3 | ||||||
|  |         with: | ||||||
|  |           find: "ollama4j-revision" | ||||||
|  |           replace: ${{ github.ref_name }} | ||||||
|  |           regex: false | ||||||
|  |  | ||||||
|       - name: Build with Maven |       - name: Build with Maven | ||||||
|         run: mvn --file pom.xml -U clean package && cp -r ./target/apidocs/. ./docs/build/apidocs |         run: mvn --file pom.xml -U clean package && cp -r ./target/apidocs/. ./docs/build/apidocs | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
.github/workflows/publish-javadoc.yml (vendored, deleted, 52 lines)
									
									
								
							| @@ -1,52 +0,0 @@ | |||||||
| # Simple workflow for deploying static content to GitHub Pages |  | ||||||
| name: Deploy Javadoc content to Pages |  | ||||||
|  |  | ||||||
| on: |  | ||||||
|   # Runs on pushes targeting the default branch |  | ||||||
|   push: |  | ||||||
|     branches: [ "none" ] |  | ||||||
|  |  | ||||||
|   # Allows you to run this workflow manually from the Actions tab |  | ||||||
|   workflow_dispatch: |  | ||||||
|  |  | ||||||
| # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages |  | ||||||
| permissions: |  | ||||||
|   contents: read |  | ||||||
|   pages: write |  | ||||||
|   id-token: write |  | ||||||
|   packages: write |  | ||||||
| # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. |  | ||||||
| # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. |  | ||||||
| concurrency: |  | ||||||
|   group: "pages" |  | ||||||
|   cancel-in-progress: false |  | ||||||
|  |  | ||||||
| jobs: |  | ||||||
|   # Single deploy job since we're just deploying |  | ||||||
|   deploy: |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|  |  | ||||||
|     environment: |  | ||||||
|       name: github-pages |  | ||||||
|       url: ${{ steps.deployment.outputs.page_url }} |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v3 |  | ||||||
|       - name: Set up JDK 11 |  | ||||||
|         uses: actions/setup-java@v3 |  | ||||||
|         with: |  | ||||||
|           java-version: '11' |  | ||||||
|           distribution: 'adopt-hotspot' |  | ||||||
|           server-id: github # Value of the distributionManagement/repository/id field of the pom.xml |  | ||||||
|           settings-path: ${{ github.workspace }} # location for the settings.xml file |  | ||||||
|       - name: Build with Maven |  | ||||||
|         run: mvn --file pom.xml -U clean package |  | ||||||
|       - name: Setup Pages |  | ||||||
|         uses: actions/configure-pages@v3 |  | ||||||
|       - name: Upload artifact |  | ||||||
|         uses: actions/upload-pages-artifact@v2 |  | ||||||
|         with: |  | ||||||
|           # Upload entire repository |  | ||||||
|           path: './target/apidocs/.' |  | ||||||
|       - name: Deploy to GitHub Pages |  | ||||||
|         id: deployment |  | ||||||
|         uses: actions/deploy-pages@v2 |  | ||||||
							
								
								
									
CODE_OF_CONDUCT.md (new file, 128 lines)
									
								
							| @@ -0,0 +1,128 @@ | |||||||
|  | # Contributor Covenant Code of Conduct | ||||||
|  |  | ||||||
|  | ## Our Pledge | ||||||
|  |  | ||||||
|  | We as members, contributors, and leaders pledge to make participation in our | ||||||
|  | community a harassment-free experience for everyone, regardless of age, body | ||||||
|  | size, visible or invisible disability, ethnicity, sex characteristics, gender | ||||||
|  | identity and expression, level of experience, education, socio-economic status, | ||||||
|  | nationality, personal appearance, race, religion, or sexual identity | ||||||
|  | and orientation. | ||||||
|  |  | ||||||
|  | We pledge to act and interact in ways that contribute to an open, welcoming, | ||||||
|  | diverse, inclusive, and healthy community. | ||||||
|  |  | ||||||
|  | ## Our Standards | ||||||
|  |  | ||||||
|  | Examples of behavior that contributes to a positive environment for our | ||||||
|  | community include: | ||||||
|  |  | ||||||
|  | * Demonstrating empathy and kindness toward other people | ||||||
|  | * Being respectful of differing opinions, viewpoints, and experiences | ||||||
|  | * Giving and gracefully accepting constructive feedback | ||||||
|  | * Accepting responsibility and apologizing to those affected by our mistakes, | ||||||
|  |   and learning from the experience | ||||||
|  | * Focusing on what is best not just for us as individuals, but for the | ||||||
|  |   overall community | ||||||
|  |  | ||||||
|  | Examples of unacceptable behavior include: | ||||||
|  |  | ||||||
|  | * The use of sexualized language or imagery, and sexual attention or | ||||||
|  |   advances of any kind | ||||||
|  | * Trolling, insulting or derogatory comments, and personal or political attacks | ||||||
|  | * Public or private harassment | ||||||
|  | * Publishing others' private information, such as a physical or email | ||||||
|  |   address, without their explicit permission | ||||||
|  | * Other conduct which could reasonably be considered inappropriate in a | ||||||
|  |   professional setting | ||||||
|  |  | ||||||
|  | ## Enforcement Responsibilities | ||||||
|  |  | ||||||
|  | Community leaders are responsible for clarifying and enforcing our standards of | ||||||
|  | acceptable behavior and will take appropriate and fair corrective action in | ||||||
|  | response to any behavior that they deem inappropriate, threatening, offensive, | ||||||
|  | or harmful. | ||||||
|  |  | ||||||
|  | Community leaders have the right and responsibility to remove, edit, or reject | ||||||
|  | comments, commits, code, wiki edits, issues, and other contributions that are | ||||||
|  | not aligned to this Code of Conduct, and will communicate reasons for moderation | ||||||
|  | decisions when appropriate. | ||||||
|  |  | ||||||
|  | ## Scope | ||||||
|  |  | ||||||
|  | This Code of Conduct applies within all community spaces, and also applies when | ||||||
|  | an individual is officially representing the community in public spaces. | ||||||
|  | Examples of representing our community include using an official e-mail address, | ||||||
|  | posting via an official social media account, or acting as an appointed | ||||||
|  | representative at an online or offline event. | ||||||
|  |  | ||||||
|  | ## Enforcement | ||||||
|  |  | ||||||
|  | Instances of abusive, harassing, or otherwise unacceptable behavior may be | ||||||
|  | reported to the community leaders responsible for enforcement at | ||||||
|  | koujalgi.amith@gmail.com. | ||||||
|  | All complaints will be reviewed and investigated promptly and fairly. | ||||||
|  |  | ||||||
|  | All community leaders are obligated to respect the privacy and security of the | ||||||
|  | reporter of any incident. | ||||||
|  |  | ||||||
|  | ## Enforcement Guidelines | ||||||
|  |  | ||||||
|  | Community leaders will follow these Community Impact Guidelines in determining | ||||||
|  | the consequences for any action they deem in violation of this Code of Conduct: | ||||||
|  |  | ||||||
|  | ### 1. Correction | ||||||
|  |  | ||||||
|  | **Community Impact**: Use of inappropriate language or other behavior deemed | ||||||
|  | unprofessional or unwelcome in the community. | ||||||
|  |  | ||||||
|  | **Consequence**: A private, written warning from community leaders, providing | ||||||
|  | clarity around the nature of the violation and an explanation of why the | ||||||
|  | behavior was inappropriate. A public apology may be requested. | ||||||
|  |  | ||||||
|  | ### 2. Warning | ||||||
|  |  | ||||||
|  | **Community Impact**: A violation through a single incident or series | ||||||
|  | of actions. | ||||||
|  |  | ||||||
|  | **Consequence**: A warning with consequences for continued behavior. No | ||||||
|  | interaction with the people involved, including unsolicited interaction with | ||||||
|  | those enforcing the Code of Conduct, for a specified period of time. This | ||||||
|  | includes avoiding interactions in community spaces as well as external channels | ||||||
|  | like social media. Violating these terms may lead to a temporary or | ||||||
|  | permanent ban. | ||||||
|  |  | ||||||
|  | ### 3. Temporary Ban | ||||||
|  |  | ||||||
|  | **Community Impact**: A serious violation of community standards, including | ||||||
|  | sustained inappropriate behavior. | ||||||
|  |  | ||||||
|  | **Consequence**: A temporary ban from any sort of interaction or public | ||||||
|  | communication with the community for a specified period of time. No public or | ||||||
|  | private interaction with the people involved, including unsolicited interaction | ||||||
|  | with those enforcing the Code of Conduct, is allowed during this period. | ||||||
|  | Violating these terms may lead to a permanent ban. | ||||||
|  |  | ||||||
|  | ### 4. Permanent Ban | ||||||
|  |  | ||||||
|  | **Community Impact**: Demonstrating a pattern of violation of community | ||||||
|  | standards, including sustained inappropriate behavior,  harassment of an | ||||||
|  | individual, or aggression toward or disparagement of classes of individuals. | ||||||
|  |  | ||||||
|  | **Consequence**: A permanent ban from any sort of public interaction within | ||||||
|  | the community. | ||||||
|  |  | ||||||
|  | ## Attribution | ||||||
|  |  | ||||||
|  | This Code of Conduct is adapted from the [Contributor Covenant][homepage], | ||||||
|  | version 2.0, available at | ||||||
|  | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. | ||||||
|  |  | ||||||
|  | Community Impact Guidelines were inspired by [Mozilla's code of conduct | ||||||
|  | enforcement ladder](https://github.com/mozilla/diversity). | ||||||
|  |  | ||||||
|  | [homepage]: https://www.contributor-covenant.org | ||||||
|  |  | ||||||
|  | For answers to common questions about this code of conduct, see the FAQ at | ||||||
|  | https://www.contributor-covenant.org/faq. Translations are available at | ||||||
|  | https://www.contributor-covenant.org/translations. | ||||||
							
								
								
									
Makefile (4 changed lines)
									
									
									
									
									
								
							| @@ -1,10 +1,10 @@ | |||||||
| build: | build: | ||||||
| 	mvn -B clean install | 	mvn -B clean install | ||||||
|  |  | ||||||
| ut: | unit-tests: | ||||||
| 	mvn clean test -Punit-tests | 	mvn clean test -Punit-tests | ||||||
|  |  | ||||||
| it: | integration-tests: | ||||||
| 	mvn clean verify -Pintegration-tests | 	mvn clean verify -Pintegration-tests | ||||||
|  |  | ||||||
| doxygen: | doxygen: | ||||||
|   | |||||||
							
								
								
									
README.md (231 changed lines)
									
									
									
									
									
								
							| @@ -1,31 +1,46 @@ | |||||||
| ### Ollama4j | ### Ollama4j | ||||||
|  |  | ||||||
| <img src='https://raw.githubusercontent.com/amithkoujalgi/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon"> | <p align="center"> | ||||||
|  |   <img src='https://raw.githubusercontent.com/ollama4j/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon"> | ||||||
|  | </p> | ||||||
|  |  | ||||||
|  |  | ||||||
| A Java library (wrapper/binding) for [Ollama](https://ollama.ai/) server. | A Java library (wrapper/binding) for [Ollama](https://ollama.ai/) server. | ||||||
|  |  | ||||||
| Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/). | Find more details on the [website](https://ollama4j.github.io/ollama4j/). | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
| [](https://codecov.io/gh/amithkoujalgi/ollama4j) | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  | [//]: # () | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  | [](https://codecov.io/gh/ollama4j/ollama4j) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
| ## Table of Contents | ## Table of Contents | ||||||
|  |  | ||||||
| - [How does it work?](#how-does-it-work) | - [How does it work?](#how-does-it-work) | ||||||
| - [Requirements](#requirements) | - [Requirements](#requirements) | ||||||
| - [Installation](#installation) | - [Installation](#installation) | ||||||
| - [API Spec](#api-spec) | - [API Spec](https://ollama4j.github.io/ollama4j/docs/category/apis---model-management) | ||||||
| - [Demo APIs](#try-out-the-apis-with-ollama-server) | - [Javadoc](https://ollama4j.github.io/ollama4j/apidocs/) | ||||||
| - [Development](#development) | - [Development](#development) | ||||||
| - [Contributions](#get-involved) | - [Contributions](#get-involved) | ||||||
| - [References](#references) | - [References](#references) | ||||||
| @@ -46,44 +61,132 @@ Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/). | |||||||
|  |  | ||||||
| #### Requirements | #### Requirements | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
| [![][ollama-shield]][ollama] **Or** [![][ollama-docker-shield]][ollama-docker] | [![][ollama-shield]][ollama-link] **Or** [![][ollama-docker-shield]][ollama-docker] | ||||||
|  |  | ||||||
| [ollama]: https://ollama.ai/ | [ollama-link]: https://ollama.ai/ | ||||||
|  |  | ||||||
| [ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=just-the-message&labelColor=gray | [ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=for-the-badge&labelColor=gray | ||||||
|  |  | ||||||
| [ollama-docker]: https://hub.docker.com/r/ollama/ollama | [ollama-docker]: https://hub.docker.com/r/ollama/ollama | ||||||
|  |  | ||||||
| [ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=just-the-message&labelColor=gray | [ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=for-the-badge&labelColor=gray | ||||||
|  |  | ||||||
| #### Installation | ## Installation | ||||||
|  |  | ||||||
| In your Maven project, add this dependency: | > [!NOTE] | ||||||
|  | > We have migrated the package repository from Maven Central to the GitHub package repository due to technical issues with | ||||||
|  | > publishing. Please update your repository settings to get the latest version of Ollama4j. | ||||||
|  | > | ||||||
|  | > Track the releases [here](https://github.com/ollama4j/ollama4j/releases) and update the dependency version | ||||||
|  | > according to your requirements. | ||||||
|  |  | ||||||
|  | ### For Maven | ||||||
|  |  | ||||||
|  | #### Using [Maven Central](https://central.sonatype.com/) | ||||||
|  |  | ||||||
|  | [![][ollama4j-mvn-releases-shield]][ollama4j-mvn-releases-link] | ||||||
|  |  | ||||||
|  | [ollama4j-mvn-releases-link]: https://github.com/ollama4j/ollama4j/releases | ||||||
|  |  | ||||||
|  | [ollama4j-mvn-releases-shield]: https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20Maven%20Central%20 | ||||||
|  |  | ||||||
|  | In your Maven project, add this dependency: | ||||||
|  |  | ||||||
| ```xml | ```xml | ||||||
|  |  | ||||||
| <dependency> | <dependency> | ||||||
|     <groupId>io.github.amithkoujalgi</groupId> |     <groupId>io.github.ollama4j</groupId> | ||||||
|     <artifactId>ollama4j</artifactId> |     <artifactId>ollama4j</artifactId> | ||||||
|     <version>1.0.29</version> |     <version>1.0.78</version> | ||||||
| </dependency> | </dependency> | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| Latest release: | #### Using GitHub's Maven Package Repository | ||||||
|  |  | ||||||
|  | [![][ollama4j-releases-shield]][ollama4j-releases-link] | ||||||
|  |  | ||||||
| [![][lib-shield]][lib] | [ollama4j-releases-link]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j/overview | ||||||
|  |  | ||||||
| [lib]: https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j | [ollama4j-releases-shield]: https://img.shields.io/github/v/release/ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20GitHub%20Packages%20 | ||||||
|  |  | ||||||
|  | 1. Add `GitHub Maven Packages` repository to your project's `pom.xml` or your `settings.xml`: | ||||||
|  |  | ||||||
|  | ```xml | ||||||
|  |  | ||||||
|  | <repositories> | ||||||
|  |     <repository> | ||||||
|  |         <id>github</id> | ||||||
|  |         <name>GitHub Apache Maven Packages</name> | ||||||
|  |         <url>https://maven.pkg.github.com/ollama4j/ollama4j</url> | ||||||
|  |         <releases> | ||||||
|  |             <enabled>true</enabled> | ||||||
|  |         </releases> | ||||||
|  |         <snapshots> | ||||||
|  |             <enabled>true</enabled> | ||||||
|  |         </snapshots> | ||||||
|  |     </repository> | ||||||
|  | </repositories> | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | 2. Add `GitHub` server to settings.xml. (Usually available at ~/.m2/settings.xml) | ||||||
|  |  | ||||||
|  | ```xml | ||||||
|  |  | ||||||
|  | <settings xmlns="http://maven.apache.org/SETTINGS/1.0.0" | ||||||
|  |           xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" | ||||||
|  |           xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 | ||||||
|  |                       http://maven.apache.org/xsd/settings-1.0.0.xsd"> | ||||||
|  |     <servers> | ||||||
|  |         <server> | ||||||
|  |             <id>github</id> | ||||||
|  |             <username>YOUR-USERNAME</username> | ||||||
|  |             <password>YOUR-TOKEN</password> | ||||||
|  |         </server> | ||||||
|  |     </servers> | ||||||
|  | </settings> | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | 3. In your Maven project, add this dependency: | ||||||
|  |  | ||||||
|  | ```xml | ||||||
|  |  | ||||||
|  | <dependency> | ||||||
|  |     <groupId>io.github.ollama4j</groupId> | ||||||
|  |     <artifactId>ollama4j</artifactId> | ||||||
|  |     <version>1.0.78</version> | ||||||
|  | </dependency> | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ##### For Gradle | ||||||
|  |  | ||||||
|  | 1. Add the dependency | ||||||
|  |  | ||||||
|  | ```groovy | ||||||
|  | dependencies { | ||||||
|  |   implementation 'com.github.ollama4j:ollama4j:1.0.78' | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | [//]: # (Latest release:) | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # ([![][lib-shield]][lib]) | ||||||
|  |  | ||||||
|  | [lib]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j | ||||||
|  |  | ||||||
| [lib-shield]: https://img.shields.io/badge/ollama4j-get_latest_version-blue.svg?style=just-the-message&labelColor=gray | [lib-shield]: https://img.shields.io/badge/ollama4j-get_latest_version-blue.svg?style=just-the-message&labelColor=gray | ||||||
|  |  | ||||||
| #### API Spec | #### API Spec | ||||||
|  |  | ||||||
| Find the full API specifications on the [website](https://amithkoujalgi.github.io/ollama4j/). | > [!TIP] | ||||||
|  | > Find the full API specifications on the [website](https://ollama4j.github.io/ollama4j/). | ||||||
|  |  | ||||||
| #### Development | #### Development | ||||||
|  |  | ||||||
| @@ -96,23 +199,32 @@ make build | |||||||
| Run unit tests: | Run unit tests: | ||||||
|  |  | ||||||
| ```shell | ```shell | ||||||
| make ut | make unit-tests | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| Run integration tests: | Run integration tests: | ||||||
|  |  | ||||||
| ```shell | ```shell | ||||||
| make it | make integration-tests | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| #### Releases | #### Releases | ||||||
|  |  | ||||||
| Releases (newer artifact versions) are done automatically on pushing the code to the `main` branch through GitHub | Newer artifacts are published via the GitHub Actions CI workflow when a new release is created from the `main` branch. | ||||||
| Actions CI workflow. |  | ||||||
|  | #### Who's using Ollama4j? | ||||||
|  |  | ||||||
|  | - `Datafaker`: a library to generate fake data | ||||||
|  |     - https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api | ||||||
|  | - `Vaadin Web UI`: UI-Tester for Interactions with Ollama via ollama4j | ||||||
|  |     - https://github.com/TEAMPB/ollama4j-vaadin-ui | ||||||
|  | - `ollama-translator`: a Minecraft 1.20.6 Spigot plugin that breaks language barriers by using Ollama on the | ||||||
|  |   server to translate all messages into a specific target language. | ||||||
|  |     - https://github.com/liebki/ollama-translator | ||||||
|  |  | ||||||
| #### Traction | #### Traction | ||||||
|  |  | ||||||
| [](https://star-history.com/#amithkoujalgi/ollama4j&Date) | [](https://star-history.com/#ollama4j/ollama4j&Date) | ||||||
|  |  | ||||||
| ### Areas of improvement | ### Areas of improvement | ||||||
|  |  | ||||||
| @@ -124,30 +236,69 @@ Actions CI workflow. | |||||||
| - [x] Use lombok | - [x] Use lombok | ||||||
| - [x] Update request body creation with Java objects | - [x] Update request body creation with Java objects | ||||||
| - [ ] Async APIs for images | - [ ] Async APIs for images | ||||||
|  | - [ ] Support for function calling with models like Mistral | ||||||
|  |     - [x] generate in sync mode | ||||||
|  |     - [ ] generate in async mode | ||||||
| - [ ] Add custom headers to requests | - [ ] Add custom headers to requests | ||||||
| - [ ] Add additional params for `ask` APIs such as: | - [x] Add additional params for `ask` APIs such as: | ||||||
|     - [x] `options`: additional model parameters for the Modelfile such as `temperature` - |     - [x] `options`: additional model parameters for the Modelfile such as `temperature` - | ||||||
|       Supported [params](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values). |       Supported [params](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values). | ||||||
|     - [ ] `system`: system prompt (overrides what is defined in the Modelfile) |     - [x] `system`: system prompt (overrides what is defined in the Modelfile) | ||||||
|     - [ ] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile) |     - [x] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile) | ||||||
|     - [ ] `context`: the context parameter returned from a previous request, which can be used to keep a |     - [x] `context`: the context parameter returned from a previous request, which can be used to keep a | ||||||
|       short |       short | ||||||
|       conversational memory |       conversational memory | ||||||
|     - [ ] `stream`: Add support for streaming responses from the model |     - [x] `stream`: Add support for streaming responses from the model | ||||||
| - [ ] Add test cases | - [ ] Add test cases | ||||||
| - [ ] Handle exceptions better (maybe throw more appropriate exceptions) | - [ ] Handle exceptions better (maybe throw more appropriate exceptions) | ||||||
|  |  | ||||||
| ### Get Involved | ### Get Involved | ||||||
|  |  | ||||||
|  | <div align="center"> | ||||||
|  |  | ||||||
|  | <a href=""></a> | ||||||
|  | <a href=""></a> | ||||||
|  | <a href=""></a> | ||||||
|  | <a href=""></a> | ||||||
|  | <a href=""></a> | ||||||
|  |  | ||||||
|  | </div> | ||||||
|  |  | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  |  | ||||||
| Contributions are most welcome! Whether it's reporting a bug, proposing an enhancement, or helping | Contributions are most welcome! Whether it's reporting a bug, proposing an enhancement, or helping | ||||||
| with code - any sort | with code - any sort | ||||||
| of contribution is much appreciated. | of contribution is much appreciated. | ||||||
|  |  | ||||||
|  | ### References | ||||||
|  |  | ||||||
|  | - [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md) | ||||||
|  |  | ||||||
| ### Credits | ### Credits | ||||||
|  |  | ||||||
| The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/) | The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/) | ||||||
| project. | project. | ||||||
|  |  | ||||||
| ### References | **Thanks to the amazing contributors** | ||||||
|  |  | ||||||
| - [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md) | <p align="center"> | ||||||
|  |   <a href="https://github.com/ollama4j/ollama4j/graphs/contributors"> | ||||||
|  |     <img src="https://contrib.rocks/image?repo=ollama4j/ollama4j" /> | ||||||
|  |   </a> | ||||||
|  | </p> | ||||||
|  |  | ||||||
|  | ### Appreciate my work? | ||||||
|  |  | ||||||
|  | <p align="center"> | ||||||
|  |   <a href="https://www.buymeacoffee.com/amithkoujalgi" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a> | ||||||
|  | </p> | ||||||
|   | |||||||
| @@ -11,7 +11,7 @@ Hey there, my fellow Java Developers! 🚀 | |||||||
| I am glad to announce the release of Ollama4j, a library that unites Ollama (an LLM manager and runner) and your Java | I am glad to announce the release of Ollama4j, a library that unites Ollama (an LLM manager and runner) and your Java | ||||||
| applications! 🌐🚀 | applications! 🌐🚀 | ||||||
|  |  | ||||||
| 👉 GitHub Repository: Ollama4j on GitHub (https://github.com/amithkoujalgi/ollama4j) | 👉 GitHub Repository: Ollama4j on GitHub (https://github.com/ollama4j/ollama4j) | ||||||
|  |  | ||||||
| 🌟 Key Features: | 🌟 Key Features: | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,42 +0,0 @@ | |||||||
| --- |  | ||||||
| sidebar_position: 2 |  | ||||||
| --- |  | ||||||
|  |  | ||||||
| # Ask - Async |  | ||||||
|  |  | ||||||
| This API lets you ask questions to the LLMs in a asynchronous way. |  | ||||||
| These APIs correlate to |  | ||||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs. |  | ||||||
|  |  | ||||||
| ```java |  | ||||||
| public class Main { |  | ||||||
|  |  | ||||||
|     public static void main(String[] args) { |  | ||||||
|  |  | ||||||
|         String host = "http://localhost:11434/"; |  | ||||||
|  |  | ||||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); |  | ||||||
|  |  | ||||||
|         String prompt = "Who are you?"; |  | ||||||
|  |  | ||||||
|         OllamaAsyncResultCallback callback = ollamaAPI.askAsync(OllamaModelType.LLAMA2, prompt); |  | ||||||
|  |  | ||||||
|         while (!callback.isComplete() || !callback.getStream().isEmpty()) { |  | ||||||
|             // poll for data from the response stream |  | ||||||
|             String result = callback.getStream().poll(); |  | ||||||
|             if (response != null) { |  | ||||||
|                 System.out.print(result.getResponse()); |  | ||||||
|             } |  | ||||||
|             Thread.sleep(100); |  | ||||||
|         } |  | ||||||
|     } |  | ||||||
| } |  | ||||||
| ``` |  | ||||||
|  |  | ||||||
| You will get a response similar to: |  | ||||||
|  |  | ||||||
| > I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational |  | ||||||
| > manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide |  | ||||||
| > range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that |  | ||||||
| > require |  | ||||||
| > natural language understanding and generation capabilities. |  | ||||||
| @@ -1,6 +1,6 @@ | |||||||
| { | { | ||||||
|   "label": "APIs - Extras", |   "label": "APIs - Extras", | ||||||
|   "position": 10, |   "position": 4, | ||||||
|   "link": { |   "link": { | ||||||
|     "type": "generated-index", |     "type": "generated-index", | ||||||
|     "description": "Details of APIs to handle bunch of extra stuff." |     "description": "Details of APIs to handle bunch of extra stuff." | ||||||
|   | |||||||
| @@ -31,7 +31,7 @@ Link to [source](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md | |||||||
|  |  | ||||||
| Also, see how to set those Ollama parameters using | Also, see how to set those Ollama parameters using | ||||||
| the `OptionsBuilder` | the `OptionsBuilder` | ||||||
| from [javadoc](https://amithkoujalgi.github.io/ollama4j/apidocs/io/github/amithkoujalgi/ollama4j/core/utils/OptionsBuilder.html). | from [javadoc](https://ollama4j.github.io/ollama4j/apidocs/io/github/ollama4j/ollama4j/core/utils/OptionsBuilder.html). | ||||||
|  |  | ||||||
| ## Build an empty `Options` object | ## Build an empty `Options` object | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,6 +1,6 @@ | |||||||
| { | { | ||||||
|   "label": "APIs - Ask", |   "label": "APIs - Generate", | ||||||
|   "position": 10, |   "position": 3, | ||||||
|   "link": { |   "link": { | ||||||
|     "type": "generated-index", |     "type": "generated-index", | ||||||
|     "description": "Details of APIs to interact with LLMs." |     "description": "Details of APIs to interact with LLMs." | ||||||
							
								
								
									
docs/docs/apis-generate/chat.md (new file, 205 lines)
									
								
							| @@ -0,0 +1,205 @@ | |||||||
|  | --- | ||||||
|  | sidebar_position: 7 | ||||||
|  | --- | ||||||
|  |  | ||||||
|  | # Chat | ||||||
|  |  | ||||||
|  | This API lets you create a conversation with LLMs. Using this API, you can ask the model questions while | ||||||
|  | including the history of previously asked questions and their answers. | ||||||
|  |  | ||||||
|  | ## Create a new conversation and use chat history to augment follow up questions | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public class Main { | ||||||
|  |  | ||||||
|  |     public static void main(String[] args) { | ||||||
|  |  | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  |  | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); | ||||||
|  |  | ||||||
|  |         // create first user question | ||||||
|  |         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") | ||||||
|  |                 .build(); | ||||||
|  |  | ||||||
|  |         // start conversation with model | ||||||
|  |         OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |  | ||||||
|  |         System.out.println("First answer: " + chatResult.getResponse()); | ||||||
|  |  | ||||||
|  |         // create next userQuestion | ||||||
|  |         requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?").build(); | ||||||
|  |  | ||||||
|  |         // "continue" conversation with model | ||||||
|  |         chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |  | ||||||
|  |         System.out.println("Second answer: " + chatResult.getResponse()); | ||||||
|  |  | ||||||
|  |         System.out.println("Chat History: " + chatResult.getChatHistory()); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | You will get a response similar to: | ||||||
|  |  | ||||||
|  | > First answer: Should be Paris! | ||||||
|  | > | ||||||
|  | > Second answer: Marseille. | ||||||
|  | > | ||||||
|  | > Chat History: | ||||||
|  |  | ||||||
|  | ```json | ||||||
|  | [ | ||||||
|  |   { | ||||||
|  |     "role": "user", | ||||||
|  |     "content": "What is the capital of France?", | ||||||
|  |     "images": [] | ||||||
|  |   }, | ||||||
|  |   { | ||||||
|  |     "role": "assistant", | ||||||
|  |     "content": "Should be Paris!", | ||||||
|  |     "images": [] | ||||||
|  |   }, | ||||||
|  |   { | ||||||
|  |     "role": "user", | ||||||
|  |     "content": "And what is the second largest city?", | ||||||
|  |     "images": [] | ||||||
|  |   }, | ||||||
|  |   { | ||||||
|  |     "role": "assistant", | ||||||
|  |     "content": "Marseille.", | ||||||
|  |     "images": [] | ||||||
|  |   } | ||||||
|  | ] | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ## Create a conversation where the answer is streamed | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public class Main { | ||||||
|  |  | ||||||
|  |     public static void main(String[] args) { | ||||||
|  |  | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  |  | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); | ||||||
|  |         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, | ||||||
|  |                         "What is the capital of France? And what's France's connection with Mona Lisa?") | ||||||
|  |                 .build(); | ||||||
|  |  | ||||||
|  |         // define a handler (Consumer<String>) | ||||||
|  |         OllamaStreamHandler streamHandler = (s) -> { | ||||||
|  |             System.out.println(s); | ||||||
|  |         }; | ||||||
|  |  | ||||||
|  |         OllamaChatResult chatResult = ollamaAPI.chat(requestModel, streamHandler); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | You will get a response similar to: | ||||||
|  |  | ||||||
|  | > The | ||||||
|  | > The capital | ||||||
|  | > The capital of | ||||||
|  | > The capital of France | ||||||
|  | > The capital of France is | ||||||
|  | > The capital of France is Paris | ||||||
|  | > The capital of France is Paris. | ||||||
|  |  | ||||||
|  | ## Use a simple Console Output Stream Handler | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler; | ||||||
|  |  | ||||||
|  | public class Main { | ||||||
|  |     public static void main(String[] args) throws Exception { | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |  | ||||||
|  |         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); | ||||||
|  |         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!") | ||||||
|  |                 .build(); | ||||||
|  |         OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler(); | ||||||
|  |         ollamaAPI.chat(requestModel, streamHandler); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ## Create a new conversation with individual system prompt | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public class Main { | ||||||
|  |  | ||||||
|  |     public static void main(String[] args) { | ||||||
|  |  | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  |  | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); | ||||||
|  |  | ||||||
|  |         // create request with system-prompt (overriding the model defaults) and user question | ||||||
|  |         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") | ||||||
|  |                 .withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?") | ||||||
|  |                 .build(); | ||||||
|  |  | ||||||
|  |         // start conversation with model | ||||||
|  |         OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |  | ||||||
|  |         System.out.println(chatResult.getResponse()); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | You will get a response similar to: | ||||||
|  |  | ||||||
|  | > NI. | ||||||
|  |  | ||||||
|  | ## Create a conversation about an image (requires model with image recognition skills) | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public class Main { | ||||||
|  |  | ||||||
|  |     public static void main(String[] args) { | ||||||
|  |  | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  |  | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA); | ||||||
|  |  | ||||||
|  |         // Load Image from File and attach to user message (alternatively images could also be added via URL) | ||||||
|  |         OllamaChatRequestModel requestModel = | ||||||
|  |                 builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", | ||||||
|  |                         List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); | ||||||
|  |  | ||||||
|  |         OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |         System.out.println("First answer: " + chatResult.getResponse()); | ||||||
|  |  | ||||||
|  |         builder.reset(); | ||||||
|  |  | ||||||
|  |         // Use history to ask further questions about the image or assistant answer | ||||||
|  |         requestModel = | ||||||
|  |                 builder.withMessages(chatResult.getChatHistory()) | ||||||
|  |                         .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build(); | ||||||
|  |  | ||||||
|  |         chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |         System.out.println("Second answer: " + chatResult.getResponse()); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | You will get a response similar to: | ||||||
|  |  | ||||||
|  | > First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two | ||||||
|  | > levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and | ||||||
|  | > comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early | ||||||
|  | > evening, given the warm lighting and the low position of the sun in the sky. | ||||||
|  | > | ||||||
|  | > Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. However, the dog | ||||||
|  | > appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever | ||||||
|  | > or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed | ||||||
|  | > confidently. | ||||||
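|  |  | ||||||
|  | If you want follow-up answers streamed token by token as well, you could reuse a stream handler together with the | ||||||
|  | chat history. A minimal sketch, assuming the same `ollamaAPI`, `builder`, and `chatResult` as above (the follow-up | ||||||
|  | question text is only an illustration): | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | // Stream another question about the same image using the accumulated chat history | ||||||
|  | OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler(); | ||||||
|  | OllamaChatRequestModel followUpRequest = builder | ||||||
|  |         .withMessages(chatResult.getChatHistory()) | ||||||
|  |         .withMessage(OllamaChatMessageRole.USER, "Describe the boat in one sentence.") | ||||||
|  |         .build(); | ||||||
|  | // Tokens are printed by the handler as they arrive | ||||||
|  | ollamaAPI.chat(followUpRequest, streamHandler); | ||||||
|  | ``` | ||||||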
docs/docs/apis-generate/generate-async.md (Normal file, 46 lines added)
							| @@ -0,0 +1,46 @@ | |||||||
|  | --- | ||||||
|  | sidebar_position: 2 | ||||||
|  | --- | ||||||
|  |  | ||||||
|  | # Generate - Async | ||||||
|  |  | ||||||
|  | This API lets you ask questions to the LLMs in an asynchronous way. | ||||||
|  | This is particularly helpful when you want to issue a generate request to the LLM and collect the response in the | ||||||
|  | background (such as in a separate thread) without blocking your code until the response arrives from the model. | ||||||
|  |  | ||||||
|  | This API corresponds to | ||||||
|  | the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public class Main { | ||||||
|  |  | ||||||
|  |     public static void main(String[] args) throws Exception { | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |         ollamaAPI.setRequestTimeoutSeconds(60); | ||||||
|  |         String prompt = "List all cricket world cup teams of 2019."; | ||||||
|  |         OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync(OllamaModelType.LLAMA3, prompt, false); | ||||||
|  |  | ||||||
|  |         // Set the poll interval according to your needs. | ||||||
|  |         // The smaller the poll interval, the more frequently you receive tokens. | ||||||
|  |         int pollIntervalMilliseconds = 1000; | ||||||
|  |  | ||||||
|  |         while (true) { | ||||||
|  |             String tokens = streamer.getStream().poll(); | ||||||
|  |             if (tokens != null) { | ||||||
|  |                 System.out.print(tokens); | ||||||
|  |             } | ||||||
|  |             if (!streamer.isAlive()) { | ||||||
|  |                 break; | ||||||
|  |             } | ||||||
|  |             Thread.sleep(pollIntervalMilliseconds); | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         System.out.println("\n------------------------"); | ||||||
|  |         System.out.println("Complete Response:"); | ||||||
|  |         System.out.println("------------------------"); | ||||||
|  |  | ||||||
|  |         System.out.println(streamer.getResult()); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | You will get a streaming response. | ||||||
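|  |  | ||||||
|  | Because the streamer exposes `isAlive()` and a pollable token stream, you can also consume it from a background | ||||||
|  | thread so the calling thread is never blocked. A minimal sketch, assuming the same `ollamaAPI` and `prompt` as above | ||||||
|  | (run it from a method that declares `throws InterruptedException`): | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync(OllamaModelType.LLAMA3, prompt, false); | ||||||
|  |  | ||||||
|  | // Poll the token stream on a separate thread; the main thread stays free for other work | ||||||
|  | Thread poller = new Thread(() -> { | ||||||
|  |     try { | ||||||
|  |         while (streamer.isAlive()) { | ||||||
|  |             String tokens = streamer.getStream().poll(); | ||||||
|  |             if (tokens != null) { | ||||||
|  |                 System.out.print(tokens); | ||||||
|  |             } | ||||||
|  |             Thread.sleep(1000); | ||||||
|  |         } | ||||||
|  |     } catch (InterruptedException e) { | ||||||
|  |         Thread.currentThread().interrupt(); | ||||||
|  |     } | ||||||
|  | }); | ||||||
|  | poller.start(); | ||||||
|  |  | ||||||
|  | // ... do other work here while tokens are being printed ... | ||||||
|  |  | ||||||
|  | poller.join(); | ||||||
|  | // The complete response is available from the streamer once it finishes | ||||||
|  | System.out.println("\nComplete response: " + streamer.getResult()); | ||||||
|  | ``` | ||||||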
| @@ -1,12 +1,12 @@ | |||||||
| --- | --- | ||||||
| sidebar_position: 3 | sidebar_position: 4 | ||||||
| --- | --- | ||||||
| 
 | 
 | ||||||
| # Ask - With Image Files | # Generate - With Image Files | ||||||
| 
 | 
 | ||||||
| This API lets you ask questions along with the image files to the LLMs. | This API lets you ask questions along with the image files to the LLMs. | ||||||
| These APIs correlate to | This API corresponds to | ||||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs. | the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. | ||||||
| 
 | 
 | ||||||
| :::note | :::note | ||||||
| 
 | 
 | ||||||
| @@ -15,7 +15,7 @@ recommended. | |||||||
| 
 | 
 | ||||||
| ::: | ::: | ||||||
| 
 | 
 | ||||||
| ## Ask (Sync) | ## Synchronous mode | ||||||
| 
 | 
 | ||||||
| If you have this image downloaded and you pass the path to the downloaded image to the following code: | If you have this image downloaded and you pass the path to the downloaded image to the following code: | ||||||
| 
 | 
 | ||||||
| @@ -29,7 +29,7 @@ public class Main { | |||||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|         ollamaAPI.setRequestTimeoutSeconds(10); |         ollamaAPI.setRequestTimeoutSeconds(10); | ||||||
| 
 | 
 | ||||||
|         OllamaResult result = ollamaAPI.askWithImageFiles(OllamaModelType.LLAVA, |         OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA, | ||||||
|                 "What's in this image?", |                 "What's in this image?", | ||||||
|                 List.of( |                 List.of( | ||||||
|                         new File("/path/to/image"))); |                         new File("/path/to/image"))); | ||||||
| @@ -1,12 +1,12 @@ | |||||||
| --- | --- | ||||||
| sidebar_position: 4 | sidebar_position: 5 | ||||||
| --- | --- | ||||||
| 
 | 
 | ||||||
| # Ask - With Image URLs | # Generate - With Image URLs | ||||||
| 
 | 
 | ||||||
| This API lets you ask questions along with the image files to the LLMs. | This API lets you ask questions along with the image files to the LLMs. | ||||||
| These APIs correlate to | This API corresponds to | ||||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs. | the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. | ||||||
| 
 | 
 | ||||||
| :::note | :::note | ||||||
| 
 | 
 | ||||||
| @@ -29,7 +29,7 @@ public class Main { | |||||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|         ollamaAPI.setRequestTimeoutSeconds(10); |         ollamaAPI.setRequestTimeoutSeconds(10); | ||||||
| 
 | 
 | ||||||
|         OllamaResult result = ollamaAPI.askWithImageURLs(OllamaModelType.LLAVA, |         OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA, | ||||||
|                 "What's in this image?", |                 "What's in this image?", | ||||||
|                 List.of( |                 List.of( | ||||||
|                         "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")); |                         "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")); | ||||||
docs/docs/apis-generate/generate-with-tools.md (Normal file, 368 lines added)
							| @@ -0,0 +1,368 @@ | |||||||
|  | --- | ||||||
|  | sidebar_position: 3 | ||||||
|  | --- | ||||||
|  |  | ||||||
|  | # Generate - With Tools | ||||||
|  |  | ||||||
|  | This API lets you perform [function calling](https://docs.mistral.ai/capabilities/function_calling/) using LLMs in a | ||||||
|  | synchronous way. | ||||||
|  | This API corresponds to | ||||||
|  | the [generate](https://github.com/ollama/ollama/blob/main/docs/api.md#request-raw-mode) API with `raw` mode. | ||||||
|  |  | ||||||
|  | :::note | ||||||
|  |  | ||||||
|  | This is only an experimental implementation and has a very basic design. | ||||||
|  |  | ||||||
|  | Currently, this is built and tested only for [Mistral's latest model](https://ollama.com/library/mistral). It could be | ||||||
|  | redesigned in the future if Ollama adds tooling support for more models through a generic interaction standard. | ||||||
|  |  | ||||||
|  | ::: | ||||||
|  |  | ||||||
|  | ### Function Calling/Tools | ||||||
|  |  | ||||||
|  | Assume you want to call a method in your code based on the response generated from the model. | ||||||
|  | For instance, based on a user's question, you might want to identify a transaction, fetch its details from your | ||||||
|  | database, and respond to the user with those details. | ||||||
|  |  | ||||||
|  | You can do that easily with the `function calling` capabilities of the models by registering your `tools`. | ||||||
|  |  | ||||||
|  | ### Create Functions | ||||||
|  |  | ||||||
|  | This function takes the arguments `location` and `fuelType`, performs an operation with them, and returns the | ||||||
|  | current fuel price. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public static String getCurrentFuelPrice(Map<String, Object> arguments) { | ||||||
|  |     String location = arguments.get("location").toString(); | ||||||
|  |     String fuelType = arguments.get("fuelType").toString(); | ||||||
|  |     return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | This function takes the argument `city`, performs an operation with it, and returns the weather for that | ||||||
|  | location. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public static String getCurrentWeather(Map<String, Object> arguments) { | ||||||
|  |     String location = arguments.get("city").toString(); | ||||||
|  |     return "Currently " + location + "'s weather is nice."; | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | This function takes the arguments `employee-name`, `employee-address`, and `employee-phone`, performs an operation | ||||||
|  | with them, and returns the employee details. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | class DBQueryFunction implements ToolFunction { | ||||||
|  |     @Override | ||||||
|  |     public Object apply(Map<String, Object> arguments) { | ||||||
|  |         // perform DB operations here | ||||||
|  |         return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString()); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ### Define Tool Specifications | ||||||
|  |  | ||||||
|  | Let's define a sample tool specification called **Fuel Price Tool** for getting the current fuel price. | ||||||
|  |  | ||||||
|  | - Specify the function `name`, `description`, and `required` properties (`location` and `fuelType`). | ||||||
|  | - Associate the `getCurrentFuelPrice` function you defined earlier with `SampleTools::getCurrentFuelPrice`. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder() | ||||||
|  |         .functionName("current-fuel-price") | ||||||
|  |         .functionDescription("Get current fuel price") | ||||||
|  |         .properties( | ||||||
|  |                 new Tools.PropsBuilder() | ||||||
|  |                         .withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||||
|  |                         .withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build()) | ||||||
|  |                         .build() | ||||||
|  |         ) | ||||||
|  |         .toolDefinition(SampleTools::getCurrentFuelPrice) | ||||||
|  |         .build(); | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Let's also define a sample tool specification called **Weather Tool** for getting the current weather. | ||||||
|  |  | ||||||
|  | - Specify the function `name`, `description`, and `required` property (`city`). | ||||||
|  | - Associate the `getCurrentWeather` function you defined earlier with `SampleTools::getCurrentWeather`. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder() | ||||||
|  |         .functionName("current-weather") | ||||||
|  |         .functionDescription("Get current weather") | ||||||
|  |         .properties( | ||||||
|  |                 new Tools.PropsBuilder() | ||||||
|  |                         .withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||||
|  |                         .build() | ||||||
|  |         ) | ||||||
|  |         .toolDefinition(SampleTools::getCurrentWeather) | ||||||
|  |         .build(); | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Let's also define a sample tool specification called **DBQueryFunction** for getting the employee details from the database. | ||||||
|  |  | ||||||
|  | - Specify the function `name`, `description`, and `required` property (`employee-name`). | ||||||
|  | - Associate the `DBQueryFunction` ToolFunction you defined earlier by passing an instance via `new DBQueryFunction()`. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder() | ||||||
|  |         .functionName("get-employee-details") | ||||||
|  |         .functionDescription("Get employee details from the database") | ||||||
|  |         .properties( | ||||||
|  |                 new Tools.PropsBuilder() | ||||||
|  |                         .withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build()) | ||||||
|  |                         .withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build()) | ||||||
|  |                         .withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build()) | ||||||
|  |                         .build() | ||||||
|  |         ) | ||||||
|  |         .toolDefinition(new DBQueryFunction()) | ||||||
|  |         .build(); | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ### Register the Tools | ||||||
|  |  | ||||||
|  | Register the defined tools (`fuel price`, `weather`, and `database query`) with the OllamaAPI. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | ollamaAPI.registerTool(fuelPriceToolSpecification); | ||||||
|  | ollamaAPI.registerTool(weatherToolSpecification); | ||||||
|  | ollamaAPI.registerTool(databaseQueryToolSpecification); | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ### Create prompt with Tools | ||||||
|  |  | ||||||
|  | `Prompt 1`: Create a prompt asking for the petrol price in Bengaluru using the defined fuel price and weather tools. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | String prompt1 = new Tools.PromptBuilder() | ||||||
|  |                 .withToolSpecification(fuelPriceToolSpecification) | ||||||
|  |                 .withToolSpecification(weatherToolSpecification) | ||||||
|  |                 .withPrompt("What is the petrol price in Bengaluru?") | ||||||
|  |                 .build(); | ||||||
|  | OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, new OptionsBuilder().build()); | ||||||
|  | for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) { | ||||||
|  |     System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString()); | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Now, fire away your question to the model. | ||||||
|  |  | ||||||
|  | You will get a response similar to: | ||||||
|  |  | ||||||
|  | ::::tip[LLM Response] | ||||||
|  |  | ||||||
|  | [Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L | ||||||
|  |  | ||||||
|  | :::: | ||||||
|  |  | ||||||
|  | `Prompt 2`: Create a prompt asking for the current weather in Bengaluru using the same tools. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | String prompt2 = new Tools.PromptBuilder() | ||||||
|  |                 .withToolSpecification(fuelPriceToolSpecification) | ||||||
|  |                 .withToolSpecification(weatherToolSpecification) | ||||||
|  |                 .withPrompt("What is the current weather in Bengaluru?") | ||||||
|  |                 .build(); | ||||||
|  | OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, new OptionsBuilder().build()); | ||||||
|  | for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) { | ||||||
|  |     System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString()); | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Again, fire away your question to the model. | ||||||
|  |  | ||||||
|  | You will get a response similar to: | ||||||
|  |  | ||||||
|  | ::::tip[LLM Response] | ||||||
|  |  | ||||||
|  | [Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice. | ||||||
|  |  | ||||||
|  | :::: | ||||||
|  |  | ||||||
|  | `Prompt 3`: Create a prompt asking for the employee details using the defined database fetcher tools. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | String prompt3 = new Tools.PromptBuilder() | ||||||
|  |                 .withToolSpecification(fuelPriceToolSpecification) | ||||||
|  |                 .withToolSpecification(weatherToolSpecification) | ||||||
|  |                 .withToolSpecification(databaseQueryToolSpecification) | ||||||
|  |                 .withPrompt("Give me the details of the employee named 'Rahul Kumar'?") | ||||||
|  |                 .build(); | ||||||
|  | OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt3, new OptionsBuilder().build()); | ||||||
|  | for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) { | ||||||
|  |     System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString()); | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Again, fire away your question to the model. | ||||||
|  |  | ||||||
|  | You will get a response similar to: | ||||||
|  |  | ||||||
|  | ::::tip[LLM Response] | ||||||
|  |  | ||||||
|  | [Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name: | ||||||
|  | Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}` | ||||||
|  |  | ||||||
|  | :::: | ||||||
|  |  | ||||||
|  | ### Full Example | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolInvocationException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.tools.OllamaToolsResult; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.tools.ToolFunction; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.tools.Tools; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
|  |  | ||||||
|  | import java.io.IOException; | ||||||
|  | import java.util.Arrays; | ||||||
|  | import java.util.Map; | ||||||
|  | import java.util.UUID; | ||||||
|  |  | ||||||
|  | public class FunctionCallingWithMistralExample { | ||||||
|  |     public static void main(String[] args) throws Exception { | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |         ollamaAPI.setRequestTimeoutSeconds(60); | ||||||
|  |  | ||||||
|  |         String model = "mistral"; | ||||||
|  |  | ||||||
|  |         Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder() | ||||||
|  |                 .functionName("current-fuel-price") | ||||||
|  |                 .functionDescription("Get current fuel price") | ||||||
|  |                 .properties( | ||||||
|  |                         new Tools.PropsBuilder() | ||||||
|  |                                 .withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||||
|  |                                 .withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build()) | ||||||
|  |                                 .build() | ||||||
|  |                 ) | ||||||
|  |                 .toolDefinition(SampleTools::getCurrentFuelPrice) | ||||||
|  |                 .build(); | ||||||
|  |  | ||||||
|  |         Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder() | ||||||
|  |                 .functionName("current-weather") | ||||||
|  |                 .functionDescription("Get current weather") | ||||||
|  |                 .properties( | ||||||
|  |                         new Tools.PropsBuilder() | ||||||
|  |                                 .withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||||
|  |                                 .build() | ||||||
|  |                 ) | ||||||
|  |                 .toolDefinition(SampleTools::getCurrentWeather) | ||||||
|  |                 .build(); | ||||||
|  |  | ||||||
|  |         Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder() | ||||||
|  |                 .functionName("get-employee-details") | ||||||
|  |                 .functionDescription("Get employee details from the database") | ||||||
|  |                 .properties( | ||||||
|  |                         new Tools.PropsBuilder() | ||||||
|  |                                 .withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build()) | ||||||
|  |                                 .withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build()) | ||||||
|  |                                 .withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build()) | ||||||
|  |                                 .build() | ||||||
|  |                 ) | ||||||
|  |                 .toolDefinition(new DBQueryFunction()) | ||||||
|  |                 .build(); | ||||||
|  |  | ||||||
|  |         ollamaAPI.registerTool(fuelPriceToolSpecification); | ||||||
|  |         ollamaAPI.registerTool(weatherToolSpecification); | ||||||
|  |         ollamaAPI.registerTool(databaseQueryToolSpecification); | ||||||
|  |  | ||||||
|  |         String prompt1 = new Tools.PromptBuilder() | ||||||
|  |                 .withToolSpecification(fuelPriceToolSpecification) | ||||||
|  |                 .withToolSpecification(weatherToolSpecification) | ||||||
|  |                 .withPrompt("What is the petrol price in Bengaluru?") | ||||||
|  |                 .build(); | ||||||
|  |         ask(ollamaAPI, model, prompt1); | ||||||
|  |  | ||||||
|  |         String prompt2 = new Tools.PromptBuilder() | ||||||
|  |                 .withToolSpecification(fuelPriceToolSpecification) | ||||||
|  |                 .withToolSpecification(weatherToolSpecification) | ||||||
|  |                 .withPrompt("What is the current weather in Bengaluru?") | ||||||
|  |                 .build(); | ||||||
|  |         ask(ollamaAPI, model, prompt2); | ||||||
|  |  | ||||||
|  |         String prompt3 = new Tools.PromptBuilder() | ||||||
|  |                 .withToolSpecification(fuelPriceToolSpecification) | ||||||
|  |                 .withToolSpecification(weatherToolSpecification) | ||||||
|  |                 .withToolSpecification(databaseQueryToolSpecification) | ||||||
|  |                 .withPrompt("Give me the details of the employee named 'Rahul Kumar'?") | ||||||
|  |                 .build(); | ||||||
|  |         ask(ollamaAPI, model, prompt3); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException { | ||||||
|  |         OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, new OptionsBuilder().build()); | ||||||
|  |         for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) { | ||||||
|  |             System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString()); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class SampleTools { | ||||||
|  |     public static String getCurrentFuelPrice(Map<String, Object> arguments) { | ||||||
|  |         // Get details from fuel price API | ||||||
|  |         String location = arguments.get("location").toString(); | ||||||
|  |         String fuelType = arguments.get("fuelType").toString(); | ||||||
|  |         return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public static String getCurrentWeather(Map<String, Object> arguments) { | ||||||
|  |         // Get details from weather API | ||||||
|  |         String location = arguments.get("city").toString(); | ||||||
|  |         return "Currently " + location + "'s weather is nice."; | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | class DBQueryFunction implements ToolFunction { | ||||||
|  |     @Override | ||||||
|  |     public Object apply(Map<String, Object> arguments) { | ||||||
|  |         // perform DB operations here | ||||||
|  |         return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString()); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Run this full example and you will get a response similar to: | ||||||
|  |  | ||||||
|  | ::::tip[LLM Response] | ||||||
|  |  | ||||||
|  | [Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L | ||||||
|  |  | ||||||
|  | [Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice. | ||||||
|  |  | ||||||
|  | [Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name: | ||||||
|  | Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}` | ||||||
|  |  | ||||||
|  | :::: | ||||||
|  |  | ||||||
|  | ### Room for improvement | ||||||
|  |  | ||||||
|  | Instead of explicitly registering `ollamaAPI.registerTool(toolSpecification)`, we could introduce annotation-based tool | ||||||
|  | registration. For example: | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  |  | ||||||
|  | @ToolSpec(name = "current-fuel-price", desc = "Get current fuel price") | ||||||
|  | public String getCurrentFuelPrice(Map<String, Object> arguments) { | ||||||
|  |     String location = arguments.get("location").toString(); | ||||||
|  |     String fuelType = arguments.get("fuelType").toString(); | ||||||
|  |     return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Instead of passing a map of args `Map<String, Object> arguments` to the tool functions, we could support passing | ||||||
|  | specific args separately with their data types. For example: | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public String getCurrentFuelPrice(String location, String fuelType) { | ||||||
|  |     return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Another possible improvement is updating the async/chat APIs with support for tool-based generation. | ||||||
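|  |  | ||||||
|  | For example, a chat-style entry point could resolve registered tools automatically and return both the tool results | ||||||
|  | and the assistant message. The sketch below is purely hypothetical: `chatWithTools` is not part of the library today | ||||||
|  | and only illustrates the idea. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | // Hypothetical API -- chatWithTools(...) does NOT exist in ollama4j yet | ||||||
|  | OllamaChatRequestModel request = OllamaChatRequestBuilder.getInstance("mistral") | ||||||
|  |         .withMessage(OllamaChatMessageRole.USER, "What is the petrol price in Bengaluru?") | ||||||
|  |         .build(); | ||||||
|  | // Imagined behaviour: tools registered via registerTool(...) are invoked automatically | ||||||
|  | OllamaChatResult result = ollamaAPI.chatWithTools(request); // hypothetical method | ||||||
|  | System.out.println(result.getResponse()); | ||||||
|  | ``` | ||||||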
| @@ -2,16 +2,16 @@ | |||||||
| sidebar_position: 1 | sidebar_position: 1 | ||||||
| --- | --- | ||||||
| 
 | 
 | ||||||
| # Ask - Sync | # Generate - Sync | ||||||
| 
 | 
 | ||||||
| This API lets you ask questions to the LLMs in a synchronous way. | This API lets you ask questions to the LLMs in a synchronous way. | ||||||
| These APIs correlate to | This API corresponds to | ||||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs. | the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. | ||||||
| 
 | 
 | ||||||
| Use the `OptionBuilder` to build the `Options` object | Use the `OptionBuilder` to build the `Options` object | ||||||
| with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values). | with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values). | ||||||
| Refer | Refer | ||||||
| to [this](/docs/apis-extras/options-builder). | to [this](/apis-extras/options-builder). | ||||||
| 
 | 
 | ||||||
| ## Try asking a question about the model. | ## Try asking a question about the model. | ||||||
| 
 | 
 | ||||||
| @@ -25,7 +25,7 @@ public class Main { | |||||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
| 
 | 
 | ||||||
|         OllamaResult result = |         OllamaResult result = | ||||||
|                 ollamaAPI.ask(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build()); |                 ollamaAPI.generate(OllamaModelType.LLAMA2, "Who are you?", new OptionsBuilder().build()); | ||||||
| 
 | 
 | ||||||
|         System.out.println(result.getResponse()); |         System.out.println(result.getResponse()); | ||||||
|     } |     } | ||||||
| @@ -41,6 +41,42 @@ You will get a response similar to: | |||||||
| > require | > require | ||||||
| > natural language understanding and generation capabilities. | > natural language understanding and generation capabilities. | ||||||
| 
 | 
 | ||||||
|  | ## Try asking a question, receiving the answer streamed | ||||||
|  | 
 | ||||||
|  | ```java | ||||||
|  | public class Main { | ||||||
|  | 
 | ||||||
|  |     public static void main(String[] args) { | ||||||
|  | 
 | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  | 
 | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |         // define a stream handler (Consumer<String>) | ||||||
|  |         OllamaStreamHandler streamHandler = (s) -> { | ||||||
|  |             System.out.println(s); | ||||||
|  |         }; | ||||||
|  | 
 | ||||||
|  |         // Should be called from a separate thread to get a non-blocking streaming effect. | ||||||
|  |         OllamaResult result = ollamaAPI.generate(OllamaModelType.LLAMA2, | ||||||
|  |                 "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||||
|  |                 new OptionsBuilder().build(), streamHandler); | ||||||
|  | 
 | ||||||
|  |         System.out.println("Full response: " + result.getResponse()); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  | 
 | ||||||
|  | You will get a response similar to: | ||||||
|  | 
 | ||||||
|  | > The | ||||||
|  | > The capital | ||||||
|  | > The capital of | ||||||
|  | > The capital of France | ||||||
|  | > The capital of France is | ||||||
|  | > The capital of France is Paris | ||||||
|  | > The capital of France is Paris. | ||||||
|  | > Full response: The capital of France is Paris. | ||||||
|  | 
 | ||||||
| ## Try asking a question from general topics. | ## Try asking a question from general topics. | ||||||
| 
 | 
 | ||||||
| ```java | ```java | ||||||
| @@ -55,7 +91,7 @@ public class Main { | |||||||
|         String prompt = "List all cricket world cup teams of 2019."; |         String prompt = "List all cricket world cup teams of 2019."; | ||||||
| 
 | 
 | ||||||
|         OllamaResult result = |         OllamaResult result = | ||||||
|                 ollamaAPI.ask(OllamaModelType.LLAMA2, prompt, new OptionsBuilder().build()); |                 ollamaAPI.generate(OllamaModelType.LLAMA2, prompt, new OptionsBuilder().build()); | ||||||
| 
 | 
 | ||||||
|         System.out.println(result.getResponse()); |         System.out.println(result.getResponse()); | ||||||
|     } |     } | ||||||
| @@ -97,7 +133,7 @@ public class Main { | |||||||
|                 SamplePrompts.getSampleDatabasePromptWithQuestion( |                 SamplePrompts.getSampleDatabasePromptWithQuestion( | ||||||
|                         "List all customer names who have bought one or more products"); |                         "List all customer names who have bought one or more products"); | ||||||
|         OllamaResult result = |         OllamaResult result = | ||||||
|                 ollamaAPI.ask(OllamaModelType.SQLCODER, prompt, new OptionsBuilder().build()); |                 ollamaAPI.generate(OllamaModelType.SQLCODER, prompt, new OptionsBuilder().build()); | ||||||
|         System.out.println(result.getResponse()); |         System.out.println(result.getResponse()); | ||||||
|     } |     } | ||||||
| } | } | ||||||
| @@ -105,7 +141,7 @@ public class Main { | |||||||
| ``` | ``` | ||||||
| 
 | 
 | ||||||
| _Note: Here I've used | _Note: Here I've used | ||||||
| a [sample prompt](https://github.com/amithkoujalgi/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt) | a [sample prompt](https://github.com/ollama4j/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt) | ||||||
| containing a database schema from within this library for demonstration purposes._ | containing a database schema from within this library for demonstration purposes._ | ||||||
| 
 | 
 | ||||||
| You'd then get a response from the model: | You'd then get a response from the model: | ||||||
| @@ -1,5 +1,5 @@ | |||||||
| --- | --- | ||||||
| sidebar_position: 5 | sidebar_position: 6 | ||||||
| --- | --- | ||||||
| 
 | 
 | ||||||
| # Prompt Builder | # Prompt Builder | ||||||
| @@ -42,7 +42,7 @@ public class AskPhi { | |||||||
|                         .addSeparator() |                         .addSeparator() | ||||||
|                         .add("How do I read a file in Go and print its contents to stdout?"); |                         .add("How do I read a file in Go and print its contents to stdout?"); | ||||||
| 
 | 
 | ||||||
|         OllamaResult response = ollamaAPI.ask(model, promptBuilder.build()); |         OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build()); | ||||||
|         System.out.println(response.getResponse()); |         System.out.println(response.getResponse()); | ||||||
|     } |     } | ||||||
| } | } | ||||||
| @@ -1,6 +1,6 @@ | |||||||
| { | { | ||||||
|   "label": "APIs - Model Management", |   "label": "APIs - Model Management", | ||||||
|   "position": 4, |   "position": 2, | ||||||
|   "link": { |   "link": { | ||||||
|     "type": "generated-index", |     "type": "generated-index", | ||||||
|     "description": "Details of APIs to manage LLMs." |     "description": "Details of APIs to manage LLMs." | ||||||
|   | |||||||
| @@ -84,7 +84,7 @@ Add the dependency to your project's `pom.xml`. | |||||||
| </dependency> | </dependency> | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j). | Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j). | ||||||
|  |  | ||||||
| You might want to include an implementation of [SL4J](https://www.slf4j.org/) logger in your `pom.xml` file. For | You might want to include an implementation of [SL4J](https://www.slf4j.org/) logger in your `pom.xml` file. For | ||||||
| example, | example, | ||||||
|   | |||||||
| @@ -20,7 +20,7 @@ const config = { | |||||||
|  |  | ||||||
|     // GitHub pages deployment config. |     // GitHub pages deployment config. | ||||||
|     // If you aren't using GitHub pages, you don't need these. |     // If you aren't using GitHub pages, you don't need these. | ||||||
|     organizationName: 'amithkoujalgi', // Usually your GitHub org/user name. |     organizationName: 'ollama4j', // Usually your GitHub org/user name. | ||||||
|     projectName: 'ollama4j', // Usually your repo name. |     projectName: 'ollama4j', // Usually your repo name. | ||||||
|  |  | ||||||
|     onBrokenLinks: 'throw', |     onBrokenLinks: 'throw', | ||||||
| @@ -40,18 +40,20 @@ const config = { | |||||||
|             /** @type {import('@docusaurus/preset-classic').Options} */ |             /** @type {import('@docusaurus/preset-classic').Options} */ | ||||||
|             ({ |             ({ | ||||||
|                 docs: { |                 docs: { | ||||||
|  |                     path: 'docs', | ||||||
|  |                     routeBasePath: '', // change this to any URL route you'd want. For example: `home` - if you want /home/intro. | ||||||
|                     sidebarPath: './sidebars.js', |                     sidebarPath: './sidebars.js', | ||||||
|                     // Please change this to your repo. |                     // Please change this to your repo. | ||||||
|                     // Remove this to remove the "edit this page" links. |                     // Remove this to remove the "edit this page" links. | ||||||
|                     editUrl: |                     editUrl: | ||||||
|                         'https://github.com/amithkoujalgi/ollama4j/blob/main/docs', |                         'https://github.com/ollama4j/ollama4j/blob/main/docs', | ||||||
|                 }, |                 }, | ||||||
|                 blog: { |                 blog: { | ||||||
|                     showReadingTime: true, |                     showReadingTime: true, | ||||||
|                     // Please change this to your repo. |                     // Please change this to your repo. | ||||||
|                     // Remove this to remove the "edit this page" links. |                     // Remove this to remove the "edit this page" links. | ||||||
|                     editUrl: |                     editUrl: | ||||||
|                         'https://github.com/amithkoujalgi/ollama4j/blob/main/docs', |                         'https://github.com/ollama4j/ollama4j/blob/main/docs', | ||||||
|                 }, |                 }, | ||||||
|                 theme: { |                 theme: { | ||||||
|                     customCss: './src/css/custom.css', |                     customCss: './src/css/custom.css', | ||||||
| @@ -78,10 +80,11 @@ const config = { | |||||||
|                         position: 'left', |                         position: 'left', | ||||||
|                         label: 'Docs', |                         label: 'Docs', | ||||||
|                     }, |                     }, | ||||||
|                     {to: 'https://amithkoujalgi.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'}, |                     {to: 'https://ollama4j.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'}, | ||||||
|  |                     {to: 'https://ollama4j.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'}, | ||||||
|                     {to: '/blog', label: 'Blog', position: 'left'}, |                     {to: '/blog', label: 'Blog', position: 'left'}, | ||||||
|                     { |                     { | ||||||
|                         href: 'https://github.com/amithkoujalgi/ollama4j', |                         href: 'https://github.com/ollama4j/ollama4j', | ||||||
|                         label: 'GitHub', |                         label: 'GitHub', | ||||||
|                         position: 'right', |                         position: 'right', | ||||||
|                     }, |                     }, | ||||||
| @@ -95,7 +98,7 @@ const config = { | |||||||
|                         items: [ |                         items: [ | ||||||
|                             { |                             { | ||||||
|                                 label: 'Tutorial', |                                 label: 'Tutorial', | ||||||
|                                 to: '/docs/intro', |                                 to: '/intro', | ||||||
|                             }, |                             }, | ||||||
|                         ], |                         ], | ||||||
|                     }, |                     }, | ||||||
| @@ -121,7 +124,7 @@ const config = { | |||||||
|                             }, |                             }, | ||||||
|                             { |                             { | ||||||
|                                 label: 'GitHub', |                                 label: 'GitHub', | ||||||
|                                 href: 'https://github.com/amithkoujalgi/ollama4j', |                                 href: 'https://github.com/ollama4j/ollama4j', | ||||||
|                             }, |                             }, | ||||||
|                         ], |                         ], | ||||||
|                     }, |                     }, | ||||||
|   | |||||||
docs/package-lock.json (generated, 1947 lines changed; file diff suppressed because it is too large)
							| @@ -14,9 +14,9 @@ | |||||||
|     "write-heading-ids": "docusaurus write-heading-ids" |     "write-heading-ids": "docusaurus write-heading-ids" | ||||||
|   }, |   }, | ||||||
|   "dependencies": { |   "dependencies": { | ||||||
|     "@docusaurus/core": "3.0.1", |     "@docusaurus/core": "^3.4.0", | ||||||
|     "@docusaurus/preset-classic": "3.0.1", |     "@docusaurus/preset-classic": "^3.4.0", | ||||||
|     "@docusaurus/theme-mermaid": "^3.0.1", |     "@docusaurus/theme-mermaid": "^3.4.0", | ||||||
|     "@mdx-js/react": "^3.0.0", |     "@mdx-js/react": "^3.0.0", | ||||||
|     "clsx": "^2.0.0", |     "clsx": "^2.0.0", | ||||||
|     "prism-react-renderer": "^2.3.0", |     "prism-react-renderer": "^2.3.0", | ||||||
| @@ -24,8 +24,8 @@ | |||||||
|     "react-dom": "^18.0.0" |     "react-dom": "^18.0.0" | ||||||
|   }, |   }, | ||||||
|   "devDependencies": { |   "devDependencies": { | ||||||
|     "@docusaurus/module-type-aliases": "3.0.1", |     "@docusaurus/module-type-aliases": "^3.4.0", | ||||||
|     "@docusaurus/types": "3.0.1" |     "@docusaurus/types": "^3.4.0" | ||||||
|   }, |   }, | ||||||
|   "browserslist": { |   "browserslist": { | ||||||
|     "production": [ |     "production": [ | ||||||
|   | |||||||
| @@ -19,7 +19,7 @@ function HomepageHeader() { | |||||||
|             <div className={styles.buttons}> |             <div className={styles.buttons}> | ||||||
|                 <Link |                 <Link | ||||||
|                     className="button button--secondary button--lg" |                     className="button button--secondary button--lg" | ||||||
|                     to="/docs/intro"> |                     to="/intro"> | ||||||
|                     Getting Started |                     Getting Started | ||||||
|                 </Link> |                 </Link> | ||||||
|             </div> |             </div> | ||||||
|   | |||||||
pom.xml (114 lines changed)
							| @@ -1,14 +1,16 @@ | |||||||
| <?xml version="1.0" encoding="UTF-8"?> | <?xml version="1.0" encoding="UTF-8"?> | ||||||
| <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" | ||||||
|  |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> | ||||||
|     <modelVersion>4.0.0</modelVersion> |     <modelVersion>4.0.0</modelVersion> | ||||||
|  |  | ||||||
|     <groupId>io.github.amithkoujalgi</groupId> |     <groupId>io.github.ollama4j</groupId> | ||||||
|     <artifactId>ollama4j</artifactId> |     <artifactId>ollama4j</artifactId> | ||||||
|     <version>1.0.45</version> |     <version>ollama4j-revision</version> | ||||||
|  |  | ||||||
|     <name>Ollama4j</name> |     <name>Ollama4j</name> | ||||||
|     <description>Java library for interacting with Ollama API.</description> |     <description>Java library for interacting with Ollama API.</description> | ||||||
|     <url>https://github.com/amithkoujalgi/ollama4j</url> |     <url>https://github.com/ollama4j/ollama4j</url> | ||||||
|  |     <packaging>jar</packaging> | ||||||
|  |  | ||||||
|     <properties> |     <properties> | ||||||
|         <maven.compiler.source>11</maven.compiler.source> |         <maven.compiler.source>11</maven.compiler.source> | ||||||
| @@ -31,15 +33,15 @@ | |||||||
|     <licenses> |     <licenses> | ||||||
|         <license> |         <license> | ||||||
|             <name>MIT License</name> |             <name>MIT License</name> | ||||||
|             <url>https://raw.githubusercontent.com/amithkoujalgi/ollama4j/main/LICENSE</url> |             <url>https://raw.githubusercontent.com/ollama4j/ollama4j/main/LICENSE</url> | ||||||
|         </license> |         </license> | ||||||
|     </licenses> |     </licenses> | ||||||
|  |  | ||||||
|     <scm> |     <scm> | ||||||
|         <connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection> |         <connection>scm:git:git@github.com:ollama4j/ollama4j.git</connection> | ||||||
|         <developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection> |         <developerConnection>scm:git:https://github.com/ollama4j/ollama4j.git</developerConnection> | ||||||
|         <url>https://github.com/amithkoujalgi/ollama4j</url> |         <url>https://github.com/ollama4j/ollama4j</url> | ||||||
|         <tag>v1.0.45</tag> |         <tag>ollama4j-revision</tag> | ||||||
|     </scm> |     </scm> | ||||||
|  |  | ||||||
|     <build> |     <build> | ||||||
| @@ -70,27 +72,7 @@ | |||||||
|                     </execution> |                     </execution> | ||||||
|                 </executions> |                 </executions> | ||||||
|             </plugin> |             </plugin> | ||||||
|             <!--            <plugin>--> |  | ||||||
|             <!--                <groupId>org.apache.maven.plugins</groupId>--> |  | ||||||
|             <!--                <artifactId>maven-gpg-plugin</artifactId>--> |  | ||||||
|             <!--                <version>1.5</version>--> |  | ||||||
|             <!--                <executions>--> |  | ||||||
|             <!--                    <execution>--> |  | ||||||
|             <!--                        <id>sign-artifacts</id>--> |  | ||||||
|             <!--                        <phase>verify</phase>--> |  | ||||||
|             <!--                        <goals>--> |  | ||||||
|             <!--                            <goal>sign</goal>--> |  | ||||||
|             <!--                        </goals>--> |  | ||||||
|             <!--                        <configuration>--> |  | ||||||
|             <!--                            <!– This is necessary for gpg to not try to use the pinentry programs –>--> |  | ||||||
|             <!--                            <gpgArguments>--> |  | ||||||
|             <!--                                <arg>--pinentry-mode</arg>--> |  | ||||||
|             <!--                                <arg>loopback</arg>--> |  | ||||||
|             <!--                            </gpgArguments>--> |  | ||||||
|             <!--                        </configuration>--> |  | ||||||
|             <!--                    </execution>--> |  | ||||||
|             <!--                </executions>--> |  | ||||||
|             <!--            </plugin>--> |  | ||||||
|             <!-- Surefire Plugin for Unit Tests --> |             <!-- Surefire Plugin for Unit Tests --> | ||||||
|             <plugin> |             <plugin> | ||||||
|                 <groupId>org.apache.maven.plugins</groupId> |                 <groupId>org.apache.maven.plugins</groupId> | ||||||
| @@ -99,7 +81,7 @@ | |||||||
|                 <configuration> |                 <configuration> | ||||||
|                     <skipTests>${skipUnitTests}</skipTests> |                     <skipTests>${skipUnitTests}</skipTests> | ||||||
|                     <includes> |                     <includes> | ||||||
|                         <include>**/unittests/*.java</include> |                         <include>**/unittests/**/*.java</include> | ||||||
|                     </includes> |                     </includes> | ||||||
|                 </configuration> |                 </configuration> | ||||||
|             </plugin> |             </plugin> | ||||||
| @@ -127,15 +109,23 @@ | |||||||
|                     </execution> |                     </execution> | ||||||
|                 </executions> |                 </executions> | ||||||
|             </plugin> |             </plugin> | ||||||
|  |  | ||||||
|  |  | ||||||
|             <plugin> |             <plugin> | ||||||
|                 <groupId>org.apache.maven.plugins</groupId> |                 <groupId>org.apache.maven.plugins</groupId> | ||||||
|                 <artifactId>maven-release-plugin</artifactId> |                 <artifactId>maven-gpg-plugin</artifactId> | ||||||
|                 <version>3.0.1</version> |                 <version>1.5</version> | ||||||
|                 <configuration> |                 <executions> | ||||||
|                     <!--                    <goals>install</goals>--> |                     <execution> | ||||||
|                     <tagNameFormat>v@{project.version}</tagNameFormat> |                         <id>sign-artifacts</id> | ||||||
|                 </configuration> |                         <phase>verify</phase> | ||||||
|  |                         <goals> | ||||||
|  |                             <goal>sign</goal> | ||||||
|  |                         </goals> | ||||||
|  |                     </execution> | ||||||
|  |                 </executions> | ||||||
|             </plugin> |             </plugin> | ||||||
|  |  | ||||||
|         </plugins> |         </plugins> | ||||||
|     </build> |     </build> | ||||||
|  |  | ||||||
| @@ -149,12 +139,17 @@ | |||||||
|         <dependency> |         <dependency> | ||||||
|             <groupId>com.fasterxml.jackson.core</groupId> |             <groupId>com.fasterxml.jackson.core</groupId> | ||||||
|             <artifactId>jackson-databind</artifactId> |             <artifactId>jackson-databind</artifactId> | ||||||
|             <version>2.15.3</version> |             <version>2.17.1</version> | ||||||
|  |         </dependency> | ||||||
|  |         <dependency> | ||||||
|  |             <groupId>com.fasterxml.jackson.datatype</groupId> | ||||||
|  |             <artifactId>jackson-datatype-jsr310</artifactId> | ||||||
|  |             <version>2.17.1</version> | ||||||
|         </dependency> |         </dependency> | ||||||
|         <dependency> |         <dependency> | ||||||
|             <groupId>ch.qos.logback</groupId> |             <groupId>ch.qos.logback</groupId> | ||||||
|             <artifactId>logback-classic</artifactId> |             <artifactId>logback-classic</artifactId> | ||||||
|             <version>1.4.12</version> |             <version>1.5.6</version> | ||||||
|             <scope>test</scope> |             <scope>test</scope> | ||||||
|         </dependency> |         </dependency> | ||||||
|         <dependency> |         <dependency> | ||||||
| @@ -174,20 +169,47 @@ | |||||||
|             <version>4.1.0</version> |             <version>4.1.0</version> | ||||||
|             <scope>test</scope> |             <scope>test</scope> | ||||||
|         </dependency> |         </dependency> | ||||||
|  |         <dependency> | ||||||
|  |             <groupId>org.json</groupId> | ||||||
|  |             <artifactId>json</artifactId> | ||||||
|  |             <version>20240205</version> | ||||||
|  |             <scope>test</scope> | ||||||
|  |         </dependency> | ||||||
|     </dependencies> |     </dependencies> | ||||||
|  |  | ||||||
|     <distributionManagement> |     <distributionManagement> | ||||||
|         <snapshotRepository> |  | ||||||
|             <id>ossrh</id> |  | ||||||
|             <url>https://s01.oss.sonatype.org/content/repositories/snapshots</url> |  | ||||||
|         </snapshotRepository> |  | ||||||
|         <repository> |         <repository> | ||||||
|             <id>ossrh</id> |             <id>mvn-repo-id</id> | ||||||
|             <url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2</url> |  | ||||||
|         </repository> |         </repository> | ||||||
|     </distributionManagement> |     </distributionManagement> | ||||||
|  |  | ||||||
|     <profiles> |     <profiles> | ||||||
|  |         <profile> | ||||||
|  |             <id>ossrh</id> | ||||||
|  |             <activation> | ||||||
|  |                 <activeByDefault>true</activeByDefault> | ||||||
|  |             </activation> | ||||||
|  |             <properties> | ||||||
|  |                 <gpg.executable>gpg2</gpg.executable> | ||||||
|  |                 <test.env>unit</test.env> | ||||||
|  |                 <skipUnitTests>false</skipUnitTests> | ||||||
|  |                 <skipIntegrationTests>true</skipIntegrationTests> | ||||||
|  |             </properties> | ||||||
|  |             <build> | ||||||
|  |                 <plugins> | ||||||
|  |                     <plugin> | ||||||
|  |                         <groupId>org.sonatype.central</groupId> | ||||||
|  |                         <artifactId>central-publishing-maven-plugin</artifactId> | ||||||
|  |                         <version>0.5.0</version> | ||||||
|  |                         <extensions>true</extensions> | ||||||
|  |                         <configuration> | ||||||
|  |                             <publishingServerId>mvn-repo-id</publishingServerId> | ||||||
|  |                             <autoPublish>true</autoPublish> | ||||||
|  |                         </configuration> | ||||||
|  |                     </plugin> | ||||||
|  |                 </plugins> | ||||||
|  |             </build> | ||||||
|  |         </profile> | ||||||
|         <profile> |         <profile> | ||||||
|             <id>unit-tests</id> |             <id>unit-tests</id> | ||||||
|             <properties> |             <properties> | ||||||
| @@ -196,7 +218,7 @@ | |||||||
|                 <skipIntegrationTests>true</skipIntegrationTests> |                 <skipIntegrationTests>true</skipIntegrationTests> | ||||||
|             </properties> |             </properties> | ||||||
|             <activation> |             <activation> | ||||||
|                 <activeByDefault>true</activeByDefault> |                 <activeByDefault>false</activeByDefault> | ||||||
|             </activation> |             </activation> | ||||||
|             <build> |             <build> | ||||||
|                 <plugins> |                 <plugins> | ||||||
|   | |||||||
| @@ -1,22 +1,28 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core; | package io.github.amithkoujalgi.ollama4j.core; | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolInvocationException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolNotFoundException; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.*; | import io.github.amithkoujalgi.ollama4j.core.models.*; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.ModelEmbeddingsRequest; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.ModelRequest; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.request.*; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.tools.*; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Options; | import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
| import java.io.BufferedReader; | import lombok.Setter; | ||||||
| import java.io.ByteArrayOutputStream; | import org.slf4j.Logger; | ||||||
| import java.io.File; | import org.slf4j.LoggerFactory; | ||||||
| import java.io.IOException; |  | ||||||
| import java.io.InputStream; | import java.io.*; | ||||||
| import java.io.InputStreamReader; |  | ||||||
| import java.net.URI; | import java.net.URI; | ||||||
| import java.net.URISyntaxException; | import java.net.URISyntaxException; | ||||||
| import java.net.URL; |  | ||||||
| import java.net.http.HttpClient; | import java.net.http.HttpClient; | ||||||
| import java.net.http.HttpConnectTimeoutException; | import java.net.http.HttpConnectTimeoutException; | ||||||
| import java.net.http.HttpRequest; | import java.net.http.HttpRequest; | ||||||
| @@ -24,22 +30,32 @@ import java.net.http.HttpResponse; | |||||||
| import java.nio.charset.StandardCharsets; | import java.nio.charset.StandardCharsets; | ||||||
| import java.nio.file.Files; | import java.nio.file.Files; | ||||||
| import java.time.Duration; | import java.time.Duration; | ||||||
| import java.util.ArrayList; | import java.util.*; | ||||||
| import java.util.Base64; |  | ||||||
| import java.util.List; |  | ||||||
| import org.slf4j.Logger; |  | ||||||
| import org.slf4j.LoggerFactory; |  | ||||||
|  |  | ||||||
| /** The base Ollama API class. */ | /** | ||||||
|  |  * The base Ollama API class. | ||||||
|  |  */ | ||||||
| @SuppressWarnings("DuplicatedCode") | @SuppressWarnings("DuplicatedCode") | ||||||
| public class OllamaAPI { | public class OllamaAPI { | ||||||
|  |  | ||||||
|     private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class); |     private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class); | ||||||
|     private final String host; |     private final String host; | ||||||
|   private long requestTimeoutSeconds = 3; |     /** | ||||||
|  |      * -- SETTER -- | ||||||
|  |      * Set request timeout in seconds. Default is 10 seconds. | ||||||
|  |      */ | ||||||
|  |     @Setter | ||||||
|  |     private long requestTimeoutSeconds = 10; | ||||||
|  |     /** | ||||||
|  |      * -- SETTER -- | ||||||
|  |      * Set/unset logging of responses | ||||||
|  |      */ | ||||||
|  |     @Setter | ||||||
|     private boolean verbose = true; |     private boolean verbose = true; | ||||||
|     private BasicAuth basicAuth; |     private BasicAuth basicAuth; | ||||||
|  |  | ||||||
|  |     private final ToolRegistry toolRegistry = new ToolRegistry(); | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
|      * Instantiates the Ollama API. |      * Instantiates the Ollama API. | ||||||
|      * |      * | ||||||
| @@ -53,24 +69,6 @@ public class OllamaAPI { | |||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    * Set request timeout in seconds. Default is 3 seconds. |  | ||||||
|    * |  | ||||||
|    * @param requestTimeoutSeconds the request timeout in seconds |  | ||||||
|    */ |  | ||||||
|   public void setRequestTimeoutSeconds(long requestTimeoutSeconds) { |  | ||||||
|     this.requestTimeoutSeconds = requestTimeoutSeconds; |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    * Set/unset logging of responses |  | ||||||
|    * |  | ||||||
|    * @param verbose true/false |  | ||||||
|    */ |  | ||||||
|   public void setVerbose(boolean verbose) { |  | ||||||
|     this.verbose = verbose; |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
|      * Set basic authentication for accessing Ollama server that's behind a reverse-proxy/gateway. |      * Set basic authentication for accessing Ollama server that's behind a reverse-proxy/gateway. | ||||||
|      * |      * | ||||||
| @@ -308,8 +306,18 @@ public class OllamaAPI { | |||||||
|      */ |      */ | ||||||
|     public List<Double> generateEmbeddings(String model, String prompt) |     public List<Double> generateEmbeddings(String model, String prompt) | ||||||
|             throws IOException, InterruptedException, OllamaBaseException { |             throws IOException, InterruptedException, OllamaBaseException { | ||||||
|  |         return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt)); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Generate embeddings using a {@link OllamaEmbeddingsRequestModel}. | ||||||
|  |      * | ||||||
|  |      * @param modelRequest request for '/api/embeddings' endpoint | ||||||
|  |      * @return embeddings | ||||||
|  |      */ | ||||||
|  |     public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException { | ||||||
|         URI uri = URI.create(this.host + "/api/embeddings"); |         URI uri = URI.create(this.host + "/api/embeddings"); | ||||||
|     String jsonData = new ModelEmbeddingsRequest(model, prompt).toString(); |         String jsonData = modelRequest.toString(); | ||||||
|         HttpClient httpClient = HttpClient.newHttpClient(); |         HttpClient httpClient = HttpClient.newHttpClient(); | ||||||
|         HttpRequest.Builder requestBuilder = |         HttpRequest.Builder requestBuilder = | ||||||
|                 getRequestBuilderDefault(uri) |                 getRequestBuilderDefault(uri) | ||||||
| @@ -320,49 +328,106 @@ public class OllamaAPI { | |||||||
|         int statusCode = response.statusCode(); |         int statusCode = response.statusCode(); | ||||||
|         String responseBody = response.body(); |         String responseBody = response.body(); | ||||||
|         if (statusCode == 200) { |         if (statusCode == 200) { | ||||||
|       EmbeddingResponse embeddingResponse = |             OllamaEmbeddingResponseModel embeddingResponse = | ||||||
|           Utils.getObjectMapper().readValue(responseBody, EmbeddingResponse.class); |                     Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class); | ||||||
|             return embeddingResponse.getEmbedding(); |             return embeddingResponse.getEmbedding(); | ||||||
|         } else { |         } else { | ||||||
|             throw new OllamaBaseException(statusCode + " - " + responseBody); |             throw new OllamaBaseException(statusCode + " - " + responseBody); | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
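A rough usage sketch of the two embeddings overloads above. The host URL and model name are placeholders, and the single-argument OllamaAPI constructor and Lombok-generated setter are taken from the surrounding class:

    import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
    import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;

    import java.util.List;

    public class EmbeddingsSketch {
        public static void main(String[] args) throws Exception {
            // placeholder host and model, for illustration only
            OllamaAPI api = new OllamaAPI("http://localhost:11434");
            api.setRequestTimeoutSeconds(30);

            // convenience overload: model name + prompt
            List<Double> vector = api.generateEmbeddings("nomic-embed-text", "Hello world");

            // request-model overload introduced by this change
            List<Double> sameVector = api.generateEmbeddings(
                    new OllamaEmbeddingsRequestModel("nomic-embed-text", "Hello world"));

            System.out.println(vector.size() + " and " + sameVector.size() + " dimensions");
        }
    }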
|  |  | ||||||
|     /** |     /** | ||||||
|    * Ask a question to a model running on Ollama server. This is a sync/blocking call. |      * Generate response for a question to a model running on Ollama server. This is a sync/blocking | ||||||
|  |      * call. | ||||||
|      * |      * | ||||||
|      * @param model         the ollama model to ask the question to |      * @param model         the ollama model to ask the question to | ||||||
|      * @param prompt        the prompt/question text |      * @param prompt        the prompt/question text | ||||||
|      * @param options       the Options object - <a |      * @param options       the Options object - <a | ||||||
|      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More |      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More | ||||||
|      *                      details on the options</a> |      *                      details on the options</a> | ||||||
|  |      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||||
|      * @return OllamaResult that includes response text and time taken for response |      * @return OllamaResult that includes response text and time taken for response | ||||||
|      */ |      */ | ||||||
|   public OllamaResult ask(String model, String prompt, Options options) |     public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler) | ||||||
|             throws OllamaBaseException, IOException, InterruptedException { |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt); |         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); | ||||||
|  |         ollamaRequestModel.setRaw(raw); | ||||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); |         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||||
|     return askSync(ollamaRequestModel); |         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
|    * Ask a question to a model running on Ollama server and get a callback handle that can be used |      * Generates response using the specified AI model and prompt (in blocking mode). | ||||||
|    * to check for status and get the response from the model later. This would be an |      * <p> | ||||||
|    * async/non-blocking call. |      * Uses {@link #generate(String, String, boolean, Options, OllamaStreamHandler)} | ||||||
|  |      * | ||||||
|  |      * @param model   The name or identifier of the AI model to use for generating the response. | ||||||
|  |      * @param prompt  The input text or prompt to provide to the AI model. | ||||||
|  |      * @param raw     In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the raw parameter to disable templating. Also note that raw mode will not return a context. | ||||||
|  |      * @param options Additional options or configurations to use when generating the response. | ||||||
|  |      * @return {@link OllamaResult} | ||||||
|  |      */ | ||||||
|  |     public OllamaResult generate(String model, String prompt, boolean raw, Options options) | ||||||
|  |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|  |         return generate(model, prompt, raw, options, null); | ||||||
|  |     } | ||||||
|  |  | ||||||
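A minimal sketch of the renamed generate() calls. OptionsBuilder is assumed to live in the library's utils package (it is not part of this diff), the import paths mirror those used above, and the method reference assumes OllamaStreamHandler is a single-method String callback, as ConsoleOutputStreamHandler further down suggests; host and model are placeholders:

    import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
    import io.github.amithkoujalgi.ollama4j.core.models.*;
    import io.github.amithkoujalgi.ollama4j.core.utils.Options;
    import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

    public class GenerateSketch {
        public static void main(String[] args) throws Exception {
            OllamaAPI api = new OllamaAPI("http://localhost:11434");
            Options options = new OptionsBuilder().build(); // assumed builder, not shown in this diff

            // blocking call without streaming (delegates with a null stream handler)
            OllamaResult result = api.generate("llama2", "Why is the sky blue?", false, options);
            System.out.println(result.getResponse());

            // same call with a stream handler; the callback receives each streamed partial response
            api.generate("llama2", "Why is the sky blue?", false, options, System.out::print);
        }
    }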
|  |  | ||||||
|  |     /** | ||||||
|  |      * Generates response using the specified AI model and prompt (in blocking mode), and then invokes a set of tools | ||||||
|  |      * on the generated response. | ||||||
|  |      * | ||||||
|  |      * @param model   The name or identifier of the AI model to use for generating the response. | ||||||
|  |      * @param prompt  The input text or prompt to provide to the AI model. | ||||||
|  |      * @param options Additional options or configurations to use when generating the response. | ||||||
|  |      * @return {@link OllamaToolsResult} An OllamaToolsResult object containing the response from the AI model and the results of invoking the tools on that output. | ||||||
|  |      * @throws OllamaBaseException  If there is an error related to the Ollama API or service. | ||||||
|  |      * @throws IOException          If there is an error related to input/output operations. | ||||||
|  |      * @throws InterruptedException If the method is interrupted while waiting for the AI model | ||||||
|  |      *                              to generate the response or for the tools to be invoked. | ||||||
|  |      */ | ||||||
|  |     public OllamaToolsResult generateWithTools(String model, String prompt, Options options) | ||||||
|  |             throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException { | ||||||
|  |         boolean raw = true; | ||||||
|  |         OllamaToolsResult toolResult = new OllamaToolsResult(); | ||||||
|  |         Map<ToolFunctionCallSpec, Object> toolResults = new HashMap<>(); | ||||||
|  |  | ||||||
|  |         OllamaResult result = generate(model, prompt, raw, options, null); | ||||||
|  |         toolResult.setModelResult(result); | ||||||
|  |  | ||||||
|  |         String toolsResponse = result.getResponse(); | ||||||
|  |         if (toolsResponse.contains("[TOOL_CALLS]")) { | ||||||
|  |             toolsResponse = toolsResponse.replace("[TOOL_CALLS]", ""); | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         List<ToolFunctionCallSpec> toolFunctionCallSpecs = Utils.getObjectMapper().readValue(toolsResponse, Utils.getObjectMapper().getTypeFactory().constructCollectionType(List.class, ToolFunctionCallSpec.class)); | ||||||
|  |         for (ToolFunctionCallSpec toolFunctionCallSpec : toolFunctionCallSpecs) { | ||||||
|  |             toolResults.put(toolFunctionCallSpec, invokeTool(toolFunctionCallSpec)); | ||||||
|  |         } | ||||||
|  |         toolResult.setToolResults(toolResults); | ||||||
|  |         return toolResult; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Generate response for a question to a model running on Ollama server and get a callback handle | ||||||
|  |      * that can be used to check for status and get the response from the model later. This would be | ||||||
|  |      * an async/non-blocking call. | ||||||
|      * |      * | ||||||
|      * @param model  the ollama model to ask the question to |      * @param model  the ollama model to ask the question to | ||||||
|      * @param prompt the prompt/question text |      * @param prompt the prompt/question text | ||||||
|      * @return the ollama async result callback handle |      * @return the ollama async result callback handle | ||||||
|      */ |      */ | ||||||
|   public OllamaAsyncResultCallback askAsync(String model, String prompt) { |     public OllamaAsyncResultStreamer generateAsync(String model, String prompt, boolean raw) { | ||||||
|     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt); |         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); | ||||||
|  |         ollamaRequestModel.setRaw(raw); | ||||||
|         URI uri = URI.create(this.host + "/api/generate"); |         URI uri = URI.create(this.host + "/api/generate"); | ||||||
|     OllamaAsyncResultCallback ollamaAsyncResultCallback = |         OllamaAsyncResultStreamer ollamaAsyncResultStreamer = | ||||||
|         new OllamaAsyncResultCallback( |                 new OllamaAsyncResultStreamer( | ||||||
|                         getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds); |                         getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds); | ||||||
|     ollamaAsyncResultCallback.start(); |         ollamaAsyncResultStreamer.start(); | ||||||
|     return ollamaAsyncResultCallback; |         return ollamaAsyncResultStreamer; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
| @@ -375,18 +440,30 @@ public class OllamaAPI { | |||||||
|      * @param options       the Options object - <a |      * @param options       the Options object - <a | ||||||
|      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More |      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More | ||||||
|      *                      details on the options</a> |      *                      details on the options</a> | ||||||
|  |      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||||
|      * @return OllamaResult that includes response text and time taken for response |      * @return OllamaResult that includes response text and time taken for response | ||||||
|      */ |      */ | ||||||
|   public OllamaResult askWithImageFiles( |     public OllamaResult generateWithImageFiles( | ||||||
|       String model, String prompt, List<File> imageFiles, Options options) |             String model, String prompt, List<File> imageFiles, Options options, OllamaStreamHandler streamHandler) | ||||||
|             throws OllamaBaseException, IOException, InterruptedException { |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|         List<String> images = new ArrayList<>(); |         List<String> images = new ArrayList<>(); | ||||||
|         for (File imageFile : imageFiles) { |         for (File imageFile : imageFiles) { | ||||||
|             images.add(encodeFileToBase64(imageFile)); |             images.add(encodeFileToBase64(imageFile)); | ||||||
|         } |         } | ||||||
|     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images); |         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); | ||||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); |         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||||
|     return askSync(ollamaRequestModel); |         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Convenience method to call Ollama API without streaming responses. | ||||||
|  |      * <p> | ||||||
|  |      * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)} | ||||||
|  |      */ | ||||||
|  |     public OllamaResult generateWithImageFiles( | ||||||
|  |             String model, String prompt, List<File> imageFiles, Options options) | ||||||
|  |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|  |         return generateWithImageFiles(model, prompt, imageFiles, options, null); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
| @@ -399,20 +476,94 @@ public class OllamaAPI { | |||||||
|      * @param options       the Options object - <a |      * @param options       the Options object - <a | ||||||
|      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More |      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More | ||||||
|      *                      details on the options</a> |      *                      details on the options</a> | ||||||
|  |      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||||
|      * @return OllamaResult that includes response text and time taken for response |      * @return OllamaResult that includes response text and time taken for response | ||||||
|      */ |      */ | ||||||
|   public OllamaResult askWithImageURLs( |     public OllamaResult generateWithImageURLs( | ||||||
|       String model, String prompt, List<String> imageURLs, Options options) |             String model, String prompt, List<String> imageURLs, Options options, OllamaStreamHandler streamHandler) | ||||||
|             throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { |             throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||||
|         List<String> images = new ArrayList<>(); |         List<String> images = new ArrayList<>(); | ||||||
|         for (String imageURL : imageURLs) { |         for (String imageURL : imageURLs) { | ||||||
|       images.add(encodeByteArrayToBase64(loadImageBytesFromUrl(imageURL))); |             images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL))); | ||||||
|         } |         } | ||||||
|     OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images); |         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); | ||||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); |         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||||
|     return askSync(ollamaRequestModel); |         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Convenience method to call Ollama API without streaming responses. | ||||||
|  |      * <p> | ||||||
|  |      * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)} | ||||||
|  |      */ | ||||||
|  |     public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs, | ||||||
|  |                                               Options options) | ||||||
|  |             throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||||
|  |         return generateWithImageURLs(model, prompt, imageURLs, options, null); | ||||||
|  |     } | ||||||
|  |  | ||||||
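A sketch of the image-based generation calls above; the model name, image URL, and OptionsBuilder are placeholders or assumptions not shown in this diff:

    import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
    import io.github.amithkoujalgi.ollama4j.core.models.*;
    import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

    import java.util.List;

    public class ImageGenerateSketch {
        public static void main(String[] args) throws Exception {
            OllamaAPI api = new OllamaAPI("http://localhost:11434");

            // each URL is downloaded and Base64-encoded before being sent to /api/generate
            OllamaResult result = api.generateWithImageURLs(
                    "llava",                                  // placeholder multimodal model
                    "What is in this picture?",
                    List.of("https://example.com/cat.jpg"),   // placeholder image URL
                    new OptionsBuilder().build());
            System.out.println(result.getResponse());
        }
    }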
|  |  | ||||||
|  |     /** | ||||||
|  |      * Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the | ||||||
|  |      * '/api/chat' endpoint. | ||||||
|  |      * | ||||||
|  |      * @param model    the ollama model to ask the question to | ||||||
|  |      * @param messages chat history / message stack to send to the model | ||||||
|  |      * @return {@link OllamaChatResult} containing the api response and the message history including the newly acquired assistant response. | ||||||
|  |      * @throws OllamaBaseException  any response code other than 200 has been returned | ||||||
|  |      * @throws IOException          in case the responseStream can not be read | ||||||
|  |      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||||
|  |      */ | ||||||
|  |     public OllamaChatResult chat(String model, List<OllamaChatMessage> messages) throws OllamaBaseException, IOException, InterruptedException { | ||||||
|  |         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model); | ||||||
|  |         return chat(builder.withMessages(messages).build()); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. | ||||||
|  |      * <p> | ||||||
|  |      * Hint: the OllamaChatRequestModel#getStream() property is not implemented. | ||||||
|  |      * | ||||||
|  |      * @param request request object to be sent to the server | ||||||
|  |      * @return {@link OllamaChatResult} | ||||||
|  |      * @throws OllamaBaseException  any response code other than 200 has been returned | ||||||
|  |      * @throws IOException          in case the responseStream can not be read | ||||||
|  |      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||||
|  |      */ | ||||||
|  |     public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException { | ||||||
|  |         return chat(request, null); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. | ||||||
|  |      * <p> | ||||||
|  |      * Hint: the OllamaChatRequestModel#getStream() property is not implemented. | ||||||
|  |      * | ||||||
|  |      * @param request       request object to be sent to the server | ||||||
|  |      * @param streamHandler callback handler to handle the last message from stream (caution: all previous messages from stream will be concatenated) | ||||||
|  |      * @return {@link OllamaChatResult} | ||||||
|  |      * @throws OllamaBaseException  any response code other than 200 has been returned | ||||||
|  |      * @throws IOException          in case the responseStream can not be read | ||||||
|  |      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||||
|  |      */ | ||||||
|  |     public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { | ||||||
|  |         OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); | ||||||
|  |         OllamaResult result; | ||||||
|  |         if (streamHandler != null) { | ||||||
|  |             request.setStream(true); | ||||||
|  |             result = requestCaller.call(request, streamHandler); | ||||||
|  |         } else { | ||||||
|  |             result = requestCaller.callSync(request); | ||||||
|  |         } | ||||||
|  |         return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages()); | ||||||
|  |     } | ||||||
|  |  | ||||||
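A usage sketch of the new chat API, combining the message, role, and builder types added further down in this change. The host and model name are placeholders, and the lambda assumes OllamaStreamHandler is a single-method String callback:

    import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
    import io.github.amithkoujalgi.ollama4j.core.models.chat.*;

    import java.util.List;

    public class ChatSketch {
        public static void main(String[] args) throws Exception {
            OllamaAPI api = new OllamaAPI("http://localhost:11434");

            List<OllamaChatMessage> history = List.of(
                    new OllamaChatMessage(OllamaChatMessageRole.SYSTEM, "You are a terse assistant."),
                    new OllamaChatMessage(OllamaChatMessageRole.USER, "Name three JVM languages."));

            // simple form: model name + message stack
            OllamaChatResult result = api.chat("llama2", history);

            // builder form, streaming the partial assistant message as it arrives
            OllamaChatRequestModel request = OllamaChatRequestBuilder.getInstance("llama2")
                    .withMessages(history)
                    .build();
            OllamaChatResult streamed = api.chat(request, partial -> System.out.print(partial));
        }
    }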
|  |     public void registerTool(Tools.ToolSpecification toolSpecification) { | ||||||
|  |         toolRegistry.addFunction(toolSpecification.getFunctionName(), toolSpecification.getToolDefinition()); | ||||||
|  |     } | ||||||
|  |  | ||||||
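A hedged sketch of registering and invoking a tool. Only getFunctionName()/getToolDefinition() and the Map-based ToolFunction are visible in this diff, so the ToolSpecification builder, the lambda form of ToolFunction, the accessor names on OllamaToolsResult, and the model name are assumptions:

    import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
    import io.github.amithkoujalgi.ollama4j.core.tools.*;
    import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

    public class ToolsSketch {
        public static void main(String[] args) throws Exception {
            OllamaAPI api = new OllamaAPI("http://localhost:11434");

            // register a tool; the builder shape is assumed, the ToolFunction receives the parsed arguments map
            api.registerTool(Tools.ToolSpecification.builder()
                    .functionName("current-temperature")
                    .toolDefinition(arguments -> "25 degrees in " + arguments.get("city"))
                    .build());

            // the model is expected to answer with a [TOOL_CALLS] JSON array of function calls,
            // which generateWithTools() parses and dispatches to the registered ToolFunctions
            OllamaToolsResult result = api.generateWithTools(
                    "mistral", "What is the temperature in Berlin? Use a tool.",
                    new OptionsBuilder().build());

            result.getToolResults().forEach((call, value) ->
                    System.out.println(call.getName() + " -> " + value));
        }
    }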
|  |     // technical private methods // | ||||||
|  |  | ||||||
|     private static String encodeFileToBase64(File file) throws IOException { |     private static String encodeFileToBase64(File file) throws IOException { | ||||||
|         return Base64.getEncoder().encodeToString(Files.readAllBytes(file.toPath())); |         return Base64.getEncoder().encodeToString(Files.readAllBytes(file.toPath())); | ||||||
|     } |     } | ||||||
| @@ -421,72 +572,19 @@ public class OllamaAPI { | |||||||
|         return Base64.getEncoder().encodeToString(bytes); |         return Base64.getEncoder().encodeToString(bytes); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|   private static byte[] loadImageBytesFromUrl(String imageUrl) |     private OllamaResult generateSyncForOllamaRequestModel( | ||||||
|       throws IOException, URISyntaxException { |             OllamaGenerateRequestModel ollamaRequestModel, OllamaStreamHandler streamHandler) | ||||||
|     URL url = new URI(imageUrl).toURL(); |  | ||||||
|     try (InputStream in = url.openStream(); |  | ||||||
|         ByteArrayOutputStream out = new ByteArrayOutputStream()) { |  | ||||||
|       byte[] buffer = new byte[1024]; |  | ||||||
|       int bytesRead; |  | ||||||
|       while ((bytesRead = in.read(buffer)) != -1) { |  | ||||||
|         out.write(buffer, 0, bytesRead); |  | ||||||
|       } |  | ||||||
|       return out.toByteArray(); |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   private OllamaResult askSync(OllamaRequestModel ollamaRequestModel) |  | ||||||
|             throws OllamaBaseException, IOException, InterruptedException { |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|     long startTime = System.currentTimeMillis(); |         OllamaGenerateEndpointCaller requestCaller = | ||||||
|     HttpClient httpClient = HttpClient.newHttpClient(); |                 new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); | ||||||
|     URI uri = URI.create(this.host + "/api/generate"); |         OllamaResult result; | ||||||
|     HttpRequest.Builder requestBuilder = |         if (streamHandler != null) { | ||||||
|         getRequestBuilderDefault(uri) |             ollamaRequestModel.setStream(true); | ||||||
|             .POST( |             result = requestCaller.call(ollamaRequestModel, streamHandler); | ||||||
|                 HttpRequest.BodyPublishers.ofString( |  | ||||||
|                     Utils.getObjectMapper().writeValueAsString(ollamaRequestModel))); |  | ||||||
|     HttpRequest request = requestBuilder.build(); |  | ||||||
|     if (verbose) logger.info("Asking model: " + ollamaRequestModel); |  | ||||||
|     HttpResponse<InputStream> response = |  | ||||||
|         httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); |  | ||||||
|     int statusCode = response.statusCode(); |  | ||||||
|     InputStream responseBodyStream = response.body(); |  | ||||||
|     StringBuilder responseBuffer = new StringBuilder(); |  | ||||||
|     try (BufferedReader reader = |  | ||||||
|         new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { |  | ||||||
|       String line; |  | ||||||
|       while ((line = reader.readLine()) != null) { |  | ||||||
|         if (statusCode == 404) { |  | ||||||
|           logger.warn("Status code: 404 (Not Found)"); |  | ||||||
|           OllamaErrorResponseModel ollamaResponseModel = |  | ||||||
|               Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class); |  | ||||||
|           responseBuffer.append(ollamaResponseModel.getError()); |  | ||||||
|         } else if (statusCode == 401) { |  | ||||||
|           logger.warn("Status code: 401 (Unauthorized)"); |  | ||||||
|           OllamaErrorResponseModel ollamaResponseModel = |  | ||||||
|               Utils.getObjectMapper() |  | ||||||
|                   .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class); |  | ||||||
|           responseBuffer.append(ollamaResponseModel.getError()); |  | ||||||
|         } else { |         } else { | ||||||
|           OllamaResponseModel ollamaResponseModel = |             result = requestCaller.callSync(ollamaRequestModel); | ||||||
|               Utils.getObjectMapper().readValue(line, OllamaResponseModel.class); |  | ||||||
|           if (!ollamaResponseModel.isDone()) { |  | ||||||
|             responseBuffer.append(ollamaResponseModel.getResponse()); |  | ||||||
|           } |  | ||||||
|         } |  | ||||||
|       } |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     if (statusCode != 200) { |  | ||||||
|       logger.error("Status code " + statusCode); |  | ||||||
|       throw new OllamaBaseException(responseBuffer.toString()); |  | ||||||
|     } else { |  | ||||||
|       long endTime = System.currentTimeMillis(); |  | ||||||
|       OllamaResult ollamaResult = |  | ||||||
|           new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode); |  | ||||||
|       if (verbose) logger.info("Model response: " + ollamaResult); |  | ||||||
|       return ollamaResult; |  | ||||||
|         } |         } | ||||||
|  |         return result; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
| @@ -524,4 +622,22 @@ public class OllamaAPI { | |||||||
|     private boolean isBasicAuthCredentialsSet() { |     private boolean isBasicAuthCredentialsSet() { | ||||||
|         return basicAuth != null; |         return basicAuth != null; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     private Object invokeTool(ToolFunctionCallSpec toolFunctionCallSpec) throws ToolInvocationException { | ||||||
|  |         try { | ||||||
|  |             String methodName = toolFunctionCallSpec.getName(); | ||||||
|  |             Map<String, Object> arguments = toolFunctionCallSpec.getArguments(); | ||||||
|  |             ToolFunction function = toolRegistry.getFunction(methodName); | ||||||
|  |             if (verbose) { | ||||||
|  |                 logger.debug("Invoking function {} with arguments {}", methodName, arguments); | ||||||
|  |             } | ||||||
|  |             if (function == null) { | ||||||
|  |                 throw new ToolNotFoundException("No such tool: " + methodName); | ||||||
|  |             } | ||||||
|  |             return function.apply(arguments); | ||||||
|  |         } catch (Exception e) { | ||||||
|  |             throw new ToolInvocationException("Failed to invoke tool: " + toolFunctionCallSpec.getName(), e); | ||||||
|  |         } | ||||||
|  |     } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -0,0 +1,18 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core; | ||||||
|  |  | ||||||
|  | import java.util.Iterator; | ||||||
|  | import java.util.LinkedList; | ||||||
|  | import java.util.Queue; | ||||||
|  |  | ||||||
|  | public class OllamaResultStream extends LinkedList<String> implements Queue<String> { | ||||||
|  |     @Override | ||||||
|  |     public String poll() { | ||||||
|  |         StringBuilder tokens = new StringBuilder(); | ||||||
|  |         Iterator<String> iterator = this.listIterator(); | ||||||
|  |         while (iterator.hasNext()) { | ||||||
|  |             tokens.append(iterator.next()); | ||||||
|  |             iterator.remove(); | ||||||
|  |         } | ||||||
|  |         return tokens.toString(); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,8 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.exceptions; | ||||||
|  |  | ||||||
|  | public class ToolInvocationException extends Exception { | ||||||
|  |  | ||||||
|  |     public ToolInvocationException(String s, Exception e) { | ||||||
|  |         super(s, e); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,8 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.exceptions; | ||||||
|  |  | ||||||
|  | public class ToolNotFoundException extends Exception { | ||||||
|  |  | ||||||
|  |     public ToolNotFoundException(String s) { | ||||||
|  |         super(s); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,14 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.impl; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; | ||||||
|  |  | ||||||
|  | public class ConsoleOutputStreamHandler implements OllamaStreamHandler { | ||||||
|  |     private final StringBuffer response = new StringBuffer(); | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void accept(String message) { | ||||||
|  |         String substr = message.substring(response.length()); | ||||||
|  |         response.append(substr); | ||||||
|  |         System.out.print(substr); | ||||||
|  |     } | ||||||
|  | } | ||||||
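The handler above can be plugged into any of the streaming generate() overloads, for instance (host, model, and OptionsBuilder are placeholders or assumptions):

    import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
    import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler;
    import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler;
    import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

    public class ConsoleStreamSketch {
        public static void main(String[] args) throws Exception {
            OllamaAPI api = new OllamaAPI("http://localhost:11434");
            OllamaStreamHandler handler = new ConsoleOutputStreamHandler();
            // each streamed message carries the full text so far; the handler prints only the new suffix
            api.generate("llama2", "Tell me a short joke.", false, new OptionsBuilder().build(), handler);
        }
    }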
| @@ -1,14 +1,22 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models; | package io.github.amithkoujalgi.ollama4j.core.models; | ||||||
|  |  | ||||||
|  | import java.time.LocalDateTime; | ||||||
|  | import java.time.OffsetDateTime; | ||||||
|  |  | ||||||
| import com.fasterxml.jackson.annotation.JsonProperty; | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
| import lombok.Data; | import lombok.Data; | ||||||
|  |  | ||||||
| @Data | @Data | ||||||
| public class Model { | public class Model { | ||||||
|  |  | ||||||
|   private String name; |   private String name; | ||||||
|  |   private String model; | ||||||
|   @JsonProperty("modified_at") |   @JsonProperty("modified_at") | ||||||
|   private String modifiedAt; |   private OffsetDateTime modifiedAt; | ||||||
|  |   @JsonProperty("expires_at") | ||||||
|  |   private OffsetDateTime expiresAt; | ||||||
|   private String digest; |   private String digest; | ||||||
|   private long size; |   private long size; | ||||||
|   @JsonProperty("details") |   @JsonProperty("details") | ||||||
| @@ -33,4 +41,13 @@ public class Model { | |||||||
|     return name.split(":")[1]; |     return name.split(":")[1]; | ||||||
|   } |   } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |   public String toString() { | ||||||
|  |     try { | ||||||
|  |       return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); | ||||||
|  |     } catch (JsonProcessingException e) { | ||||||
|  |       throw new RuntimeException(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
| } | } | ||||||
|   | |||||||
| @@ -2,7 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models; | |||||||
|  |  | ||||||
| import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | ||||||
| import com.fasterxml.jackson.annotation.JsonProperty; | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
| import java.util.Map; | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
| import lombok.Data; | import lombok.Data; | ||||||
|  |  | ||||||
| @Data | @Data | ||||||
| @@ -16,5 +17,14 @@ public class ModelDetail { | |||||||
|   private String parameters; |   private String parameters; | ||||||
|   private String template; |   private String template; | ||||||
|   private String system; |   private String system; | ||||||
|   private Map<String, String> details; |   private ModelMeta details; | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |   public String toString() { | ||||||
|  |     try { | ||||||
|  |       return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); | ||||||
|  |     } catch (JsonProcessingException e) { | ||||||
|  |       throw new RuntimeException(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -2,6 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models; | |||||||
|  |  | ||||||
| import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | ||||||
| import com.fasterxml.jackson.annotation.JsonProperty; | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
| import lombok.Data; | import lombok.Data; | ||||||
|  |  | ||||||
| @Data | @Data | ||||||
| @@ -21,4 +23,13 @@ public class ModelMeta { | |||||||
|  |  | ||||||
|   @JsonProperty("quantization_level") |   @JsonProperty("quantization_level") | ||||||
|   private String quantizationLevel; |   private String quantizationLevel; | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |   public String toString() { | ||||||
|  |     try { | ||||||
|  |       return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); | ||||||
|  |     } catch (JsonProcessingException e) { | ||||||
|  |       throw new RuntimeException(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,141 +0,0 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models; |  | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; |  | ||||||
| import java.io.BufferedReader; |  | ||||||
| import java.io.IOException; |  | ||||||
| import java.io.InputStream; |  | ||||||
| import java.io.InputStreamReader; |  | ||||||
| import java.net.http.HttpClient; |  | ||||||
| import java.net.http.HttpRequest; |  | ||||||
| import java.net.http.HttpResponse; |  | ||||||
| import java.nio.charset.StandardCharsets; |  | ||||||
| import java.time.Duration; |  | ||||||
| import java.util.LinkedList; |  | ||||||
| import java.util.Queue; |  | ||||||
| import lombok.Data; |  | ||||||
| import lombok.EqualsAndHashCode; |  | ||||||
| import lombok.Getter; |  | ||||||
|  |  | ||||||
| @Data |  | ||||||
| @EqualsAndHashCode(callSuper = true) |  | ||||||
| @SuppressWarnings("unused") |  | ||||||
| public class OllamaAsyncResultCallback extends Thread { |  | ||||||
|   private final HttpRequest.Builder requestBuilder; |  | ||||||
|   private final OllamaRequestModel ollamaRequestModel; |  | ||||||
|   private final Queue<String> queue = new LinkedList<>(); |  | ||||||
|   private String result; |  | ||||||
|   private boolean isDone; |  | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    * -- GETTER -- Returns the status of the request. Indicates if the request was successful or a |  | ||||||
|    * failure. If the request was a failure, the `getResponse()` method will return the error |  | ||||||
|    * message. |  | ||||||
|    */ |  | ||||||
|   @Getter private boolean succeeded; |  | ||||||
|  |  | ||||||
|   private long requestTimeoutSeconds; |  | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    * -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama |  | ||||||
|    * server. |  | ||||||
|    */ |  | ||||||
|   @Getter private int httpStatusCode; |  | ||||||
|  |  | ||||||
|   /** -- GETTER -- Returns the response time in milliseconds. */ |  | ||||||
|   @Getter private long responseTime = 0; |  | ||||||
|  |  | ||||||
|   public OllamaAsyncResultCallback( |  | ||||||
|       HttpRequest.Builder requestBuilder, |  | ||||||
|       OllamaRequestModel ollamaRequestModel, |  | ||||||
|       long requestTimeoutSeconds) { |  | ||||||
|     this.requestBuilder = requestBuilder; |  | ||||||
|     this.ollamaRequestModel = ollamaRequestModel; |  | ||||||
|     this.isDone = false; |  | ||||||
|     this.result = ""; |  | ||||||
|     this.queue.add(""); |  | ||||||
|     this.requestTimeoutSeconds = requestTimeoutSeconds; |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   @Override |  | ||||||
|   public void run() { |  | ||||||
|     HttpClient httpClient = HttpClient.newHttpClient(); |  | ||||||
|     try { |  | ||||||
|       long startTime = System.currentTimeMillis(); |  | ||||||
|       HttpRequest request = |  | ||||||
|           requestBuilder |  | ||||||
|               .POST( |  | ||||||
|                   HttpRequest.BodyPublishers.ofString( |  | ||||||
|                       Utils.getObjectMapper().writeValueAsString(ollamaRequestModel))) |  | ||||||
|               .header("Content-Type", "application/json") |  | ||||||
|               .timeout(Duration.ofSeconds(requestTimeoutSeconds)) |  | ||||||
|               .build(); |  | ||||||
|       HttpResponse<InputStream> response = |  | ||||||
|           httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); |  | ||||||
|       int statusCode = response.statusCode(); |  | ||||||
|       this.httpStatusCode = statusCode; |  | ||||||
|  |  | ||||||
|       InputStream responseBodyStream = response.body(); |  | ||||||
|       try (BufferedReader reader = |  | ||||||
|           new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { |  | ||||||
|         String line; |  | ||||||
|         StringBuilder responseBuffer = new StringBuilder(); |  | ||||||
|         while ((line = reader.readLine()) != null) { |  | ||||||
|           if (statusCode == 404) { |  | ||||||
|             OllamaErrorResponseModel ollamaResponseModel = |  | ||||||
|                 Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class); |  | ||||||
|             queue.add(ollamaResponseModel.getError()); |  | ||||||
|             responseBuffer.append(ollamaResponseModel.getError()); |  | ||||||
|           } else { |  | ||||||
|             OllamaResponseModel ollamaResponseModel = |  | ||||||
|                 Utils.getObjectMapper().readValue(line, OllamaResponseModel.class); |  | ||||||
|             queue.add(ollamaResponseModel.getResponse()); |  | ||||||
|             if (!ollamaResponseModel.isDone()) { |  | ||||||
|               responseBuffer.append(ollamaResponseModel.getResponse()); |  | ||||||
|             } |  | ||||||
|           } |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         this.isDone = true; |  | ||||||
|         this.succeeded = true; |  | ||||||
|         this.result = responseBuffer.toString(); |  | ||||||
|         long endTime = System.currentTimeMillis(); |  | ||||||
|         responseTime = endTime - startTime; |  | ||||||
|       } |  | ||||||
|       if (statusCode != 200) { |  | ||||||
|         throw new OllamaBaseException(this.result); |  | ||||||
|       } |  | ||||||
|     } catch (IOException | InterruptedException | OllamaBaseException e) { |  | ||||||
|       this.isDone = true; |  | ||||||
|       this.succeeded = false; |  | ||||||
|       this.result = "[FAILED] " + e.getMessage(); |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    * Returns the status of the thread. This does not indicate that the request was successful or a |  | ||||||
|    * failure, rather it is just a status flag to indicate if the thread is active or ended. |  | ||||||
|    * |  | ||||||
|    * @return boolean - status |  | ||||||
|    */ |  | ||||||
|   public boolean isComplete() { |  | ||||||
|     return isDone; |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    * Returns the final completion/response when the execution completes. Does not return intermediate results. |  | ||||||
|    * |  | ||||||
|    * @return String completion/response text |  | ||||||
|    */ |  | ||||||
|   public String getResponse() { |  | ||||||
|     return result; |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   public Queue<String> getStream() { |  | ||||||
|     return queue; |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   public void setRequestTimeoutSeconds(long requestTimeoutSeconds) { |  | ||||||
|     this.requestTimeoutSeconds = requestTimeoutSeconds; |  | ||||||
|   } |  | ||||||
| } |  | ||||||
| @@ -0,0 +1,124 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.OllamaResultStream; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  | import lombok.Data; | ||||||
|  | import lombok.EqualsAndHashCode; | ||||||
|  | import lombok.Getter; | ||||||
|  | import lombok.Setter; | ||||||
|  |  | ||||||
|  | import java.io.BufferedReader; | ||||||
|  | import java.io.IOException; | ||||||
|  | import java.io.InputStream; | ||||||
|  | import java.io.InputStreamReader; | ||||||
|  | import java.net.http.HttpClient; | ||||||
|  | import java.net.http.HttpRequest; | ||||||
|  | import java.net.http.HttpResponse; | ||||||
|  | import java.nio.charset.StandardCharsets; | ||||||
|  | import java.time.Duration; | ||||||
|  |  | ||||||
|  | @Data | ||||||
|  | @EqualsAndHashCode(callSuper = true) | ||||||
|  | @SuppressWarnings("unused") | ||||||
|  | public class OllamaAsyncResultStreamer extends Thread { | ||||||
|  |     private final HttpRequest.Builder requestBuilder; | ||||||
|  |     private final OllamaGenerateRequestModel ollamaRequestModel; | ||||||
|  |     private final OllamaResultStream stream = new OllamaResultStream(); | ||||||
|  |     private String completeResponse; | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * -- GETTER -- Returns the status of the request. Indicates if the request was successful or a | ||||||
|  |      * failure. If the request was a failure, the `getResponse()` method will return the error | ||||||
|  |      * message. | ||||||
|  |      */ | ||||||
|  |     @Getter | ||||||
|  |     private boolean succeeded; | ||||||
|  |  | ||||||
|  |     @Setter | ||||||
|  |     private long requestTimeoutSeconds; | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama | ||||||
|  |      * server. | ||||||
|  |      */ | ||||||
|  |     @Getter | ||||||
|  |     private int httpStatusCode; | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * -- GETTER -- Returns the response time in milliseconds. | ||||||
|  |      */ | ||||||
|  |     @Getter | ||||||
|  |     private long responseTime = 0; | ||||||
|  |  | ||||||
|  |     public OllamaAsyncResultStreamer( | ||||||
|  |             HttpRequest.Builder requestBuilder, | ||||||
|  |             OllamaGenerateRequestModel ollamaRequestModel, | ||||||
|  |             long requestTimeoutSeconds) { | ||||||
|  |         this.requestBuilder = requestBuilder; | ||||||
|  |         this.ollamaRequestModel = ollamaRequestModel; | ||||||
|  |         this.completeResponse = ""; | ||||||
|  |         this.stream.add(""); | ||||||
|  |         this.requestTimeoutSeconds = requestTimeoutSeconds; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void run() { | ||||||
|  |         ollamaRequestModel.setStream(true); | ||||||
|  |         HttpClient httpClient = HttpClient.newHttpClient(); | ||||||
|  |         try { | ||||||
|  |             long startTime = System.currentTimeMillis(); | ||||||
|  |             HttpRequest request = | ||||||
|  |                     requestBuilder | ||||||
|  |                             .POST( | ||||||
|  |                                     HttpRequest.BodyPublishers.ofString( | ||||||
|  |                                             Utils.getObjectMapper().writeValueAsString(ollamaRequestModel))) | ||||||
|  |                             .header("Content-Type", "application/json") | ||||||
|  |                             .timeout(Duration.ofSeconds(requestTimeoutSeconds)) | ||||||
|  |                             .build(); | ||||||
|  |             HttpResponse<InputStream> response = | ||||||
|  |                     httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); | ||||||
|  |             int statusCode = response.statusCode(); | ||||||
|  |             this.httpStatusCode = statusCode; | ||||||
|  |  | ||||||
|  |             InputStream responseBodyStream = response.body(); | ||||||
|  |             try (BufferedReader reader = | ||||||
|  |                          new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { | ||||||
|  |                 String line; | ||||||
|  |                 StringBuilder responseBuffer = new StringBuilder(); | ||||||
|  |                 while ((line = reader.readLine()) != null) { | ||||||
|  |                     if (statusCode == 404) { | ||||||
|  |                         OllamaErrorResponseModel ollamaResponseModel = | ||||||
|  |                                 Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class); | ||||||
|  |                         stream.add(ollamaResponseModel.getError()); | ||||||
|  |                         responseBuffer.append(ollamaResponseModel.getError()); | ||||||
|  |                     } else { | ||||||
|  |                         OllamaGenerateResponseModel ollamaResponseModel = | ||||||
|  |                                 Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); | ||||||
|  |                         String res = ollamaResponseModel.getResponse(); | ||||||
|  |                         stream.add(res); | ||||||
|  |                         if (!ollamaResponseModel.isDone()) { | ||||||
|  |                             responseBuffer.append(res); | ||||||
|  |                         } | ||||||
|  |                     } | ||||||
|  |                 } | ||||||
|  |  | ||||||
|  |                 this.succeeded = true; | ||||||
|  |                 this.completeResponse = responseBuffer.toString(); | ||||||
|  |                 long endTime = System.currentTimeMillis(); | ||||||
|  |                 responseTime = endTime - startTime; | ||||||
|  |             } | ||||||
|  |             if (statusCode != 200) { | ||||||
|  |                 throw new OllamaBaseException(this.completeResponse); | ||||||
|  |             } | ||||||
|  |         } catch (IOException | InterruptedException | OllamaBaseException e) { | ||||||
|  |             this.succeeded = false; | ||||||
|  |             this.completeResponse = "[FAILED] " + e.getMessage(); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | } | ||||||
|  |  | ||||||
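A polling sketch for the streamer above. The accessors are the Lombok-generated getters implied by @Data/@Getter, isAlive() comes from Thread, and the host/model are placeholders:

    import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
    import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultStreamer;

    public class AsyncGenerateSketch {
        public static void main(String[] args) throws Exception {
            OllamaAPI api = new OllamaAPI("http://localhost:11434");
            OllamaAsyncResultStreamer streamer =
                    api.generateAsync("llama2", "Write a haiku about rivers.", false);

            // the streamer is a Thread started by generateAsync(); poll() drains the tokens received so far
            while (streamer.isAlive()) {
                System.out.print(streamer.getStream().poll());
                Thread.sleep(250);
            }
            System.out.print(streamer.getStream().poll()); // flush anything left after the thread finished

            System.out.println();
            System.out.println("HTTP " + streamer.getHttpStatusCode()
                    + ", succeeded=" + streamer.isSucceeded()
                    + ", took " + streamer.getResponseTime() + " ms");
        }
    }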
| @@ -0,0 +1,35 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models; | ||||||
|  |  | ||||||
|  | import java.util.Map; | ||||||
|  | import com.fasterxml.jackson.annotation.JsonInclude; | ||||||
|  | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import com.fasterxml.jackson.databind.annotation.JsonSerialize; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.BooleanToJsonFormatFlagSerializer; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  | import lombok.Data; | ||||||
|  |  | ||||||
|  | @Data | ||||||
|  | @JsonInclude(JsonInclude.Include.NON_NULL) | ||||||
|  | public abstract class OllamaCommonRequestModel { | ||||||
|  |    | ||||||
|  |   protected String model;   | ||||||
|  |   @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class) | ||||||
|  |   @JsonProperty(value = "format") | ||||||
|  |   protected Boolean returnFormatJson; | ||||||
|  |   protected Map<String, Object> options; | ||||||
|  |   protected String template; | ||||||
|  |   protected boolean stream; | ||||||
|  |   @JsonProperty(value = "keep_alive") | ||||||
|  |   protected String keepAlive; | ||||||
|  |  | ||||||
|  |    | ||||||
|  |   public String toString() { | ||||||
|  |     try { | ||||||
|  |       return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); | ||||||
|  |     } catch (JsonProcessingException e) { | ||||||
|  |       throw new RuntimeException(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  | } | ||||||
@@ -1,8 +1,6 @@
 package io.github.amithkoujalgi.ollama4j.core.models;
 
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.List;
 import lombok.Data;
 
 @Data
| @@ -0,0 +1,45 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||||
|  |  | ||||||
|  | import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; | ||||||
|  |  | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import com.fasterxml.jackson.databind.annotation.JsonSerialize; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.FileToBase64Serializer; | ||||||
|  |  | ||||||
|  | import java.util.List; | ||||||
|  | import lombok.AllArgsConstructor; | ||||||
|  | import lombok.Data; | ||||||
|  | import lombok.NoArgsConstructor; | ||||||
|  | import lombok.NonNull; | ||||||
|  | import lombok.RequiredArgsConstructor; | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * Defines a single Message to be used inside a chat request against the ollama /api/chat endpoint. | ||||||
|  |  * | ||||||
|  |  * @see <a href="https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate chat completion</a> | ||||||
|  |  */ | ||||||
|  | @Data | ||||||
|  | @AllArgsConstructor | ||||||
|  | @RequiredArgsConstructor | ||||||
|  | @NoArgsConstructor | ||||||
|  | public class OllamaChatMessage { | ||||||
|  |  | ||||||
|  |     @NonNull | ||||||
|  |     private OllamaChatMessageRole role; | ||||||
|  |  | ||||||
|  |     @NonNull | ||||||
|  |     private String content; | ||||||
|  |  | ||||||
|  |     @JsonSerialize(using = FileToBase64Serializer.class) | ||||||
|  |     private List<byte[]> images; | ||||||
|  |      | ||||||
|  |   @Override | ||||||
|  |   public String toString() { | ||||||
|  |     try { | ||||||
|  |       return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); | ||||||
|  |     } catch (JsonProcessingException e) { | ||||||
|  |       throw new RuntimeException(e); | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -0,0 +1,19 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||||
|  |  | ||||||
|  | import com.fasterxml.jackson.annotation.JsonValue; | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * Defines the possible Chat Message roles. | ||||||
|  |  */ | ||||||
|  | public enum OllamaChatMessageRole { | ||||||
|  |     SYSTEM("system"), | ||||||
|  |     USER("user"), | ||||||
|  |     ASSISTANT("assistant"); | ||||||
|  |  | ||||||
|  |     @JsonValue | ||||||
|  |     private String roleName; | ||||||
|  |  | ||||||
|  |     private OllamaChatMessageRole(String roleName){ | ||||||
|  |         this.roleName = roleName; | ||||||
|  |     } | ||||||
|  | } | ||||||
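Because roleName is annotated with @JsonValue, each role serializes as its lowercase string ("system", "user", "assistant") rather than the enum constant name. A minimal sketch of a message carrying a role (the content string is a placeholder, not part of this change set):

    OllamaChatMessage message = new OllamaChatMessage(OllamaChatMessageRole.USER, "Why is the sky blue?");
    // message.toString() pretty-prints JSON roughly of the form:
    // { "role" : "user", "content" : "Why is the sky blue?" }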
| @@ -0,0 +1,110 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||||
|  |  | ||||||
|  | import java.io.File; | ||||||
|  | import java.io.IOException; | ||||||
|  | import java.net.URISyntaxException; | ||||||
|  | import java.nio.file.Files; | ||||||
|  | import java.util.ArrayList; | ||||||
|  | import java.util.List; | ||||||
|  | import java.util.stream.Collectors; | ||||||
|  |  | ||||||
|  | import org.slf4j.Logger; | ||||||
|  | import org.slf4j.LoggerFactory; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * Helper class for creating {@link OllamaChatRequestModel} objects using the builder-pattern. | ||||||
|  |  */ | ||||||
|  | public class OllamaChatRequestBuilder { | ||||||
|  |  | ||||||
|  |     private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class); | ||||||
|  |  | ||||||
|  |     private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages){ | ||||||
|  |         request = new OllamaChatRequestModel(model, messages); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private OllamaChatRequestModel request; | ||||||
|  |  | ||||||
|  |     public static OllamaChatRequestBuilder getInstance(String model){ | ||||||
|  |         return new OllamaChatRequestBuilder(model, new ArrayList<>()); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaChatRequestModel build(){ | ||||||
|  |         return request; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public void reset(){ | ||||||
|  |         request = new OllamaChatRequestModel(request.getModel(), new ArrayList<>()); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images){ | ||||||
|  |         List<OllamaChatMessage> messages = this.request.getMessages(); | ||||||
|  |  | ||||||
|  |         List<byte[]> binaryImages = images.stream().map(file -> { | ||||||
|  |             try { | ||||||
|  |                 return Files.readAllBytes(file.toPath()); | ||||||
|  |             } catch (IOException e) { | ||||||
|  |                 LOG.warn(String.format("File '%s' could not be accessed, will not add to message!",file.toPath()), e); | ||||||
|  |                 return new byte[0]; | ||||||
|  |             } | ||||||
|  |         }).collect(Collectors.toList()); | ||||||
|  |  | ||||||
|  |         messages.add(new OllamaChatMessage(role,content,binaryImages)); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, String... imageUrls){ | ||||||
|  |         List<OllamaChatMessage> messages = this.request.getMessages(); | ||||||
|  |         List<byte[]> binaryImages = null; | ||||||
|  |         if(imageUrls.length>0){ | ||||||
|  |             binaryImages = new ArrayList<>(); | ||||||
|  |             for (String imageUrl : imageUrls) { | ||||||
|  |                 try{ | ||||||
|  |                     binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl)); | ||||||
|  |                 } | ||||||
|  |                     catch (URISyntaxException e){ | ||||||
|  |                         LOG.warn(String.format("URL '%s' could not be accessed, will not add to message!",imageUrl), e); | ||||||
|  |                 } | ||||||
|  |                 catch (IOException e){ | ||||||
|  |                     LOG.warn(String.format("Content of URL '%s' could not be read, will not add to message!",imageUrl), e); | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |          | ||||||
|  |         messages.add(new OllamaChatMessage(role,content,binaryImages)); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages){ | ||||||
|  |         this.request.getMessages().addAll(messages); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaChatRequestBuilder withOptions(Options options){ | ||||||
|  |         this.request.setOptions(options.getOptionsMap()); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaChatRequestBuilder withGetJsonResponse(){ | ||||||
|  |         this.request.setReturnFormatJson(true); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaChatRequestBuilder withTemplate(String template){ | ||||||
|  |         this.request.setTemplate(template); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaChatRequestBuilder withStreaming(){ | ||||||
|  |         this.request.setStream(true); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaChatRequestBuilder withKeepAlive(String keepAlive){ | ||||||
|  |         this.request.setKeepAlive(keepAlive); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | } | ||||||
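A minimal usage sketch of the builder defined above; the model name and prompts are placeholders:

    OllamaChatRequestModel chatRequest = OllamaChatRequestBuilder.getInstance("llama2")
            .withMessage(OllamaChatMessageRole.SYSTEM, "You are a concise assistant.")
            .withMessage(OllamaChatMessageRole.USER, "Why is the sky blue?")
            .build();
    // reset() clears the collected messages but keeps the model name,
    // so the same builder can be reused for a fresh conversation.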
| @@ -0,0 +1,39 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||||
|  |  | ||||||
|  | import java.util.List; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||||
|  |  | ||||||
|  | import lombok.Getter; | ||||||
|  | import lombok.Setter; | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * Defines a Request to use against the ollama /api/chat endpoint. | ||||||
|  |  * | ||||||
|  |  * @see <a href= | ||||||
|  |  *      "https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate | ||||||
|  |  *      Chat Completion</a> | ||||||
|  |  */ | ||||||
|  | @Getter | ||||||
|  | @Setter | ||||||
|  | public class OllamaChatRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody { | ||||||
|  |  | ||||||
|  |   private List<OllamaChatMessage> messages; | ||||||
|  |  | ||||||
|  |   public OllamaChatRequestModel() {} | ||||||
|  |  | ||||||
|  |   public OllamaChatRequestModel(String model, List<OllamaChatMessage> messages) { | ||||||
|  |     this.model = model; | ||||||
|  |     this.messages = messages; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   @Override | ||||||
|  |   public boolean equals(Object o) { | ||||||
|  |     if (!(o instanceof OllamaChatRequestModel)) { | ||||||
|  |       return false; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     return this.toString().equals(o.toString()); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  | } | ||||||
| @@ -0,0 +1,23 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||||
|  |  | ||||||
|  | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
|  | import lombok.Data; | ||||||
|  |  | ||||||
|  | import java.util.List; | ||||||
|  |  | ||||||
|  | @Data | ||||||
|  | public class OllamaChatResponseModel { | ||||||
|  |     private String model; | ||||||
|  |     private @JsonProperty("created_at") String createdAt; | ||||||
|  |     private @JsonProperty("done_reason") String doneReason; | ||||||
|  |     private OllamaChatMessage message; | ||||||
|  |     private boolean done; | ||||||
|  |     private String error; | ||||||
|  |     private List<Integer> context; | ||||||
|  |     private @JsonProperty("total_duration") Long totalDuration; | ||||||
|  |     private @JsonProperty("load_duration") Long loadDuration; | ||||||
|  |     private @JsonProperty("prompt_eval_duration") Long promptEvalDuration; | ||||||
|  |     private @JsonProperty("eval_duration") Long evalDuration; | ||||||
|  |     private @JsonProperty("prompt_eval_count") Integer promptEvalCount; | ||||||
|  |     private @JsonProperty("eval_count") Integer evalCount; | ||||||
|  | } | ||||||
| @@ -0,0 +1,32 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||||
|  |  | ||||||
|  | import java.util.List; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * Specific chat-API result that contains the chat history sent to the model and appends the answer as {@link OllamaChatResult} given by the | ||||||
|  |  * {@link OllamaChatMessageRole#ASSISTANT} role. | ||||||
|  |  */ | ||||||
|  | public class OllamaChatResult extends OllamaResult{ | ||||||
|  |  | ||||||
|  |     private List<OllamaChatMessage> chatHistory; | ||||||
|  |  | ||||||
|  |     public OllamaChatResult(String response, long responseTime, int httpStatusCode, | ||||||
|  |             List<OllamaChatMessage> chatHistory) { | ||||||
|  |         super(response, responseTime, httpStatusCode); | ||||||
|  |         this.chatHistory = chatHistory; | ||||||
|  |         appendAnswerToChatHistory(response); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public List<OllamaChatMessage> getChatHistory() { | ||||||
|  |         return chatHistory; | ||||||
|  |     }  | ||||||
|  |  | ||||||
|  |     private void appendAnswerToChatHistory(String answer){ | ||||||
|  |         OllamaChatMessage assistantMessage = new OllamaChatMessage(OllamaChatMessageRole.ASSISTANT, answer); | ||||||
|  |         this.chatHistory.add(assistantMessage); | ||||||
|  |     } | ||||||
|  |      | ||||||
|  |      | ||||||
|  | } | ||||||
| @@ -0,0 +1,31 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; | ||||||
|  |  | ||||||
|  | import java.util.ArrayList; | ||||||
|  | import java.util.List; | ||||||
|  |  | ||||||
|  | public class OllamaChatStreamObserver { | ||||||
|  |  | ||||||
|  |     private OllamaStreamHandler streamHandler; | ||||||
|  |  | ||||||
|  |     private List<OllamaChatResponseModel> responseParts = new ArrayList<>(); | ||||||
|  |  | ||||||
|  |     private String message = ""; | ||||||
|  |  | ||||||
|  |     public OllamaChatStreamObserver(OllamaStreamHandler streamHandler) { | ||||||
|  |         this.streamHandler = streamHandler; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public void notify(OllamaChatResponseModel currentResponsePart) { | ||||||
|  |         responseParts.add(currentResponsePart); | ||||||
|  |         handleCurrentResponsePart(currentResponsePart); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart) { | ||||||
|  |         message = message + currentResponsePart.getMessage().getContent(); | ||||||
|  |         streamHandler.accept(message); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | } | ||||||
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
+package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 
@@ -7,7 +7,7 @@ import lombok.Data;
 
 @SuppressWarnings("unused")
 @Data
-public class EmbeddingResponse {
+public class OllamaEmbeddingResponseModel {
     @JsonProperty("embedding")
     private List<Double> embedding;
 }
| @@ -0,0 +1,31 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.embeddings; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||||
|  |  | ||||||
|  | public class OllamaEmbeddingsRequestBuilder { | ||||||
|  |  | ||||||
|  |     private OllamaEmbeddingsRequestBuilder(String model, String prompt){ | ||||||
|  |         request = new OllamaEmbeddingsRequestModel(model, prompt); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private OllamaEmbeddingsRequestModel request; | ||||||
|  |  | ||||||
|  |     public static OllamaEmbeddingsRequestBuilder getInstance(String model, String prompt){ | ||||||
|  |         return new OllamaEmbeddingsRequestBuilder(model, prompt); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaEmbeddingsRequestModel build(){ | ||||||
|  |         return request; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaEmbeddingsRequestBuilder withOptions(Options options){ | ||||||
|  |         this.request.setOptions(options.getOptionsMap()); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaEmbeddingsRequestBuilder withKeepAlive(String keepAlive){ | ||||||
|  |         this.request.setKeepAlive(keepAlive); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | } | ||||||
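A minimal usage sketch of the embeddings request builder above; the model name and prompt are placeholders:

    OllamaEmbeddingsRequestModel embeddingsRequest = OllamaEmbeddingsRequestBuilder
            .getInstance("nomic-embed-text", "The sky appears blue because of Rayleigh scattering.")
            .withKeepAlive("5m")
            .build();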
@@ -1,31 +1,28 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
+package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
 
 import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import java.util.List;
 import java.util.Map;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
 import lombok.Data;
+import lombok.NoArgsConstructor;
+import lombok.NonNull;
+import lombok.RequiredArgsConstructor;
 
 @Data
-public class OllamaRequestModel {
-
+@RequiredArgsConstructor
+@NoArgsConstructor
+public class OllamaEmbeddingsRequestModel {
+  @NonNull
   private String model;
+  @NonNull
   private String prompt;
-  private List<String> images;
-  private Map<String, Object> options;
 
-  public OllamaRequestModel(String model, String prompt) {
-    this.model = model;
-    this.prompt = prompt;
-  }
-
-  public OllamaRequestModel(String model, String prompt, List<String> images) {
-    this.model = model;
-    this.prompt = prompt;
-    this.images = images;
-  }
+  protected Map<String, Object> options;
+  @JsonProperty(value = "keep_alive")
+  private String keepAlive;
 
+  @Override
   public String toString() {
     try {
       return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
| @@ -0,0 +1,55 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.generate; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * Helper class for creating {@link io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel}  | ||||||
|  |  * objects using the builder-pattern. | ||||||
|  |  */ | ||||||
|  | public class OllamaGenerateRequestBuilder { | ||||||
|  |  | ||||||
|  |     private OllamaGenerateRequestBuilder(String model, String prompt){ | ||||||
|  |         request = new OllamaGenerateRequestModel(model, prompt); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private OllamaGenerateRequestModel request; | ||||||
|  |  | ||||||
|  |     public static OllamaGenerateRequestBuilder getInstance(String model){ | ||||||
|  |         return new OllamaGenerateRequestBuilder(model,""); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaGenerateRequestModel build(){ | ||||||
|  |         return request; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaGenerateRequestBuilder withPrompt(String prompt){ | ||||||
|  |         request.setPrompt(prompt); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |      | ||||||
|  |     public OllamaGenerateRequestBuilder withGetJsonResponse(){ | ||||||
|  |         this.request.setReturnFormatJson(true); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaGenerateRequestBuilder withOptions(Options options){ | ||||||
|  |         this.request.setOptions(options.getOptionsMap()); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaGenerateRequestBuilder withTemplate(String template){ | ||||||
|  |         this.request.setTemplate(template); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaGenerateRequestBuilder withStreaming(){ | ||||||
|  |         this.request.setStream(true); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaGenerateRequestBuilder withKeepAlive(String keepAlive){ | ||||||
|  |         this.request.setKeepAlive(keepAlive); | ||||||
|  |         return this; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | } | ||||||
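A minimal usage sketch of the generate request builder above; the model name and prompt are placeholders:

    OllamaGenerateRequestModel generateRequest = OllamaGenerateRequestBuilder.getInstance("llama2")
            .withPrompt("Why is the sky blue?")
            .withStreaming()
            .build();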
| @@ -0,0 +1,46 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.generate; | ||||||
|  |  | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||||
|  |  | ||||||
|  | import java.util.List; | ||||||
|  |  | ||||||
|  | import lombok.Getter; | ||||||
|  | import lombok.Setter; | ||||||
|  |  | ||||||
|  | @Getter | ||||||
|  | @Setter | ||||||
|  | public class OllamaGenerateRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody{ | ||||||
|  |  | ||||||
|  |   private String prompt; | ||||||
|  |   private List<String> images; | ||||||
|  |  | ||||||
|  |   private String system; | ||||||
|  |   private String context; | ||||||
|  |   private boolean raw; | ||||||
|  |  | ||||||
|  |   public OllamaGenerateRequestModel() { | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   public OllamaGenerateRequestModel(String model, String prompt) { | ||||||
|  |     this.model = model; | ||||||
|  |     this.prompt = prompt; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   public OllamaGenerateRequestModel(String model, String prompt, List<String> images) { | ||||||
|  |     this.model = model; | ||||||
|  |     this.prompt = prompt; | ||||||
|  |     this.images = images; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |   public boolean equals(Object o) { | ||||||
|  |     if (!(o instanceof OllamaGenerateRequestModel)) { | ||||||
|  |       return false; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     return this.toString().equals(o.toString()); | ||||||
|  |   } | ||||||
|  |  | ||||||
|  | } | ||||||
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
+package io.github.amithkoujalgi.ollama4j.core.models.generate;
 
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
@@ -8,7 +8,7 @@ import lombok.Data;
 
 @Data
 @JsonIgnoreProperties(ignoreUnknown = true)
-public class OllamaResponseModel {
+public class OllamaGenerateResponseModel {
     private String model;
     private @JsonProperty("created_at") String createdAt;
     private String response;
| @@ -0,0 +1,29 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.generate; | ||||||
|  |  | ||||||
|  | import java.util.ArrayList; | ||||||
|  | import java.util.List; | ||||||
|  |  | ||||||
|  | public class OllamaGenerateStreamObserver { | ||||||
|  |  | ||||||
|  |     private OllamaStreamHandler streamHandler; | ||||||
|  |  | ||||||
|  |     private List<OllamaGenerateResponseModel> responseParts = new ArrayList<>(); | ||||||
|  |  | ||||||
|  |     private String message = ""; | ||||||
|  |  | ||||||
|  |     public OllamaGenerateStreamObserver(OllamaStreamHandler streamHandler) { | ||||||
|  |         this.streamHandler = streamHandler; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public void notify(OllamaGenerateResponseModel currentResponsePart) { | ||||||
|  |         responseParts.add(currentResponsePart); | ||||||
|  |         handleCurrentResponsePart(currentResponsePart); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart) { | ||||||
|  |         message = message + currentResponsePart.getResponse(); | ||||||
|  |         streamHandler.accept(message); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | } | ||||||
| @@ -0,0 +1,7 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.generate; | ||||||
|  |  | ||||||
|  | import java.util.function.Consumer; | ||||||
|  |  | ||||||
|  | public interface OllamaStreamHandler extends Consumer<String> { | ||||||
|  |     void accept(String message); | ||||||
|  | } | ||||||
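Because the interface extends Consumer&lt;String&gt;, a lambda is sufficient; the stream observers above invoke it with the response text accumulated so far. A sketch:

    OllamaStreamHandler printingHandler = partialResponse -> {
        // called on every streamed chunk; the argument is the full text so far, not the delta
        System.out.println(partialResponse);
    };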
@@ -1,23 +0,0 @@
-package io.github.amithkoujalgi.ollama4j.core.models.request;
-
-import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import lombok.AllArgsConstructor;
-import lombok.Data;
-
-@Data
-@AllArgsConstructor
-public class ModelEmbeddingsRequest {
-  private String model;
-  private String prompt;
-
-  @Override
-  public String toString() {
-    try {
-      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
-    } catch (JsonProcessingException e) {
-      throw new RuntimeException(e);
-    }
-  }
-}
| @@ -0,0 +1,55 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.request; | ||||||
|  |  | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  | import org.slf4j.Logger; | ||||||
|  | import org.slf4j.LoggerFactory; | ||||||
|  |  | ||||||
|  | import java.io.IOException; | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * Specialization class for chat requests to the Ollama /api/chat endpoint. | ||||||
|  |  */ | ||||||
|  | public class OllamaChatEndpointCaller extends OllamaEndpointCaller { | ||||||
|  |  | ||||||
|  |     private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class); | ||||||
|  |  | ||||||
|  |     private OllamaChatStreamObserver streamObserver; | ||||||
|  |  | ||||||
|  |     public OllamaChatEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) { | ||||||
|  |         super(host, basicAuth, requestTimeoutSeconds, verbose); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     protected String getEndpointSuffix() { | ||||||
|  |         return "/api/chat"; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) { | ||||||
|  |         try { | ||||||
|  |             OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class); | ||||||
|  |             responseBuffer.append(ollamaResponseModel.getMessage().getContent()); | ||||||
|  |             if (streamObserver != null) { | ||||||
|  |                 streamObserver.notify(ollamaResponseModel); | ||||||
|  |             } | ||||||
|  |             return ollamaResponseModel.isDone(); | ||||||
|  |         } catch (JsonProcessingException e) { | ||||||
|  |             LOG.error("Error parsing the Ollama chat response!", e); | ||||||
|  |             return true; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler) | ||||||
|  |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|  |         streamObserver = new OllamaChatStreamObserver(streamHandler); | ||||||
|  |         return super.callSync(body); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,152 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.request; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  | import org.slf4j.Logger; | ||||||
|  | import org.slf4j.LoggerFactory; | ||||||
|  |  | ||||||
|  | import java.io.BufferedReader; | ||||||
|  | import java.io.IOException; | ||||||
|  | import java.io.InputStream; | ||||||
|  | import java.io.InputStreamReader; | ||||||
|  | import java.net.URI; | ||||||
|  | import java.net.http.HttpClient; | ||||||
|  | import java.net.http.HttpRequest; | ||||||
|  | import java.net.http.HttpResponse; | ||||||
|  | import java.nio.charset.StandardCharsets; | ||||||
|  | import java.time.Duration; | ||||||
|  | import java.util.Base64; | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * Abstract helper class to call the Ollama API server. | ||||||
|  |  */ | ||||||
|  | public abstract class OllamaEndpointCaller { | ||||||
|  |  | ||||||
|  |     private static final Logger LOG = LoggerFactory.getLogger(OllamaAPI.class); | ||||||
|  |  | ||||||
|  |     private String host; | ||||||
|  |     private BasicAuth basicAuth; | ||||||
|  |     private long requestTimeoutSeconds; | ||||||
|  |     private boolean verbose; | ||||||
|  |  | ||||||
|  |     public OllamaEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) { | ||||||
|  |         this.host = host; | ||||||
|  |         this.basicAuth = basicAuth; | ||||||
|  |         this.requestTimeoutSeconds = requestTimeoutSeconds; | ||||||
|  |         this.verbose = verbose; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     protected abstract String getEndpointSuffix(); | ||||||
|  |  | ||||||
|  |     protected abstract boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer); | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Calls the API server on the given host and endpoint suffix synchronously, i.e. it waits for the complete response. | ||||||
|  |      * | ||||||
|  |      * @param body POST body payload | ||||||
|  |      * @return result answer given by the assistant | ||||||
|  |      * @throws OllamaBaseException  if the server returns a response code other than 200 | ||||||
|  |      * @throws IOException          in case the response stream cannot be read | ||||||
|  |      * @throws InterruptedException in case the server is not reachable or network issues occur | ||||||
|  |      */ | ||||||
|  |     public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException { | ||||||
|  |         // Create Request | ||||||
|  |         long startTime = System.currentTimeMillis(); | ||||||
|  |         HttpClient httpClient = HttpClient.newHttpClient(); | ||||||
|  |         URI uri = URI.create(this.host + getEndpointSuffix()); | ||||||
|  |         HttpRequest.Builder requestBuilder = | ||||||
|  |                 getRequestBuilderDefault(uri) | ||||||
|  |                         .POST( | ||||||
|  |                                 body.getBodyPublisher()); | ||||||
|  |         HttpRequest request = requestBuilder.build(); | ||||||
|  |         if (this.verbose) LOG.info("Asking model: " + body.toString()); | ||||||
|  |         HttpResponse<InputStream> response = | ||||||
|  |                 httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); | ||||||
|  |  | ||||||
|  |         int statusCode = response.statusCode(); | ||||||
|  |         InputStream responseBodyStream = response.body(); | ||||||
|  |         StringBuilder responseBuffer = new StringBuilder(); | ||||||
|  |         try (BufferedReader reader = | ||||||
|  |                      new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { | ||||||
|  |             String line; | ||||||
|  |             while ((line = reader.readLine()) != null) { | ||||||
|  |                 if (statusCode == 404) { | ||||||
|  |                     LOG.warn("Status code: 404 (Not Found)"); | ||||||
|  |                     OllamaErrorResponseModel ollamaResponseModel = | ||||||
|  |                             Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class); | ||||||
|  |                     responseBuffer.append(ollamaResponseModel.getError()); | ||||||
|  |                 } else if (statusCode == 401) { | ||||||
|  |                     LOG.warn("Status code: 401 (Unauthorized)"); | ||||||
|  |                     OllamaErrorResponseModel ollamaResponseModel = | ||||||
|  |                             Utils.getObjectMapper() | ||||||
|  |                                     .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class); | ||||||
|  |                     responseBuffer.append(ollamaResponseModel.getError()); | ||||||
|  |                 } else if (statusCode == 400) { | ||||||
|  |                     LOG.warn("Status code: 400 (Bad Request)"); | ||||||
|  |                     OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, | ||||||
|  |                             OllamaErrorResponseModel.class); | ||||||
|  |                     responseBuffer.append(ollamaResponseModel.getError()); | ||||||
|  |                 } else { | ||||||
|  |                     boolean finished = parseResponseAndAddToBuffer(line, responseBuffer); | ||||||
|  |                     if (finished) { | ||||||
|  |                         break; | ||||||
|  |                     } | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         if (statusCode != 200) { | ||||||
|  |             LOG.error("Status code " + statusCode); | ||||||
|  |             throw new OllamaBaseException(responseBuffer.toString()); | ||||||
|  |         } else { | ||||||
|  |             long endTime = System.currentTimeMillis(); | ||||||
|  |             OllamaResult ollamaResult = | ||||||
|  |                     new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode); | ||||||
|  |             if (verbose) LOG.info("Model response: " + ollamaResult); | ||||||
|  |             return ollamaResult; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Get default request builder. | ||||||
|  |      * | ||||||
|  |      * @param uri URI to get a HttpRequest.Builder | ||||||
|  |      * @return HttpRequest.Builder | ||||||
|  |      */ | ||||||
|  |     private HttpRequest.Builder getRequestBuilderDefault(URI uri) { | ||||||
|  |         HttpRequest.Builder requestBuilder = | ||||||
|  |                 HttpRequest.newBuilder(uri) | ||||||
|  |                         .header("Content-Type", "application/json") | ||||||
|  |                         .timeout(Duration.ofSeconds(this.requestTimeoutSeconds)); | ||||||
|  |         if (isBasicAuthCredentialsSet()) { | ||||||
|  |             requestBuilder.header("Authorization", getBasicAuthHeaderValue()); | ||||||
|  |         } | ||||||
|  |         return requestBuilder; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Get basic authentication header value. | ||||||
|  |      * | ||||||
|  |      * @return basic authentication header value (encoded credentials) | ||||||
|  |      */ | ||||||
|  |     private String getBasicAuthHeaderValue() { | ||||||
|  |         String credentialsToEncode = this.basicAuth.getUsername() + ":" + this.basicAuth.getPassword(); | ||||||
|  |         return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes()); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Check if Basic Auth credentials set. | ||||||
|  |      * | ||||||
|  |      * @return true when Basic Auth credentials set | ||||||
|  |      */ | ||||||
|  |     private boolean isBasicAuthCredentialsSet() { | ||||||
|  |         return this.basicAuth != null; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | } | ||||||
| @@ -0,0 +1,52 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.request; | ||||||
|  |  | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  | import org.slf4j.Logger; | ||||||
|  | import org.slf4j.LoggerFactory; | ||||||
|  |  | ||||||
|  | import java.io.IOException; | ||||||
|  |  | ||||||
|  | public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller { | ||||||
|  |  | ||||||
|  |     private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class); | ||||||
|  |  | ||||||
|  |     private OllamaGenerateStreamObserver streamObserver; | ||||||
|  |  | ||||||
|  |     public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) { | ||||||
|  |         super(host, basicAuth, requestTimeoutSeconds, verbose); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     protected String getEndpointSuffix() { | ||||||
|  |         return "/api/generate"; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) { | ||||||
|  |         try { | ||||||
|  |             OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); | ||||||
|  |             responseBuffer.append(ollamaResponseModel.getResponse()); | ||||||
|  |             if (streamObserver != null) { | ||||||
|  |                 streamObserver.notify(ollamaResponseModel); | ||||||
|  |             } | ||||||
|  |             return ollamaResponseModel.isDone(); | ||||||
|  |         } catch (JsonProcessingException e) { | ||||||
|  |             LOG.error("Error parsing the Ollama generate response!", e); | ||||||
|  |             return true; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler) | ||||||
|  |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|  |         streamObserver = new OllamaGenerateStreamObserver(streamHandler); | ||||||
|  |         return super.callSync(body); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,35 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.tools; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
|  | import lombok.AllArgsConstructor; | ||||||
|  | import lombok.Data; | ||||||
|  | import lombok.NoArgsConstructor; | ||||||
|  |  | ||||||
|  | import java.util.ArrayList; | ||||||
|  | import java.util.List; | ||||||
|  | import java.util.Map; | ||||||
|  |  | ||||||
|  | @Data | ||||||
|  | @NoArgsConstructor | ||||||
|  | @AllArgsConstructor | ||||||
|  | public class OllamaToolsResult { | ||||||
|  |     private OllamaResult modelResult; | ||||||
|  |     private Map<ToolFunctionCallSpec, Object> toolResults; | ||||||
|  |  | ||||||
|  |     public List<ToolResult> getToolResults() { | ||||||
|  |         List<ToolResult> results = new ArrayList<>(); | ||||||
|  |         for (Map.Entry<ToolFunctionCallSpec, Object> r : this.toolResults.entrySet()) { | ||||||
|  |             results.add(new ToolResult(r.getKey().getName(), r.getKey().getArguments(), r.getValue())); | ||||||
|  |         } | ||||||
|  |         return results; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Data | ||||||
|  |     @NoArgsConstructor | ||||||
|  |     @AllArgsConstructor | ||||||
|  |     public static class ToolResult { | ||||||
|  |         private String functionName; | ||||||
|  |         private Map<String, Object> functionArguments; | ||||||
|  |         private Object result; | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,8 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.tools; | ||||||
|  |  | ||||||
|  | import java.util.Map; | ||||||
|  |  | ||||||
|  | @FunctionalInterface | ||||||
|  | public interface ToolFunction { | ||||||
|  |     Object apply(Map<String, Object> arguments); | ||||||
|  | } | ||||||
| @@ -0,0 +1,16 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.tools; | ||||||
|  |  | ||||||
|  | import lombok.AllArgsConstructor; | ||||||
|  | import lombok.Data; | ||||||
|  | import lombok.NoArgsConstructor; | ||||||
|  |  | ||||||
|  | import java.util.Map; | ||||||
|  |  | ||||||
|  | @Data | ||||||
|  | @AllArgsConstructor | ||||||
|  | @NoArgsConstructor | ||||||
|  | public class ToolFunctionCallSpec { | ||||||
|  |     private String name; | ||||||
|  |     private Map<String, Object> arguments; | ||||||
|  | } | ||||||
|  |  | ||||||
| @@ -0,0 +1,16 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.tools; | ||||||
|  |  | ||||||
|  | import java.util.HashMap; | ||||||
|  | import java.util.Map; | ||||||
|  |  | ||||||
|  | public class ToolRegistry { | ||||||
|  |     private final Map<String, ToolFunction> functionMap = new HashMap<>(); | ||||||
|  |  | ||||||
|  |     public ToolFunction getFunction(String name) { | ||||||
|  |         return functionMap.get(name); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public void addFunction(String name, ToolFunction function) { | ||||||
|  |         functionMap.put(name, function); | ||||||
|  |     } | ||||||
|  | } | ||||||
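A sketch of registering and invoking a tool function; the tool name, argument key, and behaviour are purely illustrative:

    ToolRegistry registry = new ToolRegistry();
    // ToolFunction is a functional interface, so a lambda is enough
    registry.addFunction("get-weather", arguments -> "It is sunny in " + arguments.get("city"));

    Object result = registry.getFunction("get-weather").apply(java.util.Map.of("city", "Bengaluru"));
    // result: "It is sunny in Bengaluru"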
| @@ -0,0 +1,113 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.tools; | ||||||
|  |  | ||||||
|  | import com.fasterxml.jackson.annotation.JsonIgnore; | ||||||
|  | import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | ||||||
|  | import com.fasterxml.jackson.annotation.JsonInclude; | ||||||
|  | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  | import lombok.Builder; | ||||||
|  | import lombok.Data; | ||||||
|  |  | ||||||
|  | import java.util.ArrayList; | ||||||
|  | import java.util.HashMap; | ||||||
|  | import java.util.List; | ||||||
|  | import java.util.Map; | ||||||
|  |  | ||||||
|  | public class Tools { | ||||||
|  |     @Data | ||||||
|  |     @Builder | ||||||
|  |     public static class ToolSpecification { | ||||||
|  |         private String functionName; | ||||||
|  |         private String functionDescription; | ||||||
|  |         private Map<String, PromptFuncDefinition.Property> properties; | ||||||
|  |         private ToolFunction toolDefinition; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Data | ||||||
|  |     @JsonIgnoreProperties(ignoreUnknown = true) | ||||||
|  |     public static class PromptFuncDefinition { | ||||||
|  |         private String type; | ||||||
|  |         private PromptFuncSpec function; | ||||||
|  |  | ||||||
|  |         @Data | ||||||
|  |         public static class PromptFuncSpec { | ||||||
|  |             private String name; | ||||||
|  |             private String description; | ||||||
|  |             private Parameters parameters; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         @Data | ||||||
|  |         public static class Parameters { | ||||||
|  |             private String type; | ||||||
|  |             private Map<String, Property> properties; | ||||||
|  |             private List<String> required; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         @Data | ||||||
|  |         @Builder | ||||||
|  |         public static class Property { | ||||||
|  |             private String type; | ||||||
|  |             private String description; | ||||||
|  |             @JsonProperty("enum") | ||||||
|  |             @JsonInclude(JsonInclude.Include.NON_NULL) | ||||||
|  |             private List<String> enumValues; | ||||||
|  |             @JsonIgnore | ||||||
|  |             private boolean required; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public static class PropsBuilder { | ||||||
|  |         private final Map<String, PromptFuncDefinition.Property> props = new HashMap<>(); | ||||||
|  |  | ||||||
|  |         public PropsBuilder withProperty(String key, PromptFuncDefinition.Property property) { | ||||||
|  |             props.put(key, property); | ||||||
|  |             return this; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         public Map<String, PromptFuncDefinition.Property> build() { | ||||||
|  |             return props; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public static class PromptBuilder { | ||||||
|  |         private final List<PromptFuncDefinition> tools = new ArrayList<>(); | ||||||
|  |  | ||||||
|  |         private String promptText; | ||||||
|  |  | ||||||
|  |         public String build() throws JsonProcessingException { | ||||||
|  |             return "[AVAILABLE_TOOLS] " + Utils.getObjectMapper().writeValueAsString(tools) + "[/AVAILABLE_TOOLS][INST] " + promptText + " [/INST]"; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         public PromptBuilder withPrompt(String prompt) throws JsonProcessingException { | ||||||
|  |             promptText = prompt; | ||||||
|  |             return this; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         public PromptBuilder withToolSpecification(ToolSpecification spec) { | ||||||
|  |             PromptFuncDefinition def = new PromptFuncDefinition(); | ||||||
|  |             def.setType("function"); | ||||||
|  |  | ||||||
|  |             PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec(); | ||||||
|  |             functionDetail.setName(spec.getFunctionName()); | ||||||
|  |             functionDetail.setDescription(spec.getFunctionDescription()); | ||||||
|  |  | ||||||
|  |             PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters(); | ||||||
|  |             parameters.setType("object"); | ||||||
|  |             parameters.setProperties(spec.getProperties()); | ||||||
|  |  | ||||||
|  |             List<String> requiredValues = new ArrayList<>(); | ||||||
|  |             for (Map.Entry<String, PromptFuncDefinition.Property> p : spec.getProperties().entrySet()) { | ||||||
|  |                 if (p.getValue().isRequired()) { | ||||||
|  |                     requiredValues.add(p.getKey()); | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |             parameters.setRequired(requiredValues); | ||||||
|  |             functionDetail.setParameters(parameters); | ||||||
|  |             def.setFunction(functionDetail); | ||||||
|  |  | ||||||
|  |             tools.add(def); | ||||||
|  |             return this; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  | } | ||||||
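A sketch of how the builders above compose into a tool-calling prompt; the weather tool, its single parameter, and the question are illustrative only, and handling of the checked JsonProcessingException thrown by withPrompt()/build() is left to the caller:

    Tools.ToolSpecification weatherTool = Tools.ToolSpecification.builder()
            .functionName("get-weather")
            .functionDescription("Gets the current weather for a city")
            .properties(new Tools.PropsBuilder()
                    .withProperty("city", Tools.PromptFuncDefinition.Property.builder()
                            .type("string")
                            .description("Name of the city")
                            .required(true)
                            .build())
                    .build())
            .build();

    String prompt = new Tools.PromptBuilder()
            .withToolSpecification(weatherTool)
            .withPrompt("What is the weather in Bengaluru?")
            .build();
    // prompt wraps the tool JSON and the question as:
    // [AVAILABLE_TOOLS] [ ... ][/AVAILABLE_TOOLS][INST] What is the weather in Bengaluru? [/INST]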
| @@ -8,57 +8,81 @@ package io.github.amithkoujalgi.ollama4j.core.types; | |||||||
|  */ |  */ | ||||||
| @SuppressWarnings("ALL") | @SuppressWarnings("ALL") | ||||||
| public class OllamaModelType { | public class OllamaModelType { | ||||||
|  |     public static final String GEMMA = "gemma"; | ||||||
|  |     public static final String GEMMA2 = "gemma2"; | ||||||
|  |  | ||||||
|  |  | ||||||
|     public static final String LLAMA2 = "llama2"; |     public static final String LLAMA2 = "llama2"; | ||||||
|  |     public static final String LLAMA3 = "llama3"; | ||||||
|     public static final String MISTRAL = "mistral"; |     public static final String MISTRAL = "mistral"; | ||||||
|   public static final String LLAVA = "llava"; |  | ||||||
|     public static final String MIXTRAL = "mixtral"; |     public static final String MIXTRAL = "mixtral"; | ||||||
|   public static final String STARLING_LM = "starling-lm"; |     public static final String LLAVA = "llava"; | ||||||
|  |     public static final String LLAVA_PHI3 = "llava-phi3"; | ||||||
|     public static final String NEURAL_CHAT = "neural-chat"; |     public static final String NEURAL_CHAT = "neural-chat"; | ||||||
|     public static final String CODELLAMA = "codellama"; |     public static final String CODELLAMA = "codellama"; | ||||||
|   public static final String LLAMA2_UNCENSORED = "llama2-uncensored"; |  | ||||||
|     public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral"; |     public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral"; | ||||||
|  |     public static final String MISTRAL_OPENORCA = "mistral-openorca"; | ||||||
|  |     public static final String LLAMA2_UNCENSORED = "llama2-uncensored"; | ||||||
|  |     public static final String PHI = "phi"; | ||||||
|  |     public static final String PHI3 = "phi3"; | ||||||
|     public static final String ORCA_MINI = "orca-mini"; |     public static final String ORCA_MINI = "orca-mini"; | ||||||
|  |     public static final String DEEPSEEK_CODER = "deepseek-coder"; | ||||||
|  |     public static final String DOLPHIN_MISTRAL = "dolphin-mistral"; | ||||||
|     public static final String VICUNA = "vicuna"; |     public static final String VICUNA = "vicuna"; | ||||||
|     public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored"; |     public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored"; | ||||||
|   public static final String PHIND_CODELLAMA = "phind-codellama"; |  | ||||||
|   public static final String PHI = "phi"; |  | ||||||
|     public static final String ZEPHYR = "zephyr"; |     public static final String ZEPHYR = "zephyr"; | ||||||
|  |     public static final String OPENHERMES = "openhermes"; | ||||||
|  |     public static final String QWEN = "qwen"; | ||||||
|  |  | ||||||
|  |     public static final String QWEN2 = "qwen2"; | ||||||
|     public static final String WIZARDCODER = "wizardcoder"; |     public static final String WIZARDCODER = "wizardcoder"; | ||||||
|   public static final String MISTRAL_OPENORCA = "mistral-openorca"; |  | ||||||
|   public static final String NOUS_HERMES = "nous-hermes"; |  | ||||||
|   public static final String DEEPSEEK_CODER = "deepseek-coder"; |  | ||||||
|   public static final String WIZARD_MATH = "wizard-math"; |  | ||||||
|     public static final String LLAMA2_CHINESE = "llama2-chinese"; |     public static final String LLAMA2_CHINESE = "llama2-chinese"; | ||||||
|   public static final String FALCON = "falcon"; |     public static final String TINYLLAMA = "tinyllama"; | ||||||
|   public static final String ORCA2 = "orca2"; |     public static final String PHIND_CODELLAMA = "phind-codellama"; | ||||||
|   public static final String STABLE_BELUGA = "stable-beluga"; |  | ||||||
|   public static final String CODEUP = "codeup"; |  | ||||||
|   public static final String EVERYTHINGLM = "everythinglm"; |  | ||||||
|   public static final String MEDLLAMA2 = "medllama2"; |  | ||||||
|   public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored"; |  | ||||||
|   public static final String STARCODER = "starcoder"; |  | ||||||
|   public static final String DOLPHIN22_MISTRAL = "dolphin2.2-mistral"; |  | ||||||
|     public static final String OPENCHAT = "openchat"; |     public static final String OPENCHAT = "openchat"; | ||||||
|   public static final String WIZARD_VICUNA = "wizard-vicuna"; |     public static final String ORCA2 = "orca2"; | ||||||
|   public static final String OPENHERMES25_MISTRAL = "openhermes2.5-mistral"; |     public static final String FALCON = "falcon"; | ||||||
|   public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2"; |     public static final String WIZARD_MATH = "wizard-math"; | ||||||
|  |     public static final String TINYDOLPHIN = "tinydolphin"; | ||||||
|  |     public static final String NOUS_HERMES = "nous-hermes"; | ||||||
|     public static final String YI = "yi"; |     public static final String YI = "yi"; | ||||||
|   public static final String YARN_MISTRAL = "yarn-mistral"; |     public static final String DOLPHIN_PHI = "dolphin-phi"; | ||||||
|   public static final String SAMANTHA_MISTRAL = "samantha-mistral"; |     public static final String STARLING_LM = "starling-lm"; | ||||||
|   public static final String SQLCODER = "sqlcoder"; |     public static final String STARCODER = "starcoder"; | ||||||
|   public static final String YARN_LLAMA2 = "yarn-llama2"; |     public static final String CODEUP = "codeup"; | ||||||
|   public static final String MEDITRON = "meditron"; |     public static final String MEDLLAMA2 = "medllama2"; | ||||||
|   public static final String STABLELM_ZEPHYR = "stablelm-zephyr"; |     public static final String STABLE_CODE = "stable-code"; | ||||||
|   public static final String OPENHERMES2_MISTRAL = "openhermes2-mistral"; |     public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored"; | ||||||
|   public static final String DEEPSEEK_LLM = "deepseek-llm"; |  | ||||||
|   public static final String MISTRALLITE = "mistrallite"; |  | ||||||
|   public static final String DOLPHIN21_MISTRAL = "dolphin2.1-mistral"; |  | ||||||
|   public static final String WIZARDLM = "wizardlm"; |  | ||||||
|   public static final String CODEBOOGA = "codebooga"; |  | ||||||
|   public static final String MAGICODER = "magicoder"; |  | ||||||
|   public static final String GOLIATH = "goliath"; |  | ||||||
|   public static final String NEXUSRAVEN = "nexusraven"; |  | ||||||
|   public static final String ALFRED = "alfred"; |  | ||||||
|   public static final String XWINLM = "xwinlm"; |  | ||||||
|     public static final String BAKLLAVA = "bakllava"; |     public static final String BAKLLAVA = "bakllava"; | ||||||
|  |     public static final String EVERYTHINGLM = "everythinglm"; | ||||||
|  |     public static final String SOLAR = "solar"; | ||||||
|  |     public static final String STABLE_BELUGA = "stable-beluga"; | ||||||
|  |     public static final String SQLCODER = "sqlcoder"; | ||||||
|  |     public static final String YARN_MISTRAL = "yarn-mistral"; | ||||||
|  |     public static final String NOUS_HERMES2_MIXTRAL = "nous-hermes2-mixtral"; | ||||||
|  |     public static final String SAMANTHA_MISTRAL = "samantha-mistral"; | ||||||
|  |     public static final String STABLELM_ZEPHYR = "stablelm-zephyr"; | ||||||
|  |     public static final String MEDITRON = "meditron"; | ||||||
|  |     public static final String WIZARD_VICUNA = "wizard-vicuna"; | ||||||
|  |     public static final String STABLELM2 = "stablelm2"; | ||||||
|  |     public static final String MAGICODER = "magicoder"; | ||||||
|  |     public static final String YARN_LLAMA2 = "yarn-llama2"; | ||||||
|  |     public static final String NOUS_HERMES2 = "nous-hermes2"; | ||||||
|  |     public static final String DEEPSEEK_LLM = "deepseek-llm"; | ||||||
|  |     public static final String LLAMA_PRO = "llama-pro"; | ||||||
|  |     public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2"; | ||||||
|  |     public static final String CODEBOOGA = "codebooga"; | ||||||
|  |     public static final String MISTRALLITE = "mistrallite"; | ||||||
|  |     public static final String NEXUSRAVEN = "nexusraven"; | ||||||
|  |     public static final String GOLIATH = "goliath"; | ||||||
|  |     public static final String NOMIC_EMBED_TEXT = "nomic-embed-text"; | ||||||
|  |     public static final String NOTUX = "notux"; | ||||||
|  |     public static final String ALFRED = "alfred"; | ||||||
|  |     public static final String MEGADOLPHIN = "megadolphin"; | ||||||
|  |     public static final String WIZARDLM = "wizardlm"; | ||||||
|  |     public static final String XWINLM = "xwinlm"; | ||||||
|  |     public static final String NOTUS = "notus"; | ||||||
|  |     public static final String DUCKDB_NSQL = "duckdb-nsql"; | ||||||
|  |     public static final String ALL_MINILM = "all-minilm"; | ||||||
|  |     public static final String CODESTRAL = "codestral"; | ||||||
| } | } | ||||||
|   | |||||||
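Note: the constants above are plain model-name strings and can be passed anywhere the client expects a model name. A minimal usage sketch (not part of this diff; the host URL is a placeholder) pulling one of the newly added models by its constant:

    import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
    import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;

    public class PullModelExample {
        public static void main(String[] args) throws Exception {
            OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
            // Pull a model using its constant instead of a raw string literal.
            ollamaAPI.pullModel(OllamaModelType.CODESTRAL);
        }
    }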
| @@ -0,0 +1,21 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.utils; | ||||||
|  |  | ||||||
|  | import java.io.IOException; | ||||||
|  |  | ||||||
|  | import com.fasterxml.jackson.core.JsonGenerator; | ||||||
|  | import com.fasterxml.jackson.databind.JsonSerializer; | ||||||
|  | import com.fasterxml.jackson.databind.SerializerProvider; | ||||||
|  |  | ||||||
|  | public class BooleanToJsonFormatFlagSerializer extends JsonSerializer<Boolean> { | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void serialize(Boolean value, JsonGenerator gen, SerializerProvider serializers) throws IOException { | ||||||
|  |         gen.writeString("json"); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public boolean isEmpty(SerializerProvider provider, Boolean value) { | ||||||
|  |         return !value; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | } | ||||||
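A minimal sketch of how such a serializer is typically attached to a request flag with Jackson annotations. The ExampleRequest class and returnFormatJson field are hypothetical, not part of this diff; NON_EMPTY inclusion is assumed here so that the isEmpty() override can suppress the property when the flag is false:

    import com.fasterxml.jackson.annotation.JsonInclude;
    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.databind.annotation.JsonSerialize;
    import io.github.amithkoujalgi.ollama4j.core.utils.BooleanToJsonFormatFlagSerializer;
    import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

    public class ExampleRequest {
        // true  -> serialized as "format": "json"
        // false -> property omitted, because isEmpty() returns true for Boolean.FALSE
        @JsonProperty("format")
        @JsonInclude(JsonInclude.Include.NON_EMPTY)
        @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)
        public Boolean returnFormatJson = true;

        public static void main(String[] args) throws Exception {
            System.out.println(Utils.getObjectMapper().writeValueAsString(new ExampleRequest()));
            // prints: {"format":"json"}
        }
    }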
| @@ -0,0 +1,21 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.utils; | ||||||
|  |  | ||||||
|  | import java.io.IOException; | ||||||
|  | import java.util.Base64; | ||||||
|  | import java.util.Collection; | ||||||
|  |  | ||||||
|  | import com.fasterxml.jackson.core.JsonGenerator; | ||||||
|  | import com.fasterxml.jackson.databind.JsonSerializer; | ||||||
|  | import com.fasterxml.jackson.databind.SerializerProvider; | ||||||
|  |  | ||||||
|  | public class FileToBase64Serializer extends JsonSerializer<Collection<byte[]>> { | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void serialize(Collection<byte[]> value, JsonGenerator jsonGenerator, SerializerProvider serializers) throws IOException { | ||||||
|  |         jsonGenerator.writeStartArray(); | ||||||
|  |         for (byte[] file : value) { | ||||||
|  |             jsonGenerator.writeString(Base64.getEncoder().encodeToString(file)); | ||||||
|  |         } | ||||||
|  |         jsonGenerator.writeEndArray(); | ||||||
|  |     } | ||||||
|  | } | ||||||
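A minimal sketch of using this serializer to render raw image bytes as a JSON array of base64 strings. ExampleImagePayload and its field are hypothetical, and the byte content is a stand-in for real image data:

    import java.nio.charset.StandardCharsets;
    import java.util.List;

    import com.fasterxml.jackson.databind.annotation.JsonSerialize;
    import io.github.amithkoujalgi.ollama4j.core.utils.FileToBase64Serializer;
    import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

    public class ExampleImagePayload {
        // Each byte[] entry is written as one base64-encoded string in a JSON array.
        @JsonSerialize(using = FileToBase64Serializer.class)
        public List<byte[]> images = List.of("fake-image-bytes".getBytes(StandardCharsets.UTF_8));

        public static void main(String[] args) throws Exception {
            System.out.println(Utils.getObjectMapper().writeValueAsString(new ExampleImagePayload()));
            // prints: {"images":["ZmFrZS1pbWFnZS1ieXRlcw=="]}
        }
    }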
| @@ -0,0 +1,28 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.utils; | ||||||
|  |  | ||||||
|  | import java.net.http.HttpRequest.BodyPublisher; | ||||||
|  | import java.net.http.HttpRequest.BodyPublishers; | ||||||
|  |  | ||||||
|  | import com.fasterxml.jackson.annotation.JsonIgnore; | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * Interface to represent an OllamaRequest as an HTTP request body via {@link BodyPublishers}. | ||||||
|  |  */ | ||||||
|  | public interface OllamaRequestBody { | ||||||
|  |      | ||||||
|  |     /** | ||||||
|  |      * Transforms the OllamaRequest object into its JSON representation via Jackson. | ||||||
|  |      *  | ||||||
|  |      * @return the JSON representation of an OllamaRequest, wrapped in a {@link BodyPublisher} | ||||||
|  |      */ | ||||||
|  |     @JsonIgnore | ||||||
|  |     default BodyPublisher getBodyPublisher() { | ||||||
|  |         try { | ||||||
|  |             return BodyPublishers.ofString( | ||||||
|  |                     Utils.getObjectMapper().writeValueAsString(this)); | ||||||
|  |         } catch (JsonProcessingException e) { | ||||||
|  |             throw new IllegalArgumentException("Request is not convertible to a JSON body.", e); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  | } | ||||||
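To show what the default method buys implementers, a small assumed sketch follows. PingRequest is hypothetical, only getBodyPublisher() comes from the interface above, and a running Ollama server at the given URL is assumed:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;

    public class OllamaRequestBodyExample {
        // Hypothetical request type: any Jackson-serializable class implementing
        // OllamaRequestBody inherits a ready-to-use JSON BodyPublisher.
        static class PingRequest implements OllamaRequestBody {
            public String model = "qwen:0.5b";
            public String prompt = "ping";
        }

        public static void main(String[] args) throws Exception {
            HttpRequest request = HttpRequest.newBuilder(URI.create("http://localhost:11434/api/generate"))
                    .header("Content-Type", "application/json")
                    .POST(new PingRequest().getBodyPublisher()) // JSON body produced via Jackson
                    .build();
            HttpResponse<String> response =
                    HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.statusCode());
        }
    }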
| @@ -1,9 +1,38 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.utils; | package io.github.amithkoujalgi.ollama4j.core.utils; | ||||||
|  |  | ||||||
|  | import java.io.ByteArrayOutputStream; | ||||||
|  | import java.io.IOException; | ||||||
|  | import java.io.InputStream; | ||||||
|  | import java.net.URI; | ||||||
|  | import java.net.URISyntaxException; | ||||||
|  | import java.net.URL; | ||||||
|  |  | ||||||
| import com.fasterxml.jackson.databind.ObjectMapper; | import com.fasterxml.jackson.databind.ObjectMapper; | ||||||
|  | import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; | ||||||
|  |  | ||||||
| public class Utils { | public class Utils { | ||||||
|  |  | ||||||
|  |   private static ObjectMapper objectMapper; | ||||||
|  |  | ||||||
|   public static ObjectMapper getObjectMapper() { |   public static ObjectMapper getObjectMapper() { | ||||||
|     return new ObjectMapper(); |     if(objectMapper == null) { | ||||||
|  |       objectMapper = new ObjectMapper(); | ||||||
|  |       objectMapper.registerModule(new JavaTimeModule()); | ||||||
|  |     } | ||||||
|  |     return objectMapper; | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   public static byte[] loadImageBytesFromUrl(String imageUrl) | ||||||
|  |       throws IOException, URISyntaxException { | ||||||
|  |     URL url = new URI(imageUrl).toURL(); | ||||||
|  |     try (InputStream in = url.openStream(); | ||||||
|  |         ByteArrayOutputStream out = new ByteArrayOutputStream()) { | ||||||
|  |       byte[] buffer = new byte[1024]; | ||||||
|  |       int bytesRead; | ||||||
|  |       while ((bytesRead = in.read(buffer)) != -1) { | ||||||
|  |         out.write(buffer, 0, bytesRead); | ||||||
|  |       } | ||||||
|  |       return out.toByteArray(); | ||||||
|  |     } | ||||||
|   } |   } | ||||||
| } | } | ||||||
|   | |||||||
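A brief assumed sketch of the new loadImageBytesFromUrl helper; the URL is the same sample image used by the integration tests below:

    import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

    public class LoadImageExample {
        public static void main(String[] args) throws Exception {
            // Fetches the image over HTTP and returns the raw bytes.
            byte[] image = Utils.loadImageBytesFromUrl(
                    "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg");
            System.out.println("Downloaded " + image.length + " bytes");
        }
    }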
| @@ -1,12 +1,23 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.integrationtests; | package io.github.amithkoujalgi.ollama4j.integrationtests; | ||||||
|  |  | ||||||
| import static org.junit.jupiter.api.Assertions.*; |  | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
|  | import lombok.Data; | ||||||
|  | import org.junit.jupiter.api.BeforeEach; | ||||||
|  | import org.junit.jupiter.api.Order; | ||||||
|  | import org.junit.jupiter.api.Test; | ||||||
|  | import org.slf4j.Logger; | ||||||
|  | import org.slf4j.LoggerFactory; | ||||||
|  |  | ||||||
| import java.io.File; | import java.io.File; | ||||||
| import java.io.IOException; | import java.io.IOException; | ||||||
| import java.io.InputStream; | import java.io.InputStream; | ||||||
| @@ -16,26 +27,15 @@ import java.net.http.HttpConnectTimeoutException; | |||||||
| import java.util.List; | import java.util.List; | ||||||
| import java.util.Objects; | import java.util.Objects; | ||||||
| import java.util.Properties; | import java.util.Properties; | ||||||
| import org.junit.jupiter.api.BeforeEach; |  | ||||||
| import org.junit.jupiter.api.Order; | import static org.junit.jupiter.api.Assertions.*; | ||||||
| import org.junit.jupiter.api.Test; |  | ||||||
|  |  | ||||||
| class TestRealAPIs { | class TestRealAPIs { | ||||||
|   OllamaAPI ollamaAPI; |  | ||||||
|  |  | ||||||
|   private Properties loadProperties() { |     private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class); | ||||||
|     Properties properties = new Properties(); |  | ||||||
|     try (InputStream input = |     OllamaAPI ollamaAPI; | ||||||
|         getClass().getClassLoader().getResourceAsStream("test-config.properties")) { |     Config config; | ||||||
|       if (input == null) { |  | ||||||
|         throw new RuntimeException("Sorry, unable to find test-config.properties"); |  | ||||||
|       } |  | ||||||
|       properties.load(input); |  | ||||||
|       return properties; |  | ||||||
|     } catch (IOException e) { |  | ||||||
|       throw new RuntimeException("Error loading properties", e); |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|     private File getImageFileFromClasspath(String fileName) { |     private File getImageFileFromClasspath(String fileName) { | ||||||
|         ClassLoader classLoader = getClass().getClassLoader(); |         ClassLoader classLoader = getClass().getClassLoader(); | ||||||
| @@ -44,9 +44,9 @@ class TestRealAPIs { | |||||||
|  |  | ||||||
|     @BeforeEach |     @BeforeEach | ||||||
|     void setUp() { |     void setUp() { | ||||||
|     Properties properties = loadProperties(); |         config = new Config(); | ||||||
|     ollamaAPI = new OllamaAPI(properties.getProperty("ollama.api.url")); |         ollamaAPI = new OllamaAPI(config.getOllamaURL()); | ||||||
|     ollamaAPI.setRequestTimeoutSeconds(20); |         ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds()); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
| @@ -64,7 +64,7 @@ class TestRealAPIs { | |||||||
|         } catch (HttpConnectTimeoutException e) { |         } catch (HttpConnectTimeoutException e) { | ||||||
|             fail(e.getMessage()); |             fail(e.getMessage()); | ||||||
|         } catch (Exception e) { |         } catch (Exception e) { | ||||||
|       throw new RuntimeException(e); |             fail(e); | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @@ -76,7 +76,7 @@ class TestRealAPIs { | |||||||
|             assertNotNull(ollamaAPI.listModels()); |             assertNotNull(ollamaAPI.listModels()); | ||||||
|             ollamaAPI.listModels().forEach(System.out::println); |             ollamaAPI.listModels().forEach(System.out::println); | ||||||
|         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { |         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||||
|       throw new RuntimeException(e); |             fail(e); | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @@ -85,13 +85,26 @@ class TestRealAPIs { | |||||||
|     void testPullModel() { |     void testPullModel() { | ||||||
|         testEndpointReachability(); |         testEndpointReachability(); | ||||||
|         try { |         try { | ||||||
|       ollamaAPI.pullModel(OllamaModelType.LLAMA2); |             ollamaAPI.pullModel(config.getModel()); | ||||||
|             boolean found = |             boolean found = | ||||||
|                     ollamaAPI.listModels().stream() |                     ollamaAPI.listModels().stream() | ||||||
|               .anyMatch(model -> model.getModelName().equals(OllamaModelType.LLAMA2)); |                             .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel())); | ||||||
|             assertTrue(found); |             assertTrue(found); | ||||||
|         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { |         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||||
|       throw new RuntimeException(e); |             fail(e); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     @Order(3) | ||||||
|  |     void testListDetails() { | ||||||
|  |         testEndpointReachability(); | ||||||
|  |         try { | ||||||
|  |             ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel()); | ||||||
|  |             assertNotNull(modelDetails); | ||||||
|  |             System.out.println(modelDetails); | ||||||
|  |         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||||
|  |             fail(e); | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @@ -101,15 +114,41 @@ class TestRealAPIs { | |||||||
|         testEndpointReachability(); |         testEndpointReachability(); | ||||||
|         try { |         try { | ||||||
|             OllamaResult result = |             OllamaResult result = | ||||||
|           ollamaAPI.ask( |                     ollamaAPI.generate( | ||||||
|               OllamaModelType.LLAMA2, |                             config.getModel(), | ||||||
|                             "What is the capital of France? And what's France's connection with Mona Lisa?", |                             "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||||
|  |                             false, | ||||||
|                             new OptionsBuilder().build()); |                             new OptionsBuilder().build()); | ||||||
|             assertNotNull(result); |             assertNotNull(result); | ||||||
|             assertNotNull(result.getResponse()); |             assertNotNull(result.getResponse()); | ||||||
|             assertFalse(result.getResponse().isEmpty()); |             assertFalse(result.getResponse().isEmpty()); | ||||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { |         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|       throw new RuntimeException(e); |             fail(e); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     @Order(3) | ||||||
|  |     void testAskModelWithDefaultOptionsStreamed() { | ||||||
|  |         testEndpointReachability(); | ||||||
|  |         try { | ||||||
|  |             StringBuffer sb = new StringBuffer(""); | ||||||
|  |             OllamaResult result = ollamaAPI.generate(config.getModel(), | ||||||
|  |                     "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||||
|  |                     false, | ||||||
|  |                     new OptionsBuilder().build(), (s) -> { | ||||||
|  |                         LOG.info(s); | ||||||
|  |                         String substring = s.substring(sb.toString().length(), s.length()); | ||||||
|  |                         LOG.info(substring); | ||||||
|  |                         sb.append(substring); | ||||||
|  |                     }); | ||||||
|  |  | ||||||
|  |             assertNotNull(result); | ||||||
|  |             assertNotNull(result.getResponse()); | ||||||
|  |             assertFalse(result.getResponse().isEmpty()); | ||||||
|  |             assertEquals(sb.toString().trim(), result.getResponse().trim()); | ||||||
|  |         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|  |             fail(e); | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @@ -119,15 +158,131 @@ class TestRealAPIs { | |||||||
|         testEndpointReachability(); |         testEndpointReachability(); | ||||||
|         try { |         try { | ||||||
|             OllamaResult result = |             OllamaResult result = | ||||||
|           ollamaAPI.ask( |                     ollamaAPI.generate( | ||||||
|               OllamaModelType.LLAMA2, |                             config.getModel(), | ||||||
|                             "What is the capital of France? And what's France's connection with Mona Lisa?", |                             "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||||
|  |                             true, | ||||||
|                             new OptionsBuilder().setTemperature(0.9f).build()); |                             new OptionsBuilder().setTemperature(0.9f).build()); | ||||||
|             assertNotNull(result); |             assertNotNull(result); | ||||||
|             assertNotNull(result.getResponse()); |             assertNotNull(result.getResponse()); | ||||||
|             assertFalse(result.getResponse().isEmpty()); |             assertFalse(result.getResponse().isEmpty()); | ||||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { |         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|       throw new RuntimeException(e); |             fail(e); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     @Order(3) | ||||||
|  |     void testChat() { | ||||||
|  |         testEndpointReachability(); | ||||||
|  |         try { | ||||||
|  |             OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); | ||||||
|  |             OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") | ||||||
|  |                     .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!") | ||||||
|  |                     .withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?") | ||||||
|  |                     .build(); | ||||||
|  |  | ||||||
|  |             OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |             assertNotNull(chatResult); | ||||||
|  |             assertFalse(chatResult.getResponse().isBlank()); | ||||||
|  |             assertEquals(4, chatResult.getChatHistory().size()); | ||||||
|  |         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|  |             fail(e); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     @Order(3) | ||||||
|  |     void testChatWithSystemPrompt() { | ||||||
|  |         testEndpointReachability(); | ||||||
|  |         try { | ||||||
|  |             OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); | ||||||
|  |             OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, | ||||||
|  |                             "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") | ||||||
|  |                     .withMessage(OllamaChatMessageRole.USER, | ||||||
|  |                             "What is the capital of France? And what's France's connection with Mona Lisa?") | ||||||
|  |                     .build(); | ||||||
|  |  | ||||||
|  |             OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |             assertNotNull(chatResult); | ||||||
|  |             assertFalse(chatResult.getResponse().isBlank()); | ||||||
|  |             assertTrue(chatResult.getResponse().startsWith("NI")); | ||||||
|  |             assertEquals(3, chatResult.getChatHistory().size()); | ||||||
|  |         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|  |             fail(e); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     @Order(3) | ||||||
|  |     void testChatWithStream() { | ||||||
|  |         testEndpointReachability(); | ||||||
|  |         try { | ||||||
|  |             OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); | ||||||
|  |             OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, | ||||||
|  |                             "What is the capital of France? And what's France's connection with Mona Lisa?") | ||||||
|  |                     .build(); | ||||||
|  |  | ||||||
|  |             StringBuffer sb = new StringBuffer(""); | ||||||
|  |  | ||||||
|  |             OllamaChatResult chatResult = ollamaAPI.chat(requestModel, (s) -> { | ||||||
|  |                 LOG.info(s); | ||||||
|  |                 String substring = s.substring(sb.toString().length(), s.length()); | ||||||
|  |                 LOG.info(substring); | ||||||
|  |                 sb.append(substring); | ||||||
|  |             }); | ||||||
|  |             assertNotNull(chatResult); | ||||||
|  |             assertEquals(sb.toString().trim(), chatResult.getResponse().trim()); | ||||||
|  |         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|  |             fail(e); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     @Order(3) | ||||||
|  |     void testChatWithImageFromFileWithHistoryRecognition() { | ||||||
|  |         testEndpointReachability(); | ||||||
|  |         try { | ||||||
|  |             OllamaChatRequestBuilder builder = | ||||||
|  |                     OllamaChatRequestBuilder.getInstance(config.getImageModel()); | ||||||
|  |             OllamaChatRequestModel requestModel = | ||||||
|  |                     builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", | ||||||
|  |                             List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); | ||||||
|  |  | ||||||
|  |             OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |             assertNotNull(chatResult); | ||||||
|  |             assertNotNull(chatResult.getResponse()); | ||||||
|  |  | ||||||
|  |             builder.reset(); | ||||||
|  |  | ||||||
|  |             requestModel = | ||||||
|  |                     builder.withMessages(chatResult.getChatHistory()) | ||||||
|  |                             .withMessage(OllamaChatMessageRole.USER, "What's the dog's breed?").build(); | ||||||
|  |  | ||||||
|  |             chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |             assertNotNull(chatResult); | ||||||
|  |             assertNotNull(chatResult.getResponse()); | ||||||
|  |  | ||||||
|  |  | ||||||
|  |         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|  |             fail(e); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     @Order(3) | ||||||
|  |     void testChatWithImageFromURL() { | ||||||
|  |         testEndpointReachability(); | ||||||
|  |         try { | ||||||
|  |             OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel()); | ||||||
|  |             OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", | ||||||
|  |                             "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg") | ||||||
|  |                     .build(); | ||||||
|  |  | ||||||
|  |             OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||||
|  |             assertNotNull(chatResult); | ||||||
|  |         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|  |             fail(e); | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @@ -138,8 +293,8 @@ class TestRealAPIs { | |||||||
|         File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg"); |         File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg"); | ||||||
|         try { |         try { | ||||||
|             OllamaResult result = |             OllamaResult result = | ||||||
|           ollamaAPI.askWithImageFiles( |                     ollamaAPI.generateWithImageFiles( | ||||||
|               OllamaModelType.LLAVA, |                             config.getImageModel(), | ||||||
|                             "What is in this image?", |                             "What is in this image?", | ||||||
|                             List.of(imageFile), |                             List.of(imageFile), | ||||||
|                             new OptionsBuilder().build()); |                             new OptionsBuilder().build()); | ||||||
| @@ -147,7 +302,31 @@ class TestRealAPIs { | |||||||
|             assertNotNull(result.getResponse()); |             assertNotNull(result.getResponse()); | ||||||
|             assertFalse(result.getResponse().isEmpty()); |             assertFalse(result.getResponse().isEmpty()); | ||||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { |         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|       throw new RuntimeException(e); |             fail(e); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     @Order(3) | ||||||
|  |     void testAskModelWithOptionsAndImageFilesStreamed() { | ||||||
|  |         testEndpointReachability(); | ||||||
|  |         File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg"); | ||||||
|  |         try { | ||||||
|  |             StringBuffer sb = new StringBuffer(""); | ||||||
|  |  | ||||||
|  |             OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(), | ||||||
|  |                     "What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> { | ||||||
|  |                         LOG.info(s); | ||||||
|  |                         String substring = s.substring(sb.toString().length(), s.length()); | ||||||
|  |                         LOG.info(substring); | ||||||
|  |                         sb.append(substring); | ||||||
|  |                     }); | ||||||
|  |             assertNotNull(result); | ||||||
|  |             assertNotNull(result.getResponse()); | ||||||
|  |             assertFalse(result.getResponse().isEmpty()); | ||||||
|  |             assertEquals(sb.toString().trim(), result.getResponse().trim()); | ||||||
|  |         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|  |             fail(e); | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @@ -157,8 +336,8 @@ class TestRealAPIs { | |||||||
|         testEndpointReachability(); |         testEndpointReachability(); | ||||||
|         try { |         try { | ||||||
|             OllamaResult result = |             OllamaResult result = | ||||||
|           ollamaAPI.askWithImageURLs( |                     ollamaAPI.generateWithImageURLs( | ||||||
|               OllamaModelType.LLAVA, |                             config.getImageModel(), | ||||||
|                             "What is in this image?", |                             "What is in this image?", | ||||||
|                             List.of( |                             List.of( | ||||||
|                                     "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"), |                                     "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"), | ||||||
| @@ -167,7 +346,50 @@ class TestRealAPIs { | |||||||
|             assertNotNull(result.getResponse()); |             assertNotNull(result.getResponse()); | ||||||
|             assertFalse(result.getResponse().isEmpty()); |             assertFalse(result.getResponse().isEmpty()); | ||||||
|         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { |         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||||
|       throw new RuntimeException(e); |             fail(e); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     @Order(3) | ||||||
|  |     public void testEmbedding() { | ||||||
|  |         testEndpointReachability(); | ||||||
|  |         try { | ||||||
|  |             OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder | ||||||
|  |                     .getInstance(config.getModel(), "What is the capital of France?").build(); | ||||||
|  |  | ||||||
|  |             List<Double> embeddings = ollamaAPI.generateEmbeddings(request); | ||||||
|  |  | ||||||
|  |             assertNotNull(embeddings); | ||||||
|  |             assertFalse(embeddings.isEmpty()); | ||||||
|  |         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|  |             fail(e); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | @Data | ||||||
|  | class Config { | ||||||
|  |     private String ollamaURL; | ||||||
|  |     private String model; | ||||||
|  |     private String imageModel; | ||||||
|  |     private int requestTimeoutSeconds; | ||||||
|  |  | ||||||
|  |     public Config() { | ||||||
|  |         Properties properties = new Properties(); | ||||||
|  |         try (InputStream input = | ||||||
|  |                      getClass().getClassLoader().getResourceAsStream("test-config.properties")) { | ||||||
|  |             if (input == null) { | ||||||
|  |                 throw new RuntimeException("Sorry, unable to find test-config.properties"); | ||||||
|  |             } | ||||||
|  |             properties.load(input); | ||||||
|  |             this.ollamaURL = properties.getProperty("ollama.url"); | ||||||
|  |             this.model = properties.getProperty("ollama.model"); | ||||||
|  |             this.imageModel = properties.getProperty("ollama.model.image"); | ||||||
|  |             this.requestTimeoutSeconds = | ||||||
|  |                     Integer.parseInt(properties.getProperty("ollama.request-timeout-seconds")); | ||||||
|  |         } catch (IOException e) { | ||||||
|  |             throw new RuntimeException("Error loading properties", e); | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,20 +1,21 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.unittests; | package io.github.amithkoujalgi.ollama4j.unittests; | ||||||
|  |  | ||||||
| import static org.mockito.Mockito.*; |  | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; | import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultCallback; | import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultStreamer; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType; | import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
|  | import org.junit.jupiter.api.Test; | ||||||
|  | import org.mockito.Mockito; | ||||||
|  |  | ||||||
| import java.io.IOException; | import java.io.IOException; | ||||||
| import java.net.URISyntaxException; | import java.net.URISyntaxException; | ||||||
| import java.util.ArrayList; | import java.util.ArrayList; | ||||||
| import java.util.Collections; | import java.util.Collections; | ||||||
| import org.junit.jupiter.api.Test; |  | ||||||
| import org.mockito.Mockito; | import static org.mockito.Mockito.*; | ||||||
|  |  | ||||||
| class TestMockedAPIs { | class TestMockedAPIs { | ||||||
|     @Test |     @Test | ||||||
| @@ -103,10 +104,10 @@ class TestMockedAPIs { | |||||||
|         String prompt = "some prompt text"; |         String prompt = "some prompt text"; | ||||||
|         OptionsBuilder optionsBuilder = new OptionsBuilder(); |         OptionsBuilder optionsBuilder = new OptionsBuilder(); | ||||||
|         try { |         try { | ||||||
|       when(ollamaAPI.ask(model, prompt, optionsBuilder.build())) |             when(ollamaAPI.generate(model, prompt, false, optionsBuilder.build())) | ||||||
|                     .thenReturn(new OllamaResult("", 0, 200)); |                     .thenReturn(new OllamaResult("", 0, 200)); | ||||||
|       ollamaAPI.ask(model, prompt, optionsBuilder.build()); |             ollamaAPI.generate(model, prompt, false, optionsBuilder.build()); | ||||||
|       verify(ollamaAPI, times(1)).ask(model, prompt, optionsBuilder.build()); |             verify(ollamaAPI, times(1)).generate(model, prompt, false, optionsBuilder.build()); | ||||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { |         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|             throw new RuntimeException(e); |             throw new RuntimeException(e); | ||||||
|         } |         } | ||||||
| @@ -118,13 +119,14 @@ class TestMockedAPIs { | |||||||
|         String model = OllamaModelType.LLAMA2; |         String model = OllamaModelType.LLAMA2; | ||||||
|         String prompt = "some prompt text"; |         String prompt = "some prompt text"; | ||||||
|         try { |         try { | ||||||
|       when(ollamaAPI.askWithImageFiles( |             when(ollamaAPI.generateWithImageFiles( | ||||||
|                     model, prompt, Collections.emptyList(), new OptionsBuilder().build())) |                     model, prompt, Collections.emptyList(), new OptionsBuilder().build())) | ||||||
|                     .thenReturn(new OllamaResult("", 0, 200)); |                     .thenReturn(new OllamaResult("", 0, 200)); | ||||||
|       ollamaAPI.askWithImageFiles( |             ollamaAPI.generateWithImageFiles( | ||||||
|                     model, prompt, Collections.emptyList(), new OptionsBuilder().build()); |                     model, prompt, Collections.emptyList(), new OptionsBuilder().build()); | ||||||
|             verify(ollamaAPI, times(1)) |             verify(ollamaAPI, times(1)) | ||||||
|           .askWithImageFiles(model, prompt, Collections.emptyList(), new OptionsBuilder().build()); |                     .generateWithImageFiles( | ||||||
|  |                             model, prompt, Collections.emptyList(), new OptionsBuilder().build()); | ||||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { |         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|             throw new RuntimeException(e); |             throw new RuntimeException(e); | ||||||
|         } |         } | ||||||
| @@ -136,13 +138,14 @@ class TestMockedAPIs { | |||||||
|         String model = OllamaModelType.LLAMA2; |         String model = OllamaModelType.LLAMA2; | ||||||
|         String prompt = "some prompt text"; |         String prompt = "some prompt text"; | ||||||
|         try { |         try { | ||||||
|       when(ollamaAPI.askWithImageURLs( |             when(ollamaAPI.generateWithImageURLs( | ||||||
|                     model, prompt, Collections.emptyList(), new OptionsBuilder().build())) |                     model, prompt, Collections.emptyList(), new OptionsBuilder().build())) | ||||||
|                     .thenReturn(new OllamaResult("", 0, 200)); |                     .thenReturn(new OllamaResult("", 0, 200)); | ||||||
|       ollamaAPI.askWithImageURLs( |             ollamaAPI.generateWithImageURLs( | ||||||
|                     model, prompt, Collections.emptyList(), new OptionsBuilder().build()); |                     model, prompt, Collections.emptyList(), new OptionsBuilder().build()); | ||||||
|             verify(ollamaAPI, times(1)) |             verify(ollamaAPI, times(1)) | ||||||
|           .askWithImageURLs(model, prompt, Collections.emptyList(), new OptionsBuilder().build()); |                     .generateWithImageURLs( | ||||||
|  |                             model, prompt, Collections.emptyList(), new OptionsBuilder().build()); | ||||||
|         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { |         } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { | ||||||
|             throw new RuntimeException(e); |             throw new RuntimeException(e); | ||||||
|         } |         } | ||||||
| @@ -153,9 +156,9 @@ class TestMockedAPIs { | |||||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); |         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||||
|         String model = OllamaModelType.LLAMA2; |         String model = OllamaModelType.LLAMA2; | ||||||
|         String prompt = "some prompt text"; |         String prompt = "some prompt text"; | ||||||
|     when(ollamaAPI.askAsync(model, prompt)) |         when(ollamaAPI.generateAsync(model, prompt, false)) | ||||||
|         .thenReturn(new OllamaAsyncResultCallback(null, null, 3)); |                 .thenReturn(new OllamaAsyncResultStreamer(null, null, 3)); | ||||||
|     ollamaAPI.askAsync(model, prompt); |         ollamaAPI.generateAsync(model, prompt, false); | ||||||
|     verify(ollamaAPI, times(1)).askAsync(model, prompt); |         verify(ollamaAPI, times(1)).generateAsync(model, prompt, false); | ||||||
|     } |     } | ||||||
| } | } | ||||||
|   | |||||||
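The mocked tests above reflect the rename from ask/askAsync to generate/generateAsync, each now taking an extra boolean flag. A hedged sketch of the new call shape (host URL and prompt are placeholders; the flag is passed as false, mirroring the tests):

    import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
    import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
    import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
    import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

    public class GenerateExample {
        public static void main(String[] args) throws Exception {
            OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
            // Former ask(model, prompt, options); now generate(model, prompt, <flag>, options).
            OllamaResult result = ollamaAPI.generate(
                    OllamaModelType.LLAMA2,
                    "Why is the sky blue?",
                    false,
                    new OptionsBuilder().build());
            System.out.println(result.getResponse());
        }
    }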
| @@ -0,0 +1,35 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.unittests.jackson; | ||||||
|  |  | ||||||
|  | import static org.junit.jupiter.api.Assertions.assertEquals; | ||||||
|  | import static org.junit.jupiter.api.Assertions.fail; | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import com.fasterxml.jackson.databind.ObjectMapper; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  |  | ||||||
|  | public abstract class AbstractSerializationTest<T> { | ||||||
|  |  | ||||||
|  |     protected ObjectMapper mapper = Utils.getObjectMapper(); | ||||||
|  |  | ||||||
|  |     protected String serialize(T obj) { | ||||||
|  |         try { | ||||||
|  |             return mapper.writeValueAsString(obj); | ||||||
|  |         } catch (JsonProcessingException e) { | ||||||
|  |             fail("Could not serialize request!", e); | ||||||
|  |             return null; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     protected T deserialize(String jsonObject, Class<T> deserializationClass) { | ||||||
|  |         try { | ||||||
|  |             return mapper.readValue(jsonObject, deserializationClass); | ||||||
|  |         } catch (JsonProcessingException e) { | ||||||
|  |             fail("Could not deserialize jsonObject!", e); | ||||||
|  |             return null; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     protected void assertEqualsAfterUnmarshalling(T unmarshalledObject, | ||||||
|  |         T req) { | ||||||
|  |         assertEquals(req, unmarshalledObject); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,113 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.unittests.jackson; | ||||||
|  |  | ||||||
|  | import static org.junit.jupiter.api.Assertions.assertEquals; | ||||||
|  |  | ||||||
|  | import java.io.File; | ||||||
|  | import java.util.List; | ||||||
|  |  | ||||||
|  | import org.json.JSONObject; | ||||||
|  | import org.junit.jupiter.api.BeforeEach; | ||||||
|  | import org.junit.jupiter.api.Test; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
|  |  | ||||||
|  | public class TestChatRequestSerialization extends AbstractSerializationTest<OllamaChatRequestModel> { | ||||||
|  |  | ||||||
|  |     private OllamaChatRequestBuilder builder; | ||||||
|  |  | ||||||
|  |     @BeforeEach | ||||||
|  |     public void init() { | ||||||
|  |         builder = OllamaChatRequestBuilder.getInstance("DummyModel"); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestOnlyMandatoryFields() { | ||||||
|  |         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build(); | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestMultipleMessages() { | ||||||
|  |         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt") | ||||||
|  |         .withMessage(OllamaChatMessageRole.USER, "Some prompt") | ||||||
|  |         .build(); | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestWithMessageAndImage() { | ||||||
|  |         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt", | ||||||
|  |                 List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build(); | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestWithOptions() { | ||||||
|  |         OptionsBuilder b = new OptionsBuilder(); | ||||||
|  |         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") | ||||||
|  |             .withOptions(b.setMirostat(1).build()) | ||||||
|  |             .withOptions(b.setTemperature(1L).build()) | ||||||
|  |             .withOptions(b.setMirostatEta(1L).build()) | ||||||
|  |             .withOptions(b.setMirostatTau(1L).build()) | ||||||
|  |             .withOptions(b.setNumGpu(1).build()) | ||||||
|  |             .withOptions(b.setSeed(1).build()) | ||||||
|  |             .withOptions(b.setTopK(1).build()) | ||||||
|  |             .withOptions(b.setTopP(1).build()) | ||||||
|  |             .build(); | ||||||
|  |  | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         OllamaChatRequestModel deserializeRequest = deserialize(jsonRequest, OllamaChatRequestModel.class); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserializeRequest, req); | ||||||
|  |         assertEquals(1, deserializeRequest.getOptions().get("mirostat")); | ||||||
|  |         assertEquals(1.0, deserializeRequest.getOptions().get("temperature")); | ||||||
|  |         assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_eta")); | ||||||
|  |         assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_tau")); | ||||||
|  |         assertEquals(1, deserializeRequest.getOptions().get("num_gpu")); | ||||||
|  |         assertEquals(1, deserializeRequest.getOptions().get("seed")); | ||||||
|  |         assertEquals(1, deserializeRequest.getOptions().get("top_k")); | ||||||
|  |         assertEquals(1.0, deserializeRequest.getOptions().get("top_p")); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testWithJsonFormat() { | ||||||
|  |         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") | ||||||
|  |                 .withGetJsonResponse().build(); | ||||||
|  |  | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         // No Jackson round-trip here: the serialized "format" property is the string "json", | ||||||
|  |         // not a boolean, and request deserialization is never exercised in real code anyway. | ||||||
|  |         JSONObject jsonObject = new JSONObject(jsonRequest); | ||||||
|  |         String requestFormatProperty = jsonObject.getString("format"); | ||||||
|  |         assertEquals("json", requestFormatProperty); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testWithTemplate() { | ||||||
|  |         OllamaChatRequestModel req = builder.withTemplate("System Template") | ||||||
|  |             .build(); | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testWithStreaming() { | ||||||
|  |         OllamaChatRequestModel req = builder.withStreaming().build(); | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).isStream(), true); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testWithKeepAlive() { | ||||||
|  |         String expectedKeepAlive = "5m"; | ||||||
|  |         OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive) | ||||||
|  |             .build(); | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,37 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.unittests.jackson; | ||||||
|  |  | ||||||
|  | import static org.junit.jupiter.api.Assertions.assertEquals; | ||||||
|  | import org.junit.jupiter.api.BeforeEach; | ||||||
|  | import org.junit.jupiter.api.Test; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
|  |  | ||||||
|  | public class TestEmbeddingsRequestSerialization extends AbstractSerializationTest<OllamaEmbeddingsRequestModel> { | ||||||
|  |  | ||||||
|  |     private OllamaEmbeddingsRequestBuilder builder; | ||||||
|  |  | ||||||
|  |     @BeforeEach | ||||||
|  |     public void init() { | ||||||
|  |         builder = OllamaEmbeddingsRequestBuilder.getInstance("DummyModel", "DummyPrompt"); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestOnlyMandatoryFields() { | ||||||
|  |         OllamaEmbeddingsRequestModel req = builder.build(); | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaEmbeddingsRequestModel.class), req); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestWithOptions() { | ||||||
|  |         OptionsBuilder b = new OptionsBuilder(); | ||||||
|  |         OllamaEmbeddingsRequestModel req = builder | ||||||
|  |                 .withOptions(b.setMirostat(1).build()).build(); | ||||||
|  |  | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         OllamaEmbeddingsRequestModel deserializeRequest = deserialize(jsonRequest, OllamaEmbeddingsRequestModel.class); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserializeRequest, req); | ||||||
|  |         assertEquals(1, deserializeRequest.getOptions().get("mirostat")); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,56 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.unittests.jackson; | ||||||
|  |  | ||||||
|  | import static org.junit.jupiter.api.Assertions.assertEquals; | ||||||
|  |  | ||||||
|  | import org.json.JSONObject; | ||||||
|  | import org.junit.jupiter.api.BeforeEach; | ||||||
|  | import org.junit.jupiter.api.Test; | ||||||
|  |  | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestBuilder; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
|  |  | ||||||
|  | public class TestGenerateRequestSerialization extends AbstractSerializationTest<OllamaGenerateRequestModel> { | ||||||
|  |  | ||||||
|  |     private OllamaGenerateRequestBuilder builder; | ||||||
|  |  | ||||||
|  |     @BeforeEach | ||||||
|  |     public void init() { | ||||||
|  |         builder = OllamaGenerateRequestBuilder.getInstance("DummyModel"); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestOnlyMandatoryFields() { | ||||||
|  |         OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build(); | ||||||
|  |  | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequestModel.class), req); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testRequestWithOptions() { | ||||||
|  |         OptionsBuilder b = new OptionsBuilder(); | ||||||
|  |         OllamaGenerateRequestModel req = | ||||||
|  |                 builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build(); | ||||||
|  |  | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         OllamaGenerateRequestModel deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequestModel.class); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserializeRequest, req); | ||||||
|  |         assertEquals(1, deserializeRequest.getOptions().get("mirostat")); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testWithJsonFormat() { | ||||||
|  |         OllamaGenerateRequestModel req = | ||||||
|  |                 builder.withPrompt("Some prompt").withGetJsonResponse().build(); | ||||||
|  |  | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         // No Jackson round-trip here: the serialized "format" property is the string "json", | ||||||
|  |         // not a boolean, and request deserialization is never exercised in real code anyway. | ||||||
|  |         JSONObject jsonObject = new JSONObject(jsonRequest); | ||||||
|  |         String requestFormatProperty = jsonObject.getString("format"); | ||||||
|  |         assertEquals("json", requestFormatProperty); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | } | ||||||
| @@ -0,0 +1,42 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.unittests.jackson; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.Model; | ||||||
|  | import org.junit.jupiter.api.Test; | ||||||
|  |  | ||||||
|  | public class TestModelRequestSerialization extends AbstractSerializationTest<Model> { | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testDeserializationOfModelResponseWithOffsetTime(){ | ||||||
|  |         String serializedTestStringWithOffsetTime = "{\n" | ||||||
|  |                 + "\"name\": \"codellama:13b\",\n" | ||||||
|  |                 + "\"modified_at\": \"2023-11-04T14:56:49.277302595-07:00\",\n" | ||||||
|  |                 + "\"size\": 7365960935,\n" | ||||||
|  |                 + "\"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n" | ||||||
|  |                 + "\"details\": {\n" | ||||||
|  |                 + "\"format\": \"gguf\",\n" | ||||||
|  |                 + "\"family\": \"llama\",\n" | ||||||
|  |                 + "\"families\": null,\n" | ||||||
|  |                 + "\"parameter_size\": \"13B\",\n" | ||||||
|  |                 + "\"quantization_level\": \"Q4_0\"\n" | ||||||
|  |                 + "}}"; | ||||||
|  |         deserialize(serializedTestStringWithOffsetTime,Model.class); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testDeserializationOfModelResponseWithZuluTime(){ | ||||||
|  |         String serializedTestStringWithZuluTimezone = "{\n" | ||||||
|  |                 + "\"name\": \"codellama:13b\",\n" | ||||||
|  |                 + "\"modified_at\": \"2023-11-04T14:56:49.277302595Z\",\n" | ||||||
|  |                 + "\"size\": 7365960935,\n" | ||||||
|  |                 + "\"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n" | ||||||
|  |                 + "\"details\": {\n" | ||||||
|  |                 + "\"format\": \"gguf\",\n" | ||||||
|  |                 + "\"family\": \"llama\",\n" | ||||||
|  |                 + "\"families\": null,\n" | ||||||
|  |                 + "\"parameter_size\": \"13B\",\n" | ||||||
|  |                 + "\"quantization_level\": \"Q4_0\"\n" | ||||||
|  |                 + "}}"; | ||||||
|  |         deserialize(serializedTestStringWithZuluTimezone,Model.class); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | } | ||||||
| @@ -1,2 +1,4 @@ | |||||||
| ollama.api.url=http://192.168.29.223:11434 | ollama.url=http://localhost:11434 | ||||||
| ollama.model=llava | ollama.model=qwen:0.5b | ||||||
|  | ollama.model.image=llava | ||||||
|  | ollama.request-timeout-seconds=120 | ||||||