mirror of
				https://github.com/amithkoujalgi/ollama4j.git
				synced 2025-10-31 16:40:41 +01:00 
			
		
		
		
	Compare commits
	
		
			117 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|   | 4a91918e84 | ||
|   | ff3344616c | ||
|   | 726fea5b74 | ||
|   | a09f1362e9 | ||
|   | 4ef0821932 | ||
|   | 2d3cf228cb | ||
|   | 5b3713c69e | ||
|   | e9486cbb8e | ||
|   | 057f0babeb | ||
|   | da146640ca | ||
|   | 82be761b86 | ||
|   | 9c3fc49df1 | ||
|   | 5f19eb17ac | ||
|   | ecb04d6d82 | ||
|   | 3fc7e9423c | ||
|   | 405a08b330 | ||
|   | 921f745435 | ||
|   | bedfec6bf9 | ||
|   | afa09e87a5 | ||
|   | baf2320ea6 | ||
|   | 948a7444fb | ||
|   | ec0eb8b469 | ||
|   | 8f33de7e59 | ||
|   | 8c59e6511b | ||
|   | b93fc7623a | ||
|   | bd1a57c7e0 | ||
|   | 7fabead249 | ||
|   | 268a973d5e | ||
|   | d949a3cb69 | ||
|   | e2443ed68a | ||
|   | 37193b1f5b | ||
|   | e33071ae38 | ||
|   | fffc8dc526 | ||
|   | def950cc9c | ||
|   | f4db7ca326 | ||
|   | 18760250ea | ||
|   | 233597efd1 | ||
|   | cec9f29eb7 | ||
|   | 20cb92a418 | ||
|   | b0dc38954b | ||
|   | 1479d0a494 | ||
|   | b328daee43 | ||
|   | b90c8bc622 | ||
|   | 660a1b937a | ||
|   | fd961d7037 | ||
|   | b48f9550c3 | ||
|   | 363969a275 | ||
|   | 992625cf86 | ||
|   | bbebd26d07 | ||
|   | 3aa0fc77cb | ||
|   | 11a98a72a1 | ||
|   | 422601c0fc | ||
|   | 75e6576a13 | ||
|   | 51dd3f3e1e | ||
|   | 30250f79d9 | ||
|   | d4ee9ed051 | ||
|   | 4412ac683a | ||
|   | b5b1a26941 | ||
|   | a84230bbd1 | ||
|   | 00c9b16556 | ||
|   | 9a2194334f | ||
|   | f9cf11ecdf | ||
|   | 0af80865c3 | ||
|   | a304c01194 | ||
|   | 887708864e | ||
|   | 2f0c4fdcc9 | ||
|   | 73aabd7ca6 | ||
|   | 17ca2bdee3 | ||
|   | e43bd3acb4 | ||
|   | 0b041f4340 | ||
|   | 6c6062b757 | ||
|   | 68fd8b7cc8 | ||
|   | bb6f8aa343 | ||
|   | 12802be0bc | ||
|   | bd56ccfef7 | ||
|   | 51563f276f | ||
|   | 6e282124bf | ||
|   | 3ab9e4c283 | ||
|   | 2db6a22cc7 | ||
|   | cc69341620 | ||
|   | 4589a9032c | ||
|   | da273402b5 | ||
|   | cfa8aa14d7 | ||
|   | bc4e8303aa | ||
|   | f2f740a2a0 | ||
|   | 4cbb783a61 | ||
|   | 5c9e0b7d8a | ||
|   | 2f8577a24d | ||
|   | 02116b7025 | ||
|   | f3778f8786 | ||
|   | c6141634db | ||
|   | d9f98ad901 | ||
|   | 79d97445b8 | ||
|   | 1c40697c96 | ||
|   | f03026abb3 | ||
|   | 63a6e81ac2 | ||
|   | 76cad0f584 | ||
|   | bee2908d1e | ||
|   | 8a4c9fd969 | ||
|   | d470f940b0 | ||
|   | df402efaba | ||
|   | 677362abbf | ||
|   | 81689be194 | ||
|   | fd93036d08 | ||
|   | c9b05a725b | ||
|   | a4e1b4afe9 | ||
|   | 3d21813abb | ||
|   | 383d0f56ca | ||
|   | af1b213a76 | ||
|   | fed89a9643 | ||
|   | fd32aa33ff | ||
|   | b8a13e89b1 | ||
|   | c8f27edd6e | ||
|   | 5a936d8174 | ||
|   | 9b5ddbf4c4 | ||
|   | 7c233d5734 | ||
|   | e85aeae6e0 | 
							
								
								
									
										58
									
								
								.github/workflows/gh-mvn-publish.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										58
									
								
								.github/workflows/gh-mvn-publish.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,58 @@ | ||||
| name: Release Artifacts to GitHub Maven Packages | ||||
|  | ||||
| on: | ||||
|   release: | ||||
|     types: [ created ] | ||||
|  | ||||
| jobs: | ||||
|   build: | ||||
|  | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       contents: read | ||||
|       packages: write | ||||
|  | ||||
|     steps: | ||||
|       - uses: actions/checkout@v3 | ||||
|       - name: Set up JDK 17 | ||||
|         uses: actions/setup-java@v3 | ||||
|         with: | ||||
|           java-version: '17' | ||||
|           distribution: 'temurin' | ||||
|           server-id: github | ||||
|           settings-path: ${{ github.workspace }} | ||||
|  | ||||
|       - name: maven-settings-xml-action | ||||
|         uses: whelk-io/maven-settings-xml-action@v22 | ||||
|         with: | ||||
|           servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]' | ||||
|  | ||||
|       - name: Find and Replace | ||||
|         uses: jacobtomlinson/gha-find-replace@v3 | ||||
|         with: | ||||
|           find: "ollama4j-revision" | ||||
|           replace: ${{ github.ref_name }} | ||||
|           regex: false | ||||
|  | ||||
|       - name: Find and Replace | ||||
|         uses: jacobtomlinson/gha-find-replace@v3 | ||||
|         with: | ||||
|           find: "mvn-repo-id" | ||||
|           replace: github | ||||
|           regex: false | ||||
|  | ||||
|       - name: Import GPG key | ||||
|         uses: crazy-max/ghaction-import-gpg@v6 | ||||
|         with: | ||||
|           gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} | ||||
|           passphrase: ${{ secrets.GPG_PASSPHRASE }} | ||||
|       - name: List keys | ||||
|         run: gpg -K | ||||
|  | ||||
|       - name: Build with Maven | ||||
|         run: mvn --file pom.xml -U clean package -Punit-tests | ||||
|  | ||||
|       - name: Publish to GitHub Packages Apache Maven | ||||
|         run: mvn deploy -Punit-tests -s $GITHUB_WORKSPACE/settings.xml -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=github -Drepo.user=${{ secrets.GH_MVN_USER }} -Drepo.pass=${{ secrets.GH_MVN_PASS }} -DaltDeploymentRepository=github::default::https://maven.pkg.github.com/ollama4j/ollama4j | ||||
|         env: | ||||
|           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||
							
								
								
									
										68
									
								
								.github/workflows/maven-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										68
									
								
								.github/workflows/maven-publish.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,22 +1,31 @@ | ||||
| # This workflow will build a package using Maven and then publish it to GitHub packages when a release is created | ||||
| # For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path | ||||
|  | ||||
| name: Release Artifacts | ||||
| name: Release Artifacts to Maven Central | ||||
|  | ||||
| on: | ||||
|   release: | ||||
|     types: [ created ] | ||||
|  | ||||
|  | ||||
| #on: | ||||
| #  pull_request: | ||||
| #    types: [ opened, reopened ] | ||||
| #    branches: [ "main" ] | ||||
|  | ||||
|  | ||||
| jobs: | ||||
|   build: | ||||
|  | ||||
|     runs-on: ubuntu-latest | ||||
|  | ||||
|     permissions: | ||||
|       contents: read | ||||
|       contents: write | ||||
|       packages: write | ||||
|  | ||||
|     steps: | ||||
|       - uses: actions/checkout@v3 | ||||
|  | ||||
|       - name: Set up JDK 17 | ||||
|         uses: actions/setup-java@v3 | ||||
|         with: | ||||
| @@ -25,6 +34,19 @@ jobs: | ||||
|           server-id: github # Value of the distributionManagement/repository/id field of the pom.xml | ||||
|           settings-path: ${{ github.workspace }} # location for the settings.xml file | ||||
|  | ||||
|       - name: maven-settings-xml-action | ||||
|         uses: whelk-io/maven-settings-xml-action@v22 | ||||
|         with: | ||||
|           servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]' | ||||
|  | ||||
|       - name: Import GPG key | ||||
|         uses: crazy-max/ghaction-import-gpg@v6 | ||||
|         with: | ||||
|           gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} | ||||
|           passphrase: ${{ secrets.GPG_PASSPHRASE }} | ||||
|       - name: List keys | ||||
|         run: gpg -K | ||||
|  | ||||
|       - name: Find and Replace | ||||
|         uses: jacobtomlinson/gha-find-replace@v3 | ||||
|         with: | ||||
| @@ -32,10 +54,42 @@ jobs: | ||||
|           replace: ${{ github.ref_name }} | ||||
|           regex: false | ||||
|  | ||||
|       - name: Build with Maven | ||||
|         run: mvn --file pom.xml -U clean package -Punit-tests | ||||
|       - name: Find and Replace | ||||
|         uses: jacobtomlinson/gha-find-replace@v3 | ||||
|         with: | ||||
|           find: "mvn-repo-id" | ||||
|           replace: central | ||||
|           regex: false | ||||
|  | ||||
|       - name: Publish to GitHub Packages Apache Maven | ||||
|         run: mvn deploy -s $GITHUB_WORKSPACE/settings.xml --file pom.xml | ||||
|       - name: Publish to Maven Central | ||||
|         run: mvn deploy -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=central -Drepo.user=${{ secrets.MVN_USER }} -Drepo.pass=${{ secrets.MVN_PASS }} | ||||
|  | ||||
|       - name: Upload Release Asset - JAR | ||||
|         uses: actions/upload-release-asset@v1 | ||||
|         env: | ||||
|           GITHUB_TOKEN: ${{ github.token }} | ||||
|           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||
|         with: | ||||
|           upload_url: ${{ github.event.release.upload_url }} | ||||
|           asset_path: target/ollama4j-${{ github.ref_name }}.jar | ||||
|           asset_name: ollama4j-${{ github.ref_name }}.jar | ||||
|           asset_content_type: application/x-jar | ||||
|  | ||||
|       - name: Upload Release Asset - Javadoc JAR | ||||
|         uses: actions/upload-release-asset@v1 | ||||
|         env: | ||||
|           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||
|         with: | ||||
|           upload_url: ${{ github.event.release.upload_url }} | ||||
|           asset_path: target/ollama4j-${{ github.ref_name }}-javadoc.jar | ||||
|           asset_name: ollama4j-${{ github.ref_name }}-javadoc.jar | ||||
|           asset_content_type: application/x-jar | ||||
|  | ||||
|       - name: Upload Release Asset - Sources JAR | ||||
|         uses: actions/upload-release-asset@v1 | ||||
|         env: | ||||
|           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||
|         with: | ||||
|           upload_url: ${{ github.event.release.upload_url }} | ||||
|           asset_path: target/ollama4j-${{ github.ref_name }}-sources.jar | ||||
|           asset_name: ollama4j-${{ github.ref_name }}-sources.jar | ||||
|           asset_content_type: application/x-jar | ||||
							
								
								
									
										7
									
								
								.github/workflows/publish-docs.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										7
									
								
								.github/workflows/publish-docs.yml
									
									
									
									
										vendored
									
									
								
							| @@ -46,6 +46,13 @@ jobs: | ||||
|       - run: cd docs && npm ci | ||||
|       - run: cd docs && npm run build | ||||
|  | ||||
|       - name: Find and Replace | ||||
|         uses: jacobtomlinson/gha-find-replace@v3 | ||||
|         with: | ||||
|           find: "ollama4j-revision" | ||||
|           replace: ${{ github.ref_name }} | ||||
|           regex: false | ||||
|  | ||||
|       - name: Build with Maven | ||||
|         run: mvn --file pom.xml -U clean package && cp -r ./target/apidocs/. ./docs/build/apidocs | ||||
|  | ||||
|   | ||||
							
								
								
									
										6
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										6
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -37,6 +37,8 @@ build/ | ||||
| ### Mac OS ### | ||||
| .DS_Store | ||||
| /.idea/ | ||||
| /src/main/java/io/github/amithkoujalgi/ollama4j/core/localtests/ | ||||
| pom.xml.* | ||||
| release.properties | ||||
| release.properties | ||||
| !.idea/icon.svg | ||||
|  | ||||
| src/main/java/io/github/ollama4j/localtests | ||||
							
								
								
									
										18
									
								
								.idea/icon.svg
									
									
									
										generated
									
									
									
										Normal file
									
								
							
							
						
						
									
										18
									
								
								.idea/icon.svg
									
									
									
										generated
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,18 @@ | ||||
| <?xml version="1.0" encoding="UTF-8"?> | ||||
| <svg version="1.1" viewBox="0 0 1478 2048" width="1280" height="1280" xmlns="http://www.w3.org/2000/svg"> | ||||
| <path transform="translate(0)" d="m0 0h1478v2048h-1478z" fill="#FEFEFE"/> | ||||
| <path transform="translate(411,47)" d="m0 0h24l21 5 17 8 14 10 12 11 10 10 12 16 14 24 11 24 9 24 8 27 6 25 4 21 3 19 3 25 6-2 16-9 29-13 28-10 30-8 26-4 27-2h16l30 2 32 5 19 5 30 10 26 11 20 10 13 8 2-15 6-39 8-36 6-20 9-27 11-24 10-19 12-18 9-11 9-10 12-11 17-11 15-7 19-4h24l18 4 16 7 12 8 10 8 17 17 13 18 12 22 9 20 7 19 9 30 7 33 5 33 3 29 1 15v79l-3 30-4 29-4 20 16 15 17 17 8 7 18 18 9 11 10 12 14 21 9 16 8 16 5 17 7 19 10 34 5 27 3 24 1 14v42l-4 35-6 29-8 27-9 22-12 25-13 22-5 7 2 6 14 29 12 31 8 26 7 29 6 36 2 21 1 19v37l-3 34-4 25-5 24-8 27-8 21-7 16-11 21-15 24 2 5 7 10 8 15 11 29 8 29 6 31 3 22 2 24v57l-4 33-6 27-3 9-3 1h-89l-2-1v-11l2-13 6-21 3-19 1-9v-48l-3-31-4-22-7-27-6-16-8-16-12-21-4-11-3-17v-31l4-13 6-10 11-16 9-15 11-23 10-31 6-26 3-22 1-16v-33l-2-27-4-27-10-39-9-25-8-18-13-25-12-19-4-10-1-5v-13l3-11 4-8 9-10 13-17 8-13 8-14 11-27 7-25 4-21 2-20v-27l-2-22-5-27-6-21-8-22-12-25-8-14-11-16-8-10-11-13-13-13-8-7-17-13-18-11-17-9-15-6-23-7-14-3-17-2h-28l-18 2h-18l-10-3-6-5-16-32-8-14-11-15-8-10-9-10-7-7-14-11-12-9-16-10-19-10-13-6-20-8-17-5-24-5-15-2h-33l-25 4-24 6-22 8-20 9-20 11-19 13-10 8-11 9-13 13-13 17-10 15-10 18-8 18-9 10-6 3h-21l-19-2h-29l-20 3-14 3-27 9-21 10-18 11-16 12-15 13-15 15-11 14-12 17-10 17-8 16-10 25-7 24-5 24-3 25v31l4 30 5 21 9 27 12 25 10 16 7 9 16 15 6 12 3 9v15l-6 16-13 21-14 27-8 20-8 25-7 27-4 23-3 31v35l3 32 5 26 9 30 6 15 10 21 11 17 12 16 8 13 4 13v19l-4 13-12 22-9 15-8 16-7 19-7 26-5 30-2 23v42l3 26 5 22 3 12 1 9v10l-3 1h-81l-11-1-5-21-5-30-2-22v-52l2-25 5-34 5-23 7-25 8-21 11-23 9-12-1-5-14-22-10-19-11-25-10-30-6-24-5-29-3-27-1-17v-35l2-30 4-29 5-26 10-36 9-25 10-23 10-21-1-7-10-14-14-26-7-15-8-20-8-26-6-29-3-25v-66l3-27 7-33 9-29 10-25 8-16 9-17 11-17 11-15 11-13 7-8 56-56-1-6-2-5-4-26-3-32-1-17v-69l3-39 5-35 6-29 8-30 8-23 12-27 12-21 12-16 11-12 7-7 13-10 16-9 11-4z" fill="#010000"/> | ||||
| <path transform="translate(856,1181)" d="m0 0h13l10 4 6 7 4 9 6 29 5 22 8 16 4-13 7-23 5-12 6-9 9-8 7-3 5-1h10l8 4 5 8v11l-6 17-6 15-4 16v22l8 38 1 9v11l-3 16-8 16-9 9-10 8-6 7-4 8-2 7-1 12v51l-2 17-4 13-11 20-5 15-3 17v21l3 17 6 16 11 28 13 38 10 37 7 33 5 33 3 28 1 18v49l-2 24-4 22-6 18-6 10-7 8-10 6-13 4h-17l-7-4-10-9-11-15-11-16-12-17-9-11-9-10-10-9-13-8-14-5-5-1h-26l-16 4-18 8-18 11-16 12-16 13-17 14-20 15-16 9-13 4h-11l-10-3-7-6-4-8-2-9v-39l2-25-6 8-2 1h-8l-13-4-8-7-4-7v-9l6-12 8-10 9-11 9-14 5-12 2-11v-17l-4-20-6-21-2-13v-16l2-12 8-16 9-13 12-16 13-21 8-17 9-27 4-20 4-39 3-39 3-63v-98l-3-35-3-13 5 2 16 11 13 10 11 9 14 12 17 16 33 33 7 8 12 13 9 11 12 14 8 10 10 13 12 16 13 18 18 27 12 19 6 8 6 4 9 1 12-3 10-6 8-11 4-11v-33l-3-17-4-11-5-7-6-3-15-4-16-9-16-8-4-1h-12l-23 5-8-1-7-6-4-10v-10l4-8 9-8 13-6 13-4 10-1-9-11-8-10-10-15-8-16-7-15-9-27-1-5v-13l3-8 8-8 9-4 6-1 8 3 7 9 15 31 8 12 8 9 2 1-6-21-4-20-1-8v-33l3-10 4-5z" fill="#020101"/> | ||||
| <path transform="translate(735,724)" d="m0 0h30l24 2 27 4 20 5 27 9 29 14 18 11 16 12 11 9 15 14 12 14 10 14 9 15 7 14 7 19 5 20 2 14v34l-3 20-6 19-6 15-11 19-9 12-11 13-15 15-11 9-16 11-22 12-26 10-13 4-21 5-19 2h-117l-24-3-27-6-28-10-16-8-14-8-14-10-10-8-10-9-10-10-11-14-10-15-10-21-6-18-4-19-1-9v-31l2-15 5-20 8-21 10-19 8-12 10-13 12-13 13-13 11-9 15-11 15-9 14-8 21-9 16-6 22-6 29-5z" fill="#FEFEFE"/> | ||||
| <path transform="translate(816,1496)" d="m0 0 5 1 13 21 10 18 14 27 15 31 17 40 10 27 12 36 8 28 7 30 5 28 3 28v60l-2 31-3 23-5 17-4 6-5 4-4 1h-14l-6-4-11-14-10-15-12-17-9-11-12-14-8-7-14-10-16-8-12-4-12-2h-20l-16 3-15 5-16 8-18 12-14 11-15 13-14 13-22 18-14 7-4 1h-7l-5-6-3-13v-29l3-32 6-45 11-66 20-100 13-61 2-6 11-7 4-2 7 11 10 10 13 8 18 6 6 1h25l17-4 16-7 13-9 7-6 9-11 8-14 5-15 2-10v-20l-3-11z" fill="#FEFEFE"/> | ||||
| <path transform="translate(735,724)" d="m0 0h30l24 2 27 4 20 5 27 9 29 14 18 11 16 12 11 9 15 14 12 14 10 14 9 15 7 14 7 19 5 20 2 14v34l-3 20-6 19-6 15-11 19-9 12-11 13-15 15-11 9-16 11-22 12-26 10-13 4-21 5-19 2h-117l-24-3-27-6-28-10-16-8-14-8-14-10-10-8-10-9-10-10-11-14-10-15-10-21-6-18-4-19-1-9v-31l2-15 5-20 8-21 10-19 8-12 10-13 12-13 13-13 11-9 15-11 15-9 14-8 21-9 16-6 22-6 29-5zm0 63-20 2-20 4-29 10-17 8-17 10-17 13-15 14-9 11-9 14-9 19-6 20-2 14v11l3 16 6 18 7 14 8 11 11 12 10 9 18 12 16 8 15 6 25 6 15 2 14 1h89l21-3 25-6 26-11 15-9 10-8 10-9 8-8 12-18 6-13 5-16 2-12v-15l-2-14-5-16-5-12-7-13-12-16-12-13-8-7-16-12-14-8-15-8-28-10-21-5-14-2-13-1z" fill="#010101"/> | ||||
| <path transform="translate(1081,140)" d="m0 0h5l5 4 9 11 11 19 11 28 6 21 7 32 4 27 3 42v49l-3 47-1 4-6-1-10-4-22-4-44-6-27-2-9-15-2-5v-40l2-34 5-38 8-38 5-20 11-29 11-23 7-10 11-13z" fill="#FEFEFE"/> | ||||
| <path transform="translate(423,139)" d="m0 0 4 2 10 10 10 14 11 22 9 24 7 25 6 29 5 30 3 31 1 16v45l-6 14-5 6-29 2-31 4-35 6-11 4h-3l-3-28-1-27v-41l2-36 5-35 8-37 6-19 8-21 8-16 8-12 8-9z" fill="#FEFEFE"/> | ||||
| <path transform="translate(745,1472)" d="m0 0h9l16 3 14 7 10 9 6 10 3 9 1 6v15l-4 14-8 16-9 10-9 8-15 8-12 4-10 2h-15l-13-3-16-8-11-10-6-10-5-12-2-11v-8l2-10h2l1-5 4-8 8-10 11-9 17-9 12-5 8-2z" fill="red"/> | ||||
| <path transform="translate(436,735)" d="m0 0h16l15 4 12 7 10 9 7 9 5 11 2 8v21l-4 14-6 12-7 9-14 14-11 7-12 4h-15l-14-3-11-4-11-7-9-10-8-14-2-9v-21l4-14 8-16 6-9 10-10 14-8 9-3z" fill="#010101"/> | ||||
| <path transform="translate(1055,735)" d="m0 0h15l16 4 11 6 10 8 7 9 8 15 5 14 1 6v20l-4 13-7 11-7 8-14 9-16 5-5 1h-16l-13-4-11-7-17-17-8-14-5-14-1-5v-20l4-13 6-10 9-10 11-8 11-5z" fill="#010101"/> | ||||
| <path transform="translate(717,869)" d="m0 0h9l12 4 13 8 5-1 8-6 9-4 12-1 10 3 6 4 6 9 1 2v15l-5 10-8 7-11 8-6 4-1 6 3 17v19l-5 8-9 6-8 2h-10l-11-2-8-6-4-6-1-3v-15l3-19v-7l-16-10-11-11-3-5-1-4v-13l5-10 6-5z" fill="#020101"/> | ||||
| <path transform="translate(717,1479)" d="m0 0 2 1-2 3h2v4 2l6 1 2 1 3 13-1 10-5 10h-2v2h-2v2h-2v2l-5 2-3 2-9 2v-2l-5 1-9-5-5-4v-2h-2l-2-2-6 3 1-7 5-10 8-10 11-9 17-9z" fill="pink"/> | ||||
| <path transform="translate(599,1667)" d="m0 0 4 1v14l-9 48-3 19-2 1-8-20-3-11v-15l5-15 8-14 6-7z" fill="white"/> | ||||
| <path transform="translate(937,1063)" d="m0 0 2 1-11 9-15 10-19 10-26 10-13 4-21 5-19 2h-117l-9-1v-1h82l37-1 18-2 32-7 14-5 16-6 10-4 17-9 11-7z" fill="#553D3C"/> | ||||
| </svg> | ||||
| After Width: | Height: | Size: 6.1 KiB | 
							
								
								
									
										128
									
								
								CODE_OF_CONDUCT.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										128
									
								
								CODE_OF_CONDUCT.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,128 @@ | ||||
| # Contributor Covenant Code of Conduct | ||||
|  | ||||
| ## Our Pledge | ||||
|  | ||||
| We as members, contributors, and leaders pledge to make participation in our | ||||
| community a harassment-free experience for everyone, regardless of age, body | ||||
| size, visible or invisible disability, ethnicity, sex characteristics, gender | ||||
| identity and expression, level of experience, education, socio-economic status, | ||||
| nationality, personal appearance, race, religion, or sexual identity | ||||
| and orientation. | ||||
|  | ||||
| We pledge to act and interact in ways that contribute to an open, welcoming, | ||||
| diverse, inclusive, and healthy community. | ||||
|  | ||||
| ## Our Standards | ||||
|  | ||||
| Examples of behavior that contributes to a positive environment for our | ||||
| community include: | ||||
|  | ||||
| * Demonstrating empathy and kindness toward other people | ||||
| * Being respectful of differing opinions, viewpoints, and experiences | ||||
| * Giving and gracefully accepting constructive feedback | ||||
| * Accepting responsibility and apologizing to those affected by our mistakes, | ||||
|   and learning from the experience | ||||
| * Focusing on what is best not just for us as individuals, but for the | ||||
|   overall community | ||||
|  | ||||
| Examples of unacceptable behavior include: | ||||
|  | ||||
| * The use of sexualized language or imagery, and sexual attention or | ||||
|   advances of any kind | ||||
| * Trolling, insulting or derogatory comments, and personal or political attacks | ||||
| * Public or private harassment | ||||
| * Publishing others' private information, such as a physical or email | ||||
|   address, without their explicit permission | ||||
| * Other conduct which could reasonably be considered inappropriate in a | ||||
|   professional setting | ||||
|  | ||||
| ## Enforcement Responsibilities | ||||
|  | ||||
| Community leaders are responsible for clarifying and enforcing our standards of | ||||
| acceptable behavior and will take appropriate and fair corrective action in | ||||
| response to any behavior that they deem inappropriate, threatening, offensive, | ||||
| or harmful. | ||||
|  | ||||
| Community leaders have the right and responsibility to remove, edit, or reject | ||||
| comments, commits, code, wiki edits, issues, and other contributions that are | ||||
| not aligned to this Code of Conduct, and will communicate reasons for moderation | ||||
| decisions when appropriate. | ||||
|  | ||||
| ## Scope | ||||
|  | ||||
| This Code of Conduct applies within all community spaces, and also applies when | ||||
| an individual is officially representing the community in public spaces. | ||||
| Examples of representing our community include using an official e-mail address, | ||||
| posting via an official social media account, or acting as an appointed | ||||
| representative at an online or offline event. | ||||
|  | ||||
| ## Enforcement | ||||
|  | ||||
| Instances of abusive, harassing, or otherwise unacceptable behavior may be | ||||
| reported to the community leaders responsible for enforcement at | ||||
| koujalgi.amith@gmail.com. | ||||
| All complaints will be reviewed and investigated promptly and fairly. | ||||
|  | ||||
| All community leaders are obligated to respect the privacy and security of the | ||||
| reporter of any incident. | ||||
|  | ||||
| ## Enforcement Guidelines | ||||
|  | ||||
| Community leaders will follow these Community Impact Guidelines in determining | ||||
| the consequences for any action they deem in violation of this Code of Conduct: | ||||
|  | ||||
| ### 1. Correction | ||||
|  | ||||
| **Community Impact**: Use of inappropriate language or other behavior deemed | ||||
| unprofessional or unwelcome in the community. | ||||
|  | ||||
| **Consequence**: A private, written warning from community leaders, providing | ||||
| clarity around the nature of the violation and an explanation of why the | ||||
| behavior was inappropriate. A public apology may be requested. | ||||
|  | ||||
| ### 2. Warning | ||||
|  | ||||
| **Community Impact**: A violation through a single incident or series | ||||
| of actions. | ||||
|  | ||||
| **Consequence**: A warning with consequences for continued behavior. No | ||||
| interaction with the people involved, including unsolicited interaction with | ||||
| those enforcing the Code of Conduct, for a specified period of time. This | ||||
| includes avoiding interactions in community spaces as well as external channels | ||||
| like social media. Violating these terms may lead to a temporary or | ||||
| permanent ban. | ||||
|  | ||||
| ### 3. Temporary Ban | ||||
|  | ||||
| **Community Impact**: A serious violation of community standards, including | ||||
| sustained inappropriate behavior. | ||||
|  | ||||
| **Consequence**: A temporary ban from any sort of interaction or public | ||||
| communication with the community for a specified period of time. No public or | ||||
| private interaction with the people involved, including unsolicited interaction | ||||
| with those enforcing the Code of Conduct, is allowed during this period. | ||||
| Violating these terms may lead to a permanent ban. | ||||
|  | ||||
| ### 4. Permanent Ban | ||||
|  | ||||
| **Community Impact**: Demonstrating a pattern of violation of community | ||||
| standards, including sustained inappropriate behavior,  harassment of an | ||||
| individual, or aggression toward or disparagement of classes of individuals. | ||||
|  | ||||
| **Consequence**: A permanent ban from any sort of public interaction within | ||||
| the community. | ||||
|  | ||||
| ## Attribution | ||||
|  | ||||
| This Code of Conduct is adapted from the [Contributor Covenant][homepage], | ||||
| version 2.0, available at | ||||
| https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. | ||||
|  | ||||
| Community Impact Guidelines were inspired by [Mozilla's code of conduct | ||||
| enforcement ladder](https://github.com/mozilla/diversity). | ||||
|  | ||||
| [homepage]: https://www.contributor-covenant.org | ||||
|  | ||||
| For answers to common questions about this code of conduct, see the FAQ at | ||||
| https://www.contributor-covenant.org/faq. Translations are available at | ||||
| https://www.contributor-covenant.org/translations. | ||||
							
								
								
									
										8
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										8
									
								
								Makefile
									
									
									
									
									
								
							| @@ -1,19 +1,19 @@ | ||||
| build: | ||||
| 	mvn -B clean install | ||||
|  | ||||
| ut: | ||||
| unit-tests: | ||||
| 	mvn clean test -Punit-tests | ||||
|  | ||||
| it: | ||||
| integration-tests: | ||||
| 	mvn clean verify -Pintegration-tests | ||||
|  | ||||
| doxygen: | ||||
| 	doxygen Doxyfile | ||||
|  | ||||
| list-releases: | ||||
| 	curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=asc&page=0&size=12&filter=namespace%3Aio.github.amithkoujalgi%2Cname%3Aollama4j' \ | ||||
| 	curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=desc&page=0&size=20&filter=namespace%3Aio.github.ollama4j%2Cname%3Aollama4j' \ | ||||
|       --compressed \ | ||||
|       --silent | jq '.components[].version' | ||||
|       --silent | jq -r '.components[].version' | ||||
|  | ||||
| build-docs: | ||||
| 	npm i --prefix docs && npm run build --prefix docs | ||||
|   | ||||
							
								
								
									
										296
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										296
									
								
								README.md
									
									
									
									
									
								
							| @@ -1,46 +1,51 @@ | ||||
| <div style="text-align: center"> | ||||
|  | ||||
| ### Ollama4j | ||||
|  | ||||
| <img src='https://raw.githubusercontent.com/amithkoujalgi/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon"> | ||||
| <p align="center"> | ||||
|   <img src='https://raw.githubusercontent.com/ollama4j/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon"> | ||||
| </p> | ||||
|  | ||||
| A Java library (wrapper/binding) for [Ollama](https://ollama.ai/) server. | ||||
| <div align="center"> | ||||
| A Java library (wrapper/binding) for Ollama server. | ||||
|  | ||||
| Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/). | ||||
| Find more details on the [website](https://ollama4j.github.io/ollama4j/). | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
| [](https://codecov.io/gh/amithkoujalgi/ollama4j) | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
|  | ||||
| [](https://codecov.io/gh/ollama4j/ollama4j) | ||||
|  | ||||
|  | ||||
| </div> | ||||
|  | ||||
| [//]: # () | ||||
| [//]: # () | ||||
|  | ||||
| [//]: # () | ||||
| [//]: # () | ||||
|  | ||||
| ## Table of Contents | ||||
|  | ||||
| - [How does it work?](#how-does-it-work) | ||||
| - [Requirements](#requirements) | ||||
| - [Installation](#installation) | ||||
| - [API Spec](https://amithkoujalgi.github.io/ollama4j/docs/category/apis---model-management) | ||||
| - [Javadoc Spec](https://amithkoujalgi.github.io/ollama4j/apidocs/) | ||||
| - [API Spec](https://ollama4j.github.io/ollama4j/category/apis---model-management) | ||||
| - [Javadoc](https://ollama4j.github.io/ollama4j/apidocs/) | ||||
| - [Development](#development) | ||||
| - [Contributions](#get-involved) | ||||
| - [References](#references) | ||||
| @@ -61,38 +66,106 @@ Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/). | ||||
|  | ||||
| #### Requirements | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
| [![][ollama-shield]][ollama] **Or** [![][ollama-docker-shield]][ollama-docker] | ||||
|  | ||||
| [ollama]: https://ollama.ai/ | ||||
| <a href="https://ollama.com/" target="_blank"> | ||||
|   <img src="https://img.shields.io/badge/v0.3.0-green.svg?style=for-the-badge&labelColor=gray&label=Ollama&color=blue" alt=""/> | ||||
| </a> | ||||
|  | ||||
| [ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=just-the-message&labelColor=gray | ||||
| <table> | ||||
| <tr> | ||||
| <td>  | ||||
|  | ||||
| [ollama-docker]: https://hub.docker.com/r/ollama/ollama | ||||
| <a href="https://ollama.ai/" target="_blank">Local Installation</a> | ||||
|  | ||||
| [ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=just-the-message&labelColor=gray | ||||
| </td>  | ||||
|  | ||||
| #### Installation | ||||
| <td>  | ||||
|  | ||||
| Check the releases [here](https://github.com/amithkoujalgi/ollama4j/releases). | ||||
| <a href="https://hub.docker.com/r/ollama/ollama" target="_blank">Docker Installation</a> | ||||
|  | ||||
|  | ||||
| </td> | ||||
| </tr> | ||||
| <tr> | ||||
| <td> | ||||
|  | ||||
| ##### For Maven | ||||
| <a href="https://ollama.com/download/Ollama-darwin.zip" target="_blank">Download for macOS</a> | ||||
|  | ||||
| 1. In your Maven project, add this dependency: | ||||
| <a href="https://ollama.com/download/OllamaSetup.exe" target="_blank">Download for Windows</a> | ||||
|  | ||||
| Install on Linux | ||||
|  | ||||
| ```shell  | ||||
| curl -fsSL https://ollama.com/install.sh | sh | ||||
| ``` | ||||
|  | ||||
| </td> | ||||
| <td> | ||||
|  | ||||
|  | ||||
|  | ||||
| CPU only | ||||
|  | ||||
| ```shell | ||||
| docker run -d -p 11434:11434 \ | ||||
|   -v ollama:/root/.ollama \ | ||||
|   --name ollama \ | ||||
|   ollama/ollama | ||||
| ``` | ||||
|  | ||||
| NVIDIA GPU | ||||
|  | ||||
| ```shell | ||||
| docker run -d -p 11434:11434 \ | ||||
|   --gpus=all \ | ||||
|   -v ollama:/root/.ollama \ | ||||
|   --name ollama \ | ||||
|   ollama/ollama | ||||
| ``` | ||||
|  | ||||
| </td> | ||||
| </tr> | ||||
| </table> | ||||
|  | ||||
| ## Installation | ||||
|  | ||||
| > [!NOTE] | ||||
| > We are now publishing the artifacts to both Maven Central and GitHub package repositories. | ||||
| > | ||||
| > Track the releases [here](https://github.com/ollama4j/ollama4j/releases) and update the dependency version | ||||
| > according to your requirements. | ||||
|  | ||||
| ### For Maven | ||||
|  | ||||
| #### Using [Maven Central](https://central.sonatype.com/) | ||||
|  | ||||
| [![][ollama4j-mvn-releases-shield]][ollama4j-mvn-releases-link] | ||||
|  | ||||
| [ollama4j-mvn-releases-link]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j/overview | ||||
|  | ||||
| [ollama4j-mvn-releases-shield]: https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20Maven%20Central | ||||
|  | ||||
| In your Maven project, add this dependency: | ||||
|  | ||||
| ```xml | ||||
|  | ||||
| <dependency> | ||||
|     <groupId>io.github.amithkoujalgi</groupId> | ||||
|     <groupId>io.github.ollama4j</groupId> | ||||
|     <artifactId>ollama4j</artifactId> | ||||
|     <version>v1.0.74</version> | ||||
|     <version>1.0.89</version> | ||||
| </dependency> | ||||
| ``` | ||||
|  | ||||
| 2. Add repository to your project's pom.xml: | ||||
| #### Using GitHub's Maven Package Repository | ||||
|  | ||||
| [![][ollama4j-releases-shield]][ollama4j-releases-link] | ||||
|  | ||||
| [ollama4j-releases-link]: https://github.com/ollama4j/ollama4j/releases | ||||
|  | ||||
| [ollama4j-releases-shield]: https://img.shields.io/github/v/release/ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20GitHub%20Packages | ||||
|  | ||||
| 1. Add `GitHub Maven Packages` repository to your project's `pom.xml` or your `settings.xml`: | ||||
|  | ||||
| ```xml | ||||
|  | ||||
| @@ -100,7 +173,7 @@ Check the releases [here](https://github.com/amithkoujalgi/ollama4j/releases). | ||||
|     <repository> | ||||
|         <id>github</id> | ||||
|         <name>GitHub Apache Maven Packages</name> | ||||
|         <url>https://maven.pkg.github.com/amithkoujalgi/ollama4j</url> | ||||
|         <url>https://maven.pkg.github.com/ollama4j/ollama4j</url> | ||||
|         <releases> | ||||
|             <enabled>true</enabled> | ||||
|         </releases> | ||||
| @@ -111,7 +184,7 @@ Check the releases [here](https://github.com/amithkoujalgi/ollama4j/releases). | ||||
| </repositories> | ||||
| ``` | ||||
|  | ||||
| 3. Add GitHub server to settings.xml. (Usually available at ~/.m2/settings.xml) | ||||
| 2. Add `GitHub` server to settings.xml. (Usually available at ~/.m2/settings.xml) | ||||
|  | ||||
| ```xml | ||||
|  | ||||
| @@ -129,22 +202,24 @@ Check the releases [here](https://github.com/amithkoujalgi/ollama4j/releases). | ||||
| </settings> | ||||
| ``` | ||||
|  | ||||
| ##### For Gradle | ||||
| 3. In your Maven project, add this dependency: | ||||
|  | ||||
| In your Gradle project, add the dependency using the Kotlin DSL or the Groovy DSL: | ||||
| ```xml | ||||
|  | ||||
| ```kotlin | ||||
| dependencies { | ||||
| <dependency> | ||||
|     <groupId>io.github.ollama4j</groupId> | ||||
|     <artifactId>ollama4j</artifactId> | ||||
|     <version>1.0.89</version> | ||||
| </dependency> | ||||
| ``` | ||||
|  | ||||
|     val ollama4jVersion = "1.0.74" | ||||
| ### For Gradle | ||||
|  | ||||
|     implementation("io.github.amithkoujalgi:ollama4j:$ollama4jVersion") | ||||
| } | ||||
|  ``` | ||||
| 1. Add the dependency | ||||
|  | ||||
| ```groovy | ||||
| dependencies { | ||||
|     implementation("io.github.amithkoujalgi:ollama4j:1.0.74") | ||||
|     implementation 'io.github.ollama4j:ollama4j:1.0.79' | ||||
| } | ||||
| ``` | ||||
|  | ||||
| @@ -152,19 +227,20 @@ dependencies { | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
| [//]: # () | ||||
| [//]: # () | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
| [//]: # ([![][lib-shield]][lib]) | ||||
|  | ||||
| [lib]: https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j | ||||
| [lib]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j | ||||
|  | ||||
| [lib-shield]: https://img.shields.io/badge/ollama4j-get_latest_version-blue.svg?style=just-the-message&labelColor=gray | ||||
|  | ||||
| #### API Spec | ||||
|  | ||||
| Find the full API specifications on the [website](https://amithkoujalgi.github.io/ollama4j/). | ||||
| > [!TIP] | ||||
| > Find the full API specifications on the [website](https://ollama4j.github.io/ollama4j/). | ||||
|  | ||||
| #### Development | ||||
|  | ||||
| @@ -177,66 +253,85 @@ make build | ||||
| Run unit tests: | ||||
|  | ||||
| ```shell | ||||
| make ut | ||||
| make unit-tests | ||||
| ``` | ||||
|  | ||||
| Run integration tests: | ||||
|  | ||||
| ```shell | ||||
| make it | ||||
| make integration-tests | ||||
| ``` | ||||
|  | ||||
| #### Releases | ||||
|  | ||||
| Releases (newer artifact versions) are done automatically on pushing the code to the `main` branch through GitHub | ||||
| Actions CI workflow. | ||||
| Newer artifacts are published via GitHub Actions CI workflow when a new release is created from `main` branch. | ||||
|  | ||||
| #### Who's using Ollama4j? | ||||
| ## ⭐ Give us a Star! | ||||
|  | ||||
| - `Datafaker`: a library to generate fake data | ||||
|     - https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api | ||||
| - `Vaadin Web UI`: UI-Tester for Interactions with Ollama via ollama4j | ||||
|     - https://github.com/TEAMPB/ollama4j-vaadin-ui | ||||
| - `ollama-translator`: Minecraft 1.20.6 spigot plugin allows to easily break language barriers by using ollama on the | ||||
|   server to translate all messages into a specific target language. | ||||
|     - https://github.com/liebki/ollama-translator | ||||
| If you like or are using this project to build your own, please give us a star. It's a free way to show your support. | ||||
|  | ||||
| #### Traction | ||||
| ## Who's using Ollama4j? | ||||
|  | ||||
| [](https://star-history.com/#amithkoujalgi/ollama4j&Date) | ||||
| | # | Project Name      | Description                                                                                                   | Link                                                                                                                                           | | ||||
| |---|-------------------|---------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------| | ||||
| | 1 | Datafaker         | A library to generate fake data                                                                               | [GitHub](https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api)                                                         | | ||||
| | 2 | Vaadin Web UI     | UI-Tester for interactions with Ollama via ollama4j                                                           | [GitHub](https://github.com/TEAMPB/ollama4j-vaadin-ui)                                                                                         | | ||||
| | 3 | ollama-translator | A Minecraft 1.20.6 Spigot plugin that translates all messages into a specific target language via Ollama      | [GitHub](https://github.com/liebki/ollama-translator)                                                                                          | | ||||
| | 4 | AI Player         | A Minecraft mod that adds an intelligent "second player" to the game                                          | [GitHub](https://github.com/shasankp000/AI-Player), <br/> [Reddit Thread](https://www.reddit.com/r/fabricmc/comments/1e65x5s/comment/ldr2vcf/) | | ||||
| | 5 | Ollama4j Web UI   | A web UI for Ollama written in Java using Spring Boot, Vaadin, and Ollama4j                                   | [GitHub](https://github.com/ollama4j/ollama4j-web-ui)                                                                                          | | ||||
| | 6 | JnsCLI            | A command-line tool for Jenkins that manages jobs, builds, and configurations, with AI-powered error analysis | [GitHub](https://github.com/mirum8/jnscli)                                                                                                     | | ||||
| | 7 | Katie Backend     | An open-source AI-based question-answering platform for accessing private domain knowledge                    | [GitHub](https://github.com/wyona/katie-backend)                                                                                               | | ||||
| | 8 | TeleLlama3 Bot    | A question-answering Telegram bot                                                                             | [Repo](https://git.hiast.edu.sy/mohamadbashar.disoki/telellama3-bot)                                                                           | | ||||
| | 9 | moqui-wechat      | A moqui-wechat component                                                                                      | [GitHub](https://github.com/heguangyong/moqui-wechat)                                                                                          | | ||||
|  | ||||
| ### Areas of improvement | ||||
| ## Traction | ||||
|  | ||||
| - [x] Use Java-naming conventions for attributes in the request/response models instead of the | ||||
|   snake-case conventions. ( | ||||
|   possibly with Jackson-mapper's `@JsonProperty`) | ||||
| - [x] Fix deprecated HTTP client code | ||||
| - [x] Setup logging | ||||
| - [x] Use lombok | ||||
| - [x] Update request body creation with Java objects | ||||
| - [ ] Async APIs for images | ||||
| - [ ] Support for function calling with models like Mistral | ||||
|     - [x] generate in sync mode | ||||
|     - [ ] generate in async mode | ||||
| - [ ] Add custom headers to requests | ||||
| - [x] Add additional params for `ask` APIs such as: | ||||
|     - [x] `options`: additional model parameters for the Modelfile such as `temperature` - | ||||
|       Supported [params](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values). | ||||
|     - [x] `system`: system prompt (overrides what is defined in the Modelfile) | ||||
|     - [x] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile) | ||||
|     - [x] `context`: the context parameter returned from a previous request, which can be used to keep a | ||||
|       short | ||||
|       conversational memory | ||||
|     - [x] `stream`: Add support for streaming responses from the model | ||||
| - [ ] Add test cases | ||||
| - [ ] Handle exceptions better (maybe throw more appropriate exceptions) | ||||
| [](https://star-history.com/#ollama4j/ollama4j&Date) | ||||
|  | ||||
| ## Get Involved | ||||
|  | ||||
| <div align="center"> | ||||
|  | ||||
| <a href=""></a> | ||||
| <a href=""></a> | ||||
| <a href=""></a> | ||||
| <a href=""></a> | ||||
| <a href=""></a> | ||||
|  | ||||
| </div> | ||||
|  | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
| [//]: # () | ||||
|  | ||||
| ### Get Involved | ||||
|  | ||||
| Contributions are most welcome! Whether it's reporting a bug, proposing an enhancement, or helping | ||||
| with code - any sort | ||||
| of contribution is much appreciated. | ||||
|  | ||||
| ## 🏷️ License and Citation | ||||
|  | ||||
| The code is available under [MIT License](./LICENSE). | ||||
|  | ||||
| If you find this project helpful in your research, please cite this work at | ||||
|  | ||||
| ``` | ||||
| @misc{ollama4j2024, | ||||
|     author       = {Amith Koujalgi}, | ||||
|     title        = {Ollama4j: A Java Library (Wrapper/Binding) for Ollama Server}, | ||||
|     year         = {2024}, | ||||
|     month        = {January}, | ||||
|     url          = {https://github.com/ollama4j/ollama4j} | ||||
| } | ||||
| ``` | ||||
|  | ||||
| ### References | ||||
|  | ||||
| - [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md) | ||||
| @@ -246,19 +341,16 @@ of contribution is much appreciated. | ||||
| The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/) | ||||
| project. | ||||
|  | ||||
|  | ||||
| <div style="text-align: center"> | ||||
|  | ||||
| **Thanks to the amazing contributors** | ||||
|  | ||||
| <a href="https://github.com/amithkoujalgi/ollama4j/graphs/contributors"> | ||||
|   <img src="https://contrib.rocks/image?repo=amithkoujalgi/ollama4j" /> | ||||
| </a> | ||||
| <p align="center"> | ||||
|   <a href="https://github.com/ollama4j/ollama4j/graphs/contributors"> | ||||
|     <img src="https://contrib.rocks/image?repo=ollama4j/ollama4j"  alt=""/> | ||||
|   </a> | ||||
| </p> | ||||
|  | ||||
| ### Appreciate my work? | ||||
|  | ||||
| <a href="https://www.buymeacoffee.com/amithkoujalgi" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a> | ||||
|  | ||||
|  | ||||
| </div> | ||||
|  | ||||
| <p align="center"> | ||||
|   <a href="https://www.buymeacoffee.com/amithkoujalgi" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a> | ||||
| </p> | ||||
|   | ||||
| @@ -11,7 +11,7 @@ Hey there, my fellow Java Developers! 🚀 | ||||
| I am glad to announce the release of Ollama4j, a library that unites Ollama (an LLM manager and runner) and your Java | ||||
| applications! 🌐🚀 | ||||
|  | ||||
| 👉 GitHub Repository: Ollama4j on GitHub (https://github.com/amithkoujalgi/ollama4j) | ||||
| 👉 GitHub Repository: Ollama4j on GitHub (https://github.com/ollama4j/ollama4j) | ||||
|  | ||||
| 🌟 Key Features: | ||||
|  | ||||
| @@ -58,9 +58,9 @@ elevate your projects. | ||||
|  | ||||
| I look forward to seeing the incredible applications/projects you'll build with Ollama4j! 🌟 | ||||
|  | ||||
| Find the full API spec here: https://amithkoujalgi.github.io/ollama4j/ | ||||
| Find the full API spec here: https://ollama4j.github.io/ollama4j/ | ||||
|  | ||||
| Find the Javadoc here: https://amithkoujalgi.github.io/ollama4j/apidocs/ | ||||
| Find the Javadoc here: https://ollama4j.github.io/ollama4j/apidocs/ | ||||
|  | ||||
| Ollama4j Docs is powered by [Docusaurus](https://docusaurus.io). | ||||
|  | ||||
|   | ||||
| @@ -10,6 +10,8 @@ Ollama server would be setup behind a gateway/reverse proxy with basic auth. | ||||
| After configuring basic authentication, all subsequent requests will include the Basic Auth header. | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|   | ||||
| @@ -31,13 +31,14 @@ Link to [source](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md | ||||
|  | ||||
| Also, see how to set those Ollama parameters using | ||||
| the `OptionsBuilder` | ||||
| from [javadoc](https://amithkoujalgi.github.io/ollama4j/apidocs/io/github/amithkoujalgi/ollama4j/core/utils/OptionsBuilder.html). | ||||
| from [javadoc](https://ollama4j.github.io/ollama4j/apidocs/io/github/ollama4j/ollama4j/core/utils/OptionsBuilder.html). | ||||
|  | ||||
| ## Build an empty `Options` object | ||||
|  | ||||
| ```java | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.utils.Options; | ||||
| import io.github.ollama4j.utils.OptionsBuilder; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
| @@ -55,8 +56,8 @@ public class Main { | ||||
| ## Build the `Options` object with values | ||||
|  | ||||
| ```java | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||
| import io.github.ollama4j.utils.Options; | ||||
| import io.github.ollama4j.utils.OptionsBuilder; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|   | ||||
| @@ -7,6 +7,8 @@ sidebar_position: 3 | ||||
| This API lets you check the reachability of Ollama server. | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|   | ||||
							
								
								
									
										30
									
								
								docs/docs/apis-extras/ps.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										30
									
								
								docs/docs/apis-extras/ps.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,30 @@ | ||||
| --- | ||||
| sidebar_position: 4 | ||||
| --- | ||||
|  | ||||
| # PS | ||||
|  | ||||
| This API provides a list of running models and details about each model currently loaded into memory. | ||||
|  | ||||
| This API corresponds to the [PS](https://github.com/ollama/ollama/blob/main/docs/api.md#list-running-models) API. | ||||
|  | ||||
| ```java | ||||
| package io.github.ollama4j.localtests; | ||||
|  | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.exceptions.OllamaBaseException; | ||||
| import io.github.ollama4j.models.ps.ModelsProcessResponse; | ||||
|  | ||||
| import java.io.IOException; | ||||
|  | ||||
| public class Main { | ||||
|     public static void main(String[] args) { | ||||
|  | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434"); | ||||
|  | ||||
|         ModelsProcessResponse response = ollamaAPI.ps(); | ||||
|  | ||||
|         System.out.println(response); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
| @@ -7,6 +7,8 @@ sidebar_position: 2 | ||||
| This API lets you set the request timeout for the Ollama client. | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|   public static void main(String[] args) { | ||||
|   | ||||
| @@ -9,6 +9,8 @@ This API lets you set the verbosity of the Ollama client. | ||||
| ## Try asking a question about the model. | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|   | ||||
| @@ -10,6 +10,13 @@ information using the history of already asked questions and the respective answ | ||||
| ## Create a new conversation and use chat history to augment follow up questions | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.chat.OllamaChatMessageRole; | ||||
| import io.github.ollama4j.models.chat.OllamaChatRequestBuilder; | ||||
| import io.github.ollama4j.models.chat.OllamaChatRequest; | ||||
| import io.github.ollama4j.models.chat.OllamaChatResult; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
| @@ -20,7 +27,7 @@ public class Main { | ||||
|         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); | ||||
|  | ||||
|         // create first user question | ||||
|         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") | ||||
|         OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") | ||||
|                 .build(); | ||||
|  | ||||
|         // start conversation with model | ||||
| @@ -75,9 +82,44 @@ You will get a response similar to: | ||||
| ] | ||||
| ``` | ||||
|  | ||||
| ## Conversational loop | ||||
|  | ||||
| ```java | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|  | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(); | ||||
|         ollamaAPI.setRequestTimeoutSeconds(60); | ||||
|  | ||||
|         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("<your-model>"); | ||||
|  | ||||
|         OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "<your-first-message>").build(); | ||||
|         OllamaChatResult initialChatResult = ollamaAPI.chat(requestModel); | ||||
|         System.out.println(initialChatResult.getResponse()); | ||||
|  | ||||
|         List<OllamaChatMessage> history = initialChatResult.getChatHistory(); | ||||
|  | ||||
|         while (true) { | ||||
|             OllamaChatResult chatResult = ollamaAPI.chat(builder.withMessages(history).withMessage(OllamaChatMessageRole.USER, "<your-new-message").build()); | ||||
|             System.out.println(chatResult.getResponse()); | ||||
|             history = chatResult.getChatHistory(); | ||||
|         } | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| ## Create a conversation where the answer is streamed | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.chat.OllamaChatMessageRole; | ||||
| import io.github.ollama4j.models.chat.OllamaChatRequest; | ||||
| import io.github.ollama4j.models.chat.OllamaChatRequestBuilder; | ||||
| import io.github.ollama4j.models.chat.OllamaChatResult; | ||||
| import io.github.ollama4j.models.generate.OllamaStreamHandler; | ||||
|  | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
| @@ -86,7 +128,7 @@ public class Main { | ||||
|  | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); | ||||
|         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, | ||||
|         OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, | ||||
|                         "What is the capital of France? And what's France's connection with Mona Lisa?") | ||||
|                 .build(); | ||||
|  | ||||
| @@ -113,7 +155,13 @@ You will get a response similar to: | ||||
| ## Use a simple Console Output Stream Handler | ||||
|  | ||||
| ```java | ||||
| import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler; | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.impl.ConsoleOutputStreamHandler; | ||||
| import io.github.ollama4j.models.chat.OllamaChatMessageRole; | ||||
| import io.github.ollama4j.models.chat.OllamaChatRequestBuilder; | ||||
| import io.github.ollama4j.models.chat.OllamaChatRequest; | ||||
| import io.github.ollama4j.models.generate.OllamaStreamHandler; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
|  | ||||
| public class Main { | ||||
|     public static void main(String[] args) throws Exception { | ||||
| @@ -121,7 +169,7 @@ public class Main { | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); | ||||
|         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!") | ||||
|         OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!") | ||||
|                 .build(); | ||||
|         OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler(); | ||||
|         ollamaAPI.chat(requestModel, streamHandler); | ||||
| @@ -132,6 +180,14 @@ public class Main { | ||||
| ## Create a new conversation with individual system prompt | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.chat.OllamaChatMessageRole; | ||||
| import io.github.ollama4j.models.chat.OllamaChatRequestBuilder; | ||||
| import io.github.ollama4j.models.chat.OllamaChatRequest; | ||||
| import io.github.ollama4j.models.chat.OllamaChatResult; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
|  | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
| @@ -142,7 +198,7 @@ public class Main { | ||||
|         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); | ||||
|  | ||||
|         // create request with system-prompt (overriding the model defaults) and user question | ||||
|         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") | ||||
|         OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") | ||||
|                 .withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?") | ||||
|                 .build(); | ||||
|  | ||||
| @@ -162,6 +218,16 @@ You will get a response similar to: | ||||
| ## Create a conversation about an image (requires model with image recognition skills) | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.chat.OllamaChatMessageRole; | ||||
| import io.github.ollama4j.models.chat.OllamaChatRequest; | ||||
| import io.github.ollama4j.models.chat.OllamaChatRequestBuilder; | ||||
| import io.github.ollama4j.models.chat.OllamaChatResult; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
|  | ||||
| import java.io.File; | ||||
| import java.util.List; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
| @@ -172,9 +238,10 @@ public class Main { | ||||
|         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA); | ||||
|  | ||||
|         // Load Image from File and attach to user message (alternatively images could also be added via URL) | ||||
|         OllamaChatRequestModel requestModel = | ||||
|         OllamaChatRequest requestModel = | ||||
|                 builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", | ||||
|                         List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); | ||||
|                         List.of( | ||||
|                                 new File("/path/to/image"))).build(); | ||||
|  | ||||
|         OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||
|         System.out.println("First answer: " + chatResult.getResponse()); | ||||
|   | ||||
							
								
								
									
										65
									
								
								docs/docs/apis-generate/custom-roles.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										65
									
								
								docs/docs/apis-generate/custom-roles.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,65 @@ | ||||
| --- | ||||
| sidebar_position: 8 | ||||
| --- | ||||
|  | ||||
| # Custom Roles | ||||
|  | ||||
| Allows to manage custom roles (apart from the base roles) for chat interactions with the models. | ||||
|  | ||||
| _Particularly helpful when you would need to use different roles that the newer models support other than the base | ||||
| roles._ | ||||
|  | ||||
| _Base roles are `SYSTEM`, `USER`, `ASSISTANT`, `TOOL`._ | ||||
|  | ||||
| ### Usage | ||||
|  | ||||
| #### Add new role | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.chat.OllamaChatMessageRole; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|         String host = "http://localhost:11434/"; | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         OllamaChatMessageRole customRole = ollamaAPI.addCustomRole("custom-role"); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| #### List roles | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.chat.OllamaChatMessageRole; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|         String host = "http://localhost:11434/"; | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         List<OllamaChatMessageRole> roles = ollamaAPI.listRoles(); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| #### Get role | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.chat.OllamaChatMessageRole; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|         String host = "http://localhost:11434/"; | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         List<OllamaChatMessageRole> roles = ollamaAPI.getRole("custom-role"); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
| @@ -1,42 +1,48 @@ | ||||
| --- | ||||
| sidebar_position: 3 | ||||
| sidebar_position: 2 | ||||
| --- | ||||
|  | ||||
| # Generate - Async | ||||
|  | ||||
| This API lets you ask questions to the LLMs in an asynchronous way. | ||||
| These APIs correlate to | ||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs. | ||||
| This is particularly helpful when you want to issue a generate request to the LLM and collect the response in the | ||||
| background (such as threads) without blocking your code until the response arrives from the model. | ||||
|  | ||||
| This API corresponds to | ||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.OllamaAsyncResultStreamer; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|  | ||||
|     public static void main(String[] args) throws Exception { | ||||
|         String host = "http://localhost:11434/"; | ||||
|  | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|         ollamaAPI.setRequestTimeoutSeconds(60); | ||||
|         String prompt = "List all cricket world cup teams of 2019."; | ||||
|         OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync(OllamaModelType.LLAMA3, prompt, false); | ||||
|  | ||||
|         String prompt = "Who are you?"; | ||||
|         // Set the poll interval according to your needs.  | ||||
|         // Smaller the poll interval, more frequently you receive the tokens. | ||||
|         int pollIntervalMilliseconds = 1000; | ||||
|  | ||||
|         OllamaAsyncResultCallback callback = ollamaAPI.generateAsync(OllamaModelType.LLAMA2, prompt); | ||||
|  | ||||
|         while (!callback.isComplete() || !callback.getStream().isEmpty()) { | ||||
|             // poll for data from the response stream | ||||
|             String result = callback.getStream().poll(); | ||||
|             if (result != null) { | ||||
|                 System.out.print(result); | ||||
|         while (true) { | ||||
|             String tokens = streamer.getStream().poll(); | ||||
|             System.out.print(tokens); | ||||
|             if (!streamer.isAlive()) { | ||||
|                 break; | ||||
|             } | ||||
|             Thread.sleep(100); | ||||
|             Thread.sleep(pollIntervalMilliseconds); | ||||
|         } | ||||
|  | ||||
|         System.out.println("\n------------------------"); | ||||
|         System.out.println("Complete Response:"); | ||||
|         System.out.println("------------------------"); | ||||
|  | ||||
|         System.out.println(streamer.getCompleteResponse()); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| You will get a response similar to: | ||||
|  | ||||
| > I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational | ||||
| > manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide | ||||
| > range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that | ||||
| > require | ||||
| > natural language understanding and generation capabilities. | ||||
| ``` | ||||
| @@ -8,10 +8,87 @@ Generate embeddings from a model. | ||||
|  | ||||
| Parameters: | ||||
|  | ||||
| - `model`: name of model to generate embeddings from | ||||
| - `input`: text/s to generate embeddings for | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel; | ||||
| import java.util.Arrays; | ||||
| import java.util.Collections; | ||||
| import java.util.List; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|  | ||||
|         String host = "http://localhost:11434/"; | ||||
|  | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         OllamaEmbedResponseModel embeddings = ollamaAPI.embed("all-minilm", Arrays.asList("Why is the sky blue?", "Why is the grass green?")); | ||||
|  | ||||
|         System.out.println(embeddings); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| Or, using the `OllamaEmbedRequestModel`: | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;import java.util.Arrays; | ||||
| import java.util.Collections; | ||||
| import java.util.List; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|  | ||||
|         String host = "http://localhost:11434/"; | ||||
|  | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         OllamaEmbedResponseModel embeddings = ollamaAPI.embed(new OllamaEmbedRequestModel("all-minilm", Arrays.asList("Why is the sky blue?", "Why is the grass green?"))); | ||||
|  | ||||
|         System.out.println(embeddings); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| You will get a response similar to: | ||||
|  | ||||
| ```json | ||||
| { | ||||
|     "model": "all-minilm", | ||||
|     "embeddings": [[-0.034674067, 0.030984823, 0.0067988685]], | ||||
|     "total_duration": 14173700, | ||||
|     "load_duration": 1198800, | ||||
|     "prompt_eval_count": 2 | ||||
| } | ||||
| ```` | ||||
|  | ||||
| :::note | ||||
|  | ||||
| This is a deprecated API | ||||
|  | ||||
| ::: | ||||
|  | ||||
| Parameters: | ||||
|  | ||||
| - `model`: name of model to generate embeddings from | ||||
| - `prompt`: text to generate embeddings for | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
| @@ -36,11 +113,6 @@ You will get a response similar to: | ||||
|     0.009260174818336964, | ||||
|     0.23178744316101074, | ||||
|     -0.2916173040866852, | ||||
|     -0.8924556970596313, | ||||
|     0.8785552978515625, | ||||
|     -0.34576427936553955, | ||||
|     0.5742510557174683, | ||||
|     -0.04222835972905159, | ||||
|     -0.137906014919281 | ||||
|     -0.8924556970596313 | ||||
| ] | ||||
| ``` | ||||
| @@ -5,8 +5,8 @@ sidebar_position: 4 | ||||
| # Generate - With Image Files | ||||
|  | ||||
| This API lets you ask questions along with the image files to the LLMs. | ||||
| These APIs correlate to | ||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs. | ||||
| This API corresponds to | ||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. | ||||
|  | ||||
| :::note | ||||
|  | ||||
| @@ -22,6 +22,14 @@ If you have this image downloaded and you pass the path to the downloaded image | ||||
|  | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.OllamaResult; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
| import io.github.ollama4j.utils.OptionsBuilder; | ||||
|  | ||||
| import java.io.File; | ||||
| import java.util.List; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
| @@ -32,7 +40,9 @@ public class Main { | ||||
|         OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA, | ||||
|                 "What's in this image?", | ||||
|                 List.of( | ||||
|                         new File("/path/to/image"))); | ||||
|                         new File("/path/to/image")), | ||||
|                 new OptionsBuilder().build() | ||||
|         ); | ||||
|         System.out.println(result.getResponse()); | ||||
|     } | ||||
| } | ||||
|   | ||||
| @@ -5,8 +5,8 @@ sidebar_position: 5 | ||||
| # Generate - With Image URLs | ||||
|  | ||||
| This API lets you ask questions along with the image files to the LLMs. | ||||
| These APIs correlate to | ||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs. | ||||
| This API corresponds to | ||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. | ||||
|  | ||||
| :::note | ||||
|  | ||||
| @@ -22,6 +22,13 @@ Passing the link of this image the following code: | ||||
|  | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.OllamaResult; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
| import io.github.ollama4j.utils.OptionsBuilder; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
| @@ -32,7 +39,9 @@ public class Main { | ||||
|         OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA, | ||||
|                 "What's in this image?", | ||||
|                 List.of( | ||||
|                         "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg")); | ||||
|                         "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"), | ||||
|                 new OptionsBuilder().build() | ||||
|         ); | ||||
|         System.out.println(result.getResponse()); | ||||
|     } | ||||
| } | ||||
|   | ||||
| @@ -1,12 +1,12 @@ | ||||
| --- | ||||
| sidebar_position: 2 | ||||
| sidebar_position: 3 | ||||
| --- | ||||
|  | ||||
| # Generate - With Tools | ||||
|  | ||||
| This API lets you perform [function calling](https://docs.mistral.ai/capabilities/function_calling/) using LLMs in a | ||||
| synchronous way. | ||||
| This API correlates to | ||||
| This API corresponds to | ||||
| the [generate](https://github.com/ollama/ollama/blob/main/docs/api.md#request-raw-mode) API with `raw` mode. | ||||
|  | ||||
| :::note | ||||
| @@ -29,8 +29,10 @@ You could do that with ease with the `function calling` capabilities of the mode | ||||
|  | ||||
| ### Create Functions | ||||
|  | ||||
| This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns a | ||||
| value. | ||||
| We can create static functions as our tools. | ||||
|  | ||||
| This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns | ||||
| fuel price value. | ||||
|  | ||||
| ```java | ||||
| public static String getCurrentFuelPrice(Map<String, Object> arguments) { | ||||
| @@ -40,8 +42,8 @@ public static String getCurrentFuelPrice(Map<String, Object> arguments) { | ||||
| } | ||||
| ``` | ||||
|  | ||||
| This function takes the argument `city` and performs an operation with the argument and returns a | ||||
| value. | ||||
| This function takes the argument `city` and performs an operation with the argument and returns the weather for a | ||||
| location. | ||||
|  | ||||
| ```java | ||||
| public static String getCurrentWeather(Map<String, Object> arguments) { | ||||
| @@ -50,6 +52,21 @@ public static String getCurrentWeather(Map<String, Object> arguments) { | ||||
| } | ||||
| ``` | ||||
|  | ||||
| Another way to create our tools is by creating classes by extending `ToolFunction`. | ||||
|  | ||||
| This function takes the argument `employee-name` and performs an operation with the argument and returns employee | ||||
| details. | ||||
|  | ||||
| ```java | ||||
| class DBQueryFunction implements ToolFunction { | ||||
|     @Override | ||||
|     public Object apply(Map<String, Object> arguments) { | ||||
|         // perform DB operations here | ||||
|         return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString()); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| ### Define Tool Specifications | ||||
|  | ||||
| Lets define a sample tool specification called **Fuel Price Tool** for getting the current fuel price. | ||||
| @@ -58,13 +75,13 @@ Lets define a sample tool specification called **Fuel Price Tool** for getting t | ||||
| - Associate the `getCurrentFuelPrice` function you defined earlier with `SampleTools::getCurrentFuelPrice`. | ||||
|  | ||||
| ```java | ||||
| MistralTools.ToolSpecification fuelPriceToolSpecification = MistralTools.ToolSpecification.builder() | ||||
| Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder() | ||||
|         .functionName("current-fuel-price") | ||||
|         .functionDesc("Get current fuel price") | ||||
|         .props( | ||||
|                 new MistralTools.PropsBuilder() | ||||
|                         .withProperty("location", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||
|                         .withProperty("fuelType", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build()) | ||||
|         .functionDescription("Get current fuel price") | ||||
|         .properties( | ||||
|                 new Tools.PropsBuilder() | ||||
|                         .withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||
|                         .withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build()) | ||||
|                         .build() | ||||
|         ) | ||||
|         .toolDefinition(SampleTools::getCurrentFuelPrice) | ||||
| @@ -77,18 +94,38 @@ Lets also define a sample tool specification called **Weather Tool** for getting | ||||
| - Associate the `getCurrentWeather` function you defined earlier with `SampleTools::getCurrentWeather`. | ||||
|  | ||||
| ```java | ||||
| MistralTools.ToolSpecification weatherToolSpecification = MistralTools.ToolSpecification.builder() | ||||
| Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder() | ||||
|         .functionName("current-weather") | ||||
|         .functionDesc("Get current weather") | ||||
|         .props( | ||||
|                 new MistralTools.PropsBuilder() | ||||
|                         .withProperty("city", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||
|         .functionDescription("Get current weather") | ||||
|         .properties( | ||||
|                 new Tools.PropsBuilder() | ||||
|                         .withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||
|                         .build() | ||||
|         ) | ||||
|         .toolDefinition(SampleTools::getCurrentWeather) | ||||
|         .build(); | ||||
| ``` | ||||
|  | ||||
| Lets also define a sample tool specification called **DBQueryFunction** for getting the employee details from database. | ||||
|  | ||||
| - Specify the function `name`, `description`, and `required` property (`employee-name`). | ||||
| - Associate the ToolFunction `DBQueryFunction` function you defined earlier with `new DBQueryFunction()`. | ||||
|  | ||||
| ```java | ||||
| Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder() | ||||
|         .functionName("get-employee-details") | ||||
|         .functionDescription("Get employee details from the database") | ||||
|         .properties( | ||||
|                 new Tools.PropsBuilder() | ||||
|                         .withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build()) | ||||
|                         .withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build()) | ||||
|                         .withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build()) | ||||
|                         .build() | ||||
|         ) | ||||
|         .toolDefinition(new DBQueryFunction()) | ||||
|         .build(); | ||||
| ``` | ||||
|  | ||||
| ### Register the Tools | ||||
|  | ||||
| Register the defined tools (`fuel price` and `weather`) with the OllamaAPI. | ||||
| @@ -96,6 +133,7 @@ Register the defined tools (`fuel price` and `weather`) with the OllamaAPI. | ||||
| ```shell | ||||
| ollamaAPI.registerTool(fuelPriceToolSpecification); | ||||
| ollamaAPI.registerTool(weatherToolSpecification); | ||||
| ollamaAPI.registerTool(databaseQueryToolSpecification); | ||||
| ``` | ||||
|  | ||||
| ### Create prompt with Tools | ||||
| @@ -103,14 +141,14 @@ ollamaAPI.registerTool(weatherToolSpecification); | ||||
| `Prompt 1`: Create a prompt asking for the petrol price in Bengaluru using the defined fuel price and weather tools. | ||||
|  | ||||
| ```shell | ||||
| String prompt1 = new MistralTools.PromptBuilder() | ||||
|         .withToolSpecification(fuelPriceToolSpecification) | ||||
|         .withToolSpecification(weatherToolSpecification) | ||||
|         .withPrompt("What is the petrol price in Bengaluru?") | ||||
|         .build(); | ||||
| OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, false, new OptionsBuilder().build()); | ||||
| for (Map.Entry<ToolDef, Object> r : toolsResult.getToolResults().entrySet()) { | ||||
|   System.out.printf("[Response from tool '%s']: %s%n", r.getKey().getName(), r.getValue().toString()); | ||||
| String prompt1 = new Tools.PromptBuilder() | ||||
|                 .withToolSpecification(fuelPriceToolSpecification) | ||||
|                 .withToolSpecification(weatherToolSpecification) | ||||
|                 .withPrompt("What is the petrol price in Bengaluru?") | ||||
|                 .build(); | ||||
| OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, new OptionsBuilder().build()); | ||||
| for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) { | ||||
|     System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString()); | ||||
| } | ||||
| ``` | ||||
|  | ||||
| @@ -120,21 +158,21 @@ You will get a response similar to: | ||||
|  | ||||
| ::::tip[LLM Response] | ||||
|  | ||||
| [Response from tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L | ||||
| [Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L | ||||
|  | ||||
| :::: | ||||
|  | ||||
| `Prompt 2`: Create a prompt asking for the current weather in Bengaluru using the same tools. | ||||
|  | ||||
| ```shell | ||||
| String prompt2 = new MistralTools.PromptBuilder() | ||||
|         .withToolSpecification(fuelPriceToolSpecification) | ||||
|         .withToolSpecification(weatherToolSpecification) | ||||
|         .withPrompt("What is the current weather in Bengaluru?") | ||||
|         .build(); | ||||
| OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, false, new OptionsBuilder().build()); | ||||
| for (Map.Entry<ToolDef, Object> r : toolsResult.getToolResults().entrySet()) { | ||||
|   System.out.printf("[Response from tool '%s']: %s%n", r.getKey().getName(), r.getValue().toString()); | ||||
| String prompt2 = new Tools.PromptBuilder() | ||||
|                 .withToolSpecification(fuelPriceToolSpecification) | ||||
|                 .withToolSpecification(weatherToolSpecification) | ||||
|                 .withPrompt("What is the current weather in Bengaluru?") | ||||
|                 .build(); | ||||
| OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, new OptionsBuilder().build()); | ||||
| for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) { | ||||
|     System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString()); | ||||
| } | ||||
| ``` | ||||
|  | ||||
| @@ -144,25 +182,53 @@ You will get a response similar to: | ||||
|  | ||||
| ::::tip[LLM Response] | ||||
|  | ||||
| [Response from tool 'current-weather']: Currently Bengaluru's weather is nice | ||||
| [Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice. | ||||
|  | ||||
| :::: | ||||
|  | ||||
| `Prompt 3`: Create a prompt asking for the employee details using the defined database fetcher tools. | ||||
|  | ||||
| ```shell | ||||
| String prompt3 = new Tools.PromptBuilder() | ||||
|                 .withToolSpecification(fuelPriceToolSpecification) | ||||
|                 .withToolSpecification(weatherToolSpecification) | ||||
|                 .withToolSpecification(databaseQueryToolSpecification) | ||||
|                 .withPrompt("Give me the details of the employee named 'Rahul Kumar'?") | ||||
|                 .build(); | ||||
| OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt3, new OptionsBuilder().build()); | ||||
| for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) { | ||||
|     System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString()); | ||||
| } | ||||
| ``` | ||||
|  | ||||
| Again, fire away your question to the model. | ||||
|  | ||||
| You will get a response similar to: | ||||
|  | ||||
| ::::tip[LLM Response] | ||||
|  | ||||
| [Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name: | ||||
| Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}` | ||||
|  | ||||
| :::: | ||||
|  | ||||
| ### Full Example | ||||
|  | ||||
| ```java | ||||
|  | ||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||
| import io.github.amithkoujalgi.ollama4j.core.tools.ToolDef; | ||||
| import io.github.amithkoujalgi.ollama4j.core.tools.MistralTools; | ||||
| import io.github.amithkoujalgi.ollama4j.core.tools.OllamaToolsResult; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.exceptions.OllamaBaseException; | ||||
| import io.github.ollama4j.exceptions.ToolInvocationException; | ||||
| import io.github.ollama4j.tools.OllamaToolsResult; | ||||
| import io.github.ollama4j.tools.ToolFunction; | ||||
| import io.github.ollama4j.tools.Tools; | ||||
| import io.github.ollama4j.utils.OptionsBuilder; | ||||
|  | ||||
| import java.io.IOException; | ||||
| import java.util.Arrays; | ||||
| import java.util.Map; | ||||
| import java.util.UUID; | ||||
|  | ||||
| public class FunctionCallingWithMistral { | ||||
| public class FunctionCallingWithMistralExample { | ||||
|     public static void main(String[] args) throws Exception { | ||||
|         String host = "http://localhost:11434/"; | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
| @@ -170,81 +236,116 @@ public class FunctionCallingWithMistral { | ||||
|  | ||||
|         String model = "mistral"; | ||||
|  | ||||
|  | ||||
|         MistralTools.ToolSpecification fuelPriceToolSpecification = MistralTools.ToolSpecification.builder() | ||||
|         Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder() | ||||
|                 .functionName("current-fuel-price") | ||||
|                 .functionDesc("Get current fuel price") | ||||
|                 .props( | ||||
|                         new MistralTools.PropsBuilder() | ||||
|                                 .withProperty("location", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||
|                                 .withProperty("fuelType", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build()) | ||||
|                 .functionDescription("Get current fuel price") | ||||
|                 .properties( | ||||
|                         new Tools.PropsBuilder() | ||||
|                                 .withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||
|                                 .withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build()) | ||||
|                                 .build() | ||||
|                 ) | ||||
|                 .toolDefinition(SampleTools::getCurrentFuelPrice) | ||||
|                 .build(); | ||||
|  | ||||
|         MistralTools.ToolSpecification weatherToolSpecification = MistralTools.ToolSpecification.builder() | ||||
|         Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder() | ||||
|                 .functionName("current-weather") | ||||
|                 .functionDesc("Get current weather") | ||||
|                 .props( | ||||
|                         new MistralTools.PropsBuilder() | ||||
|                                 .withProperty("city", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||
|                 .functionDescription("Get current weather") | ||||
|                 .properties( | ||||
|                         new Tools.PropsBuilder() | ||||
|                                 .withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||
|                                 .build() | ||||
|                 ) | ||||
|                 .toolDefinition(SampleTools::getCurrentWeather) | ||||
|                 .build(); | ||||
|  | ||||
|         Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder() | ||||
|                 .functionName("get-employee-details") | ||||
|                 .functionDescription("Get employee details from the database") | ||||
|                 .properties( | ||||
|                         new Tools.PropsBuilder() | ||||
|                                 .withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build()) | ||||
|                                 .withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build()) | ||||
|                                 .withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build()) | ||||
|                                 .build() | ||||
|                 ) | ||||
|                 .toolDefinition(new DBQueryFunction()) | ||||
|                 .build(); | ||||
|  | ||||
|         ollamaAPI.registerTool(fuelPriceToolSpecification); | ||||
|         ollamaAPI.registerTool(weatherToolSpecification); | ||||
|         ollamaAPI.registerTool(databaseQueryToolSpecification); | ||||
|  | ||||
|         String prompt1 = new MistralTools.PromptBuilder() | ||||
|         String prompt1 = new Tools.PromptBuilder() | ||||
|                 .withToolSpecification(fuelPriceToolSpecification) | ||||
|                 .withToolSpecification(weatherToolSpecification) | ||||
|                 .withPrompt("What is the petrol price in Bengaluru?") | ||||
|                 .build(); | ||||
|         String prompt2 = new MistralTools.PromptBuilder() | ||||
|         ask(ollamaAPI, model, prompt1); | ||||
|  | ||||
|         String prompt2 = new Tools.PromptBuilder() | ||||
|                 .withToolSpecification(fuelPriceToolSpecification) | ||||
|                 .withToolSpecification(weatherToolSpecification) | ||||
|                 .withPrompt("What is the current weather in Bengaluru?") | ||||
|                 .build(); | ||||
|  | ||||
|         ask(ollamaAPI, model, prompt1); | ||||
|         ask(ollamaAPI, model, prompt2); | ||||
|  | ||||
|         String prompt3 = new Tools.PromptBuilder() | ||||
|                 .withToolSpecification(fuelPriceToolSpecification) | ||||
|                 .withToolSpecification(weatherToolSpecification) | ||||
|                 .withToolSpecification(databaseQueryToolSpecification) | ||||
|                 .withPrompt("Give me the details of the employee named 'Rahul Kumar'?") | ||||
|                 .build(); | ||||
|         ask(ollamaAPI, model, prompt3); | ||||
|     } | ||||
|  | ||||
|     public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, false, new OptionsBuilder().build()); | ||||
|         for (Map.Entry<ToolDef, Object> r : toolsResult.getToolResults().entrySet()) { | ||||
|             System.out.printf("[Response from tool '%s']: %s%n", r.getKey().getName(), r.getValue().toString()); | ||||
|     public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException { | ||||
|         OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, new OptionsBuilder().build()); | ||||
|         for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) { | ||||
|             System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString()); | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
|  | ||||
| class SampleTools { | ||||
|     public static String getCurrentFuelPrice(Map<String, Object> arguments) { | ||||
|         // Get details from fuel price API | ||||
|         String location = arguments.get("location").toString(); | ||||
|         String fuelType = arguments.get("fuelType").toString(); | ||||
|         return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; | ||||
|     } | ||||
|  | ||||
|     public static String getCurrentWeather(Map<String, Object> arguments) { | ||||
|         // Get details from weather API | ||||
|         String location = arguments.get("city").toString(); | ||||
|         return "Currently " + location + "'s weather is nice."; | ||||
|     } | ||||
| } | ||||
|  | ||||
| class DBQueryFunction implements ToolFunction { | ||||
|     @Override | ||||
|     public Object apply(Map<String, Object> arguments) { | ||||
|         // perform DB operations here | ||||
|         return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString()); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| Run this full example and you will get a response similar to: | ||||
|  | ||||
| ::::tip[LLM Response] | ||||
|  | ||||
| [Response from tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L | ||||
| [Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L | ||||
|  | ||||
| [Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice. | ||||
|  | ||||
| [Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name: | ||||
| Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}` | ||||
|  | ||||
| [Response from tool 'current-weather']: Currently Bengaluru's weather is nice | ||||
| :::: | ||||
|  | ||||
| ### Room for improvement | ||||
| ### Potential Improvements | ||||
|  | ||||
| Instead of explicitly registering `ollamaAPI.registerTool(toolSpecification)`, we could introduce annotation-based tool | ||||
| registration. For example: | ||||
|   | ||||
| @@ -5,8 +5,8 @@ sidebar_position: 1 | ||||
| # Generate - Sync | ||||
|  | ||||
| This API lets you ask questions to the LLMs in a synchronous way. | ||||
| These APIs correlate to | ||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs. | ||||
| This API corresponds to | ||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. | ||||
|  | ||||
| Use the `OptionBuilder` to build the `Options` object | ||||
| with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values). | ||||
| @@ -16,6 +16,11 @@ to [this](/apis-extras/options-builder). | ||||
| ## Try asking a question about the model. | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.OllamaResult; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
| import io.github.ollama4j.utils.OptionsBuilder; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
| @@ -44,6 +49,11 @@ You will get a response similar to: | ||||
| ## Try asking a question, receiving the answer streamed | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.OllamaResult; | ||||
| import io.github.ollama4j.models.generate.OllamaStreamHandler; | ||||
| import io.github.ollama4j.utils.OptionsBuilder; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
| @@ -80,6 +90,11 @@ You will get a response similar to: | ||||
| ## Try asking a question from general topics. | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.OllamaResult; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
| import io.github.ollama4j.utils.OptionsBuilder; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
| @@ -123,6 +138,12 @@ You'd then get a response from the model: | ||||
| ## Try asking for a Database query for your data schema. | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.OllamaResult; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
| import io.github.ollama4j.utils.OptionsBuilder; | ||||
| import io.github.ollama4j.utils.SamplePrompts; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
| @@ -141,7 +162,7 @@ public class Main { | ||||
| ``` | ||||
|  | ||||
| _Note: Here I've used | ||||
| a [sample prompt](https://github.com/amithkoujalgi/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt) | ||||
| a [sample prompt](https://github.com/ollama4j/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt) | ||||
| containing a database schema from within this library for demonstration purposes._ | ||||
|  | ||||
| You'd then get a response from the model: | ||||
|   | ||||
| @@ -8,13 +8,13 @@ This is designed for prompt engineering. It allows you to easily build the promp | ||||
| inferences. | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.OllamaResult; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
| import io.github.ollama4j.utils.OptionsBuilder; | ||||
| import io.github.ollama4j.utils.PromptBuilder; | ||||
|  | ||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||
| import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.PromptBuilder; | ||||
|  | ||||
| public class AskPhi { | ||||
| public class Main { | ||||
|     public static void main(String[] args) throws Exception { | ||||
|  | ||||
|         String host = "http://localhost:11434/"; | ||||
| @@ -42,7 +42,8 @@ public class AskPhi { | ||||
|                         .addSeparator() | ||||
|                         .add("How do I read a file in Go and print its contents to stdout?"); | ||||
|  | ||||
|         OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build()); | ||||
|         boolean raw = false; | ||||
|         OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), raw, new OptionsBuilder().build()); | ||||
|         System.out.println(response.getResponse()); | ||||
|     } | ||||
| } | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| --- | ||||
| sidebar_position: 4 | ||||
| sidebar_position: 5 | ||||
| --- | ||||
|  | ||||
| # Create Model | ||||
| @@ -9,6 +9,8 @@ This API lets you create a custom model on the Ollama server. | ||||
| ### Create a model from an existing Modelfile in the Ollama server | ||||
|  | ||||
| ```java title="CreateModel.java" | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
|  | ||||
| public class CreateModel { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| --- | ||||
| sidebar_position: 5 | ||||
| sidebar_position: 6 | ||||
| --- | ||||
|  | ||||
| # Delete Model | ||||
| @@ -7,6 +7,8 @@ sidebar_position: 5 | ||||
| This API lets you delete a model from the Ollama server. | ||||
|  | ||||
| ```java title="DeleteModel.java" | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| --- | ||||
| sidebar_position: 3 | ||||
| sidebar_position: 4 | ||||
| --- | ||||
|  | ||||
| # Get Model Details | ||||
| @@ -7,6 +7,10 @@ sidebar_position: 3 | ||||
| This API lets you get the details of a model on the Ollama server. | ||||
|  | ||||
| ```java title="GetModelDetails.java" | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.ModelDetail; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|   | ||||
							
								
								
									
										133
									
								
								docs/docs/apis-model-management/list-library-models.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										133
									
								
								docs/docs/apis-model-management/list-library-models.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,133 @@ | ||||
| --- | ||||
| sidebar_position: 1 | ||||
| --- | ||||
|  | ||||
| # Models from Ollama Library | ||||
|  | ||||
| This API retrieves a list of models directly from the Ollama library. | ||||
|  | ||||
| ### List Models from Ollama Library | ||||
|  | ||||
| This API fetches available models from the Ollama library page, including details such as the model's name, pull count, | ||||
| popular tags, tag count, and the last update time. | ||||
|  | ||||
| ```java title="ListLibraryModels.java" | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.LibraryModel; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|  | ||||
|         String host = "http://localhost:11434/"; | ||||
|  | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         List<LibraryModel> libraryModels = ollamaAPI.listModelsFromLibrary(); | ||||
|  | ||||
|         System.out.println(libraryModels); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| The following is the sample output: | ||||
|  | ||||
| ``` | ||||
| [ | ||||
|     LibraryModel(name=llama3.2-vision, description=Llama 3.2 Vision is a collection of instruction-tuned image reasoning generative models in 11B and 90B sizes., pullCount=21.1K, totalTags=9, popularTags=[vision, 11b, 90b], lastUpdated=yesterday),  | ||||
|     LibraryModel(name=llama3.2, description=Meta's Llama 3.2 goes small with 1B and 3B models., pullCount=2.4M, totalTags=63, popularTags=[tools, 1b, 3b], lastUpdated=6 weeks ago) | ||||
| ] | ||||
| ``` | ||||
|  | ||||
| ### Get Tags of a Library Model | ||||
|  | ||||
| This API fetches the tags associated with a specific model from the Ollama library. | ||||
|  | ||||
| ```java title="GetLibraryModelTags.java" | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.LibraryModel; | ||||
| import io.github.ollama4j.models.response.LibraryModelDetail; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|  | ||||
|         String host = "http://localhost:11434/"; | ||||
|  | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         List<LibraryModel> libraryModels = ollamaAPI.listModelsFromLibrary(); | ||||
|  | ||||
|         LibraryModelDetail libraryModelDetail = ollamaAPI.getLibraryModelDetails(libraryModels.get(0)); | ||||
|  | ||||
|         System.out.println(libraryModelDetail); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| The following is the sample output: | ||||
|  | ||||
| ``` | ||||
| LibraryModelDetail( | ||||
|   model=LibraryModel(name=llama3.2-vision, description=Llama 3.2 Vision is a collection of instruction-tuned image reasoning generative models in 11B and 90B sizes., pullCount=21.1K, totalTags=9, popularTags=[vision, 11b, 90b], lastUpdated=yesterday),  | ||||
|   tags=[ | ||||
|         LibraryModelTag(name=llama3.2-vision, tag=latest, size=7.9GB, lastUpdated=yesterday),  | ||||
|         LibraryModelTag(name=llama3.2-vision, tag=11b, size=7.9GB, lastUpdated=yesterday),  | ||||
|         LibraryModelTag(name=llama3.2-vision, tag=90b, size=55GB, lastUpdated=yesterday) | ||||
|     ] | ||||
| ) | ||||
| ``` | ||||
|  | ||||
| ### Find a model from Ollama library | ||||
|  | ||||
| This API finds a specific model using the model `name` and `tag` from the Ollama library. | ||||
|  | ||||
| ```java title="FindLibraryModel.java" | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.LibraryModelTag; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|  | ||||
|         String host = "http://localhost:11434/"; | ||||
|  | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         LibraryModelTag libraryModelTag = ollamaAPI.findModelTagFromLibrary("qwen2.5", "7b"); | ||||
|  | ||||
|         System.out.println(libraryModelTag); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| The following is the sample output: | ||||
|  | ||||
| ``` | ||||
| LibraryModelTag(name=qwen2.5, tag=7b, size=4.7GB, lastUpdated=7 weeks ago) | ||||
| ``` | ||||
|  | ||||
| ### Pull model using `LibraryModelTag` | ||||
|  | ||||
| You can use `LibraryModelTag` to pull models into Ollama server. | ||||
|  | ||||
| ```java title="PullLibraryModelTags.java" | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.LibraryModelTag; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|  | ||||
|         String host = "http://localhost:11434/"; | ||||
|  | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
|  | ||||
|         LibraryModelTag libraryModelTag = ollamaAPI.findModelTagFromLibrary("qwen2.5", "7b"); | ||||
|  | ||||
|         ollamaAPI.pullModel(libraryModelTag); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
| @@ -1,12 +1,17 @@ | ||||
| --- | ||||
| sidebar_position: 1 | ||||
| sidebar_position: 2 | ||||
| --- | ||||
|  | ||||
| # List Models | ||||
| # List Local Models | ||||
|  | ||||
| This API lets you list available models on the Ollama server. | ||||
| This API lets you list downloaded/available models on the Ollama server. | ||||
|  | ||||
| ```java title="ListModels.java" | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.models.response.Model; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
| public class ListModels { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| --- | ||||
| sidebar_position: 2 | ||||
| sidebar_position: 3 | ||||
| --- | ||||
|  | ||||
| # Pull Model | ||||
| @@ -7,10 +7,13 @@ sidebar_position: 2 | ||||
| This API lets you pull a model on the Ollama server. | ||||
|  | ||||
| ```java title="PullModel.java" | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.types.OllamaModelType; | ||||
|  | ||||
| public class Main { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|          | ||||
|  | ||||
|         String host = "http://localhost:11434/"; | ||||
|  | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||
| @@ -20,4 +23,12 @@ public class Main { | ||||
| } | ||||
| ``` | ||||
|  | ||||
| Once downloaded, you can see them when you use [list models](./list-models) API. | ||||
| Once downloaded, you can see them when you use [list models](./list-models) API. | ||||
|  | ||||
| :::info | ||||
|  | ||||
| You can even pull models using Ollama model library APIs. This looks up the models directly on the Ollama model library page. Refer | ||||
| to [this](./list-library-models#pull-model-using-librarymodeltag). | ||||
|  | ||||
| ::: | ||||
|  | ||||
|   | ||||
| @@ -78,13 +78,13 @@ Add the dependency to your project's `pom.xml`. | ||||
| ```xml | ||||
|  | ||||
| <dependency> | ||||
|     <groupId>io.github.amithkoujalgi</groupId> | ||||
|     <groupId>io.github.ollama4j</groupId> | ||||
|     <artifactId>ollama4j</artifactId> | ||||
|     <version>1.0.27</version> | ||||
|     <version>1.0.78</version> | ||||
| </dependency> | ||||
| ``` | ||||
|  | ||||
| Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j). | ||||
| Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j). | ||||
|  | ||||
| You might want to include an implementation of [SL4J](https://www.slf4j.org/) logger in your `pom.xml` file. For | ||||
| example, | ||||
| @@ -116,6 +116,26 @@ or use other suitable implementations. | ||||
| Create a new Java class in your project and add this code. | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
|  | ||||
| public class OllamaAPITest { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
|         OllamaAPI ollamaAPI = new OllamaAPI(); | ||||
|  | ||||
|         boolean isOllamaServerReachable = ollamaAPI.ping(); | ||||
|  | ||||
|         System.out.println("Is Ollama server running: " + isOllamaServerReachable); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
| This uses the default Ollama host as `http://localhost:11434`. | ||||
|  | ||||
| Specify a different Ollama host that you want to connect to. | ||||
|  | ||||
| ```java | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
|  | ||||
| public class OllamaAPITest { | ||||
|  | ||||
|     public static void main(String[] args) { | ||||
| @@ -127,7 +147,7 @@ public class OllamaAPITest { | ||||
|  | ||||
|         boolean isOllamaServerReachable = ollamaAPI.ping(); | ||||
|  | ||||
|         System.out.println("Is Ollama server alive: " + isOllamaServerReachable); | ||||
|         System.out.println("Is Ollama server running: " + isOllamaServerReachable); | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|   | ||||
| @@ -20,7 +20,7 @@ const config = { | ||||
|  | ||||
|     // GitHub pages deployment config. | ||||
|     // If you aren't using GitHub pages, you don't need these. | ||||
|     organizationName: 'amithkoujalgi', // Usually your GitHub org/user name. | ||||
|     organizationName: 'ollama4j', // Usually your GitHub org/user name. | ||||
|     projectName: 'ollama4j', // Usually your repo name. | ||||
|  | ||||
|     onBrokenLinks: 'throw', | ||||
| @@ -46,18 +46,22 @@ const config = { | ||||
|                     // Please change this to your repo. | ||||
|                     // Remove this to remove the "edit this page" links. | ||||
|                     editUrl: | ||||
|                         'https://github.com/amithkoujalgi/ollama4j/blob/main/docs', | ||||
|                         'https://github.com/ollama4j/ollama4j/blob/main/docs', | ||||
|                 }, | ||||
|                 blog: { | ||||
|                     showReadingTime: true, | ||||
|                     // Please change this to your repo. | ||||
|                     // Remove this to remove the "edit this page" links. | ||||
|                     editUrl: | ||||
|                         'https://github.com/amithkoujalgi/ollama4j/blob/main/docs', | ||||
|                         'https://github.com/ollama4j/ollama4j/blob/main/docs', | ||||
|                 }, | ||||
|                 theme: { | ||||
|                     customCss: './src/css/custom.css', | ||||
|                 }, | ||||
|                 gtag: { | ||||
|                     trackingID: 'G-G7FLH6FNDC', | ||||
|                     anonymizeIP: false, | ||||
|                 }, | ||||
|             }), | ||||
|         ], | ||||
|     ], | ||||
| @@ -80,11 +84,11 @@ const config = { | ||||
|                         position: 'left', | ||||
|                         label: 'Docs', | ||||
|                     }, | ||||
|                     {to: 'https://amithkoujalgi.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'}, | ||||
|                     {to: 'https://amithkoujalgi.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'}, | ||||
|                     {to: 'https://ollama4j.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'}, | ||||
|                     {to: 'https://ollama4j.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'}, | ||||
|                     {to: '/blog', label: 'Blog', position: 'left'}, | ||||
|                     { | ||||
|                         href: 'https://github.com/amithkoujalgi/ollama4j', | ||||
|                         href: 'https://github.com/ollama4j/ollama4j', | ||||
|                         label: 'GitHub', | ||||
|                         position: 'right', | ||||
|                     }, | ||||
| @@ -124,7 +128,7 @@ const config = { | ||||
|                             }, | ||||
|                             { | ||||
|                                 label: 'GitHub', | ||||
|                                 href: 'https://github.com/amithkoujalgi/ollama4j', | ||||
|                                 href: 'https://github.com/ollama4j/ollama4j', | ||||
|                             }, | ||||
|                         ], | ||||
|                     }, | ||||
|   | ||||
							
								
								
									
										414
									
								
								docs/package-lock.json
									
									
									
										generated
									
									
									
								
							
							
						
						
									
										414
									
								
								docs/package-lock.json
									
									
									
										generated
									
									
									
								
							| @@ -9,6 +9,7 @@ | ||||
|       "version": "0.0.0", | ||||
|       "dependencies": { | ||||
|         "@docusaurus/core": "^3.4.0", | ||||
|         "@docusaurus/plugin-google-gtag": "^3.4.0", | ||||
|         "@docusaurus/preset-classic": "^3.4.0", | ||||
|         "@docusaurus/theme-mermaid": "^3.4.0", | ||||
|         "@mdx-js/react": "^3.0.0", | ||||
| @@ -3382,24 +3383,6 @@ | ||||
|         "@types/ms": "*" | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@types/eslint": { | ||||
|       "version": "8.56.0", | ||||
|       "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.56.0.tgz", | ||||
|       "integrity": "sha512-FlsN0p4FhuYRjIxpbdXovvHQhtlG05O1GG/RNWvdAxTboR438IOTwmrY/vLA+Xfgg06BTkP045M3vpFwTMv1dg==", | ||||
|       "dependencies": { | ||||
|         "@types/estree": "*", | ||||
|         "@types/json-schema": "*" | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@types/eslint-scope": { | ||||
|       "version": "3.7.7", | ||||
|       "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.7.tgz", | ||||
|       "integrity": "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==", | ||||
|       "dependencies": { | ||||
|         "@types/eslint": "*", | ||||
|         "@types/estree": "*" | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@types/estree": { | ||||
|       "version": "1.0.5", | ||||
|       "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", | ||||
| @@ -3690,9 +3673,10 @@ | ||||
|       "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==" | ||||
|     }, | ||||
|     "node_modules/@webassemblyjs/ast": { | ||||
|       "version": "1.11.6", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.6.tgz", | ||||
|       "integrity": "sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q==", | ||||
|       "version": "1.12.1", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.12.1.tgz", | ||||
|       "integrity": "sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "@webassemblyjs/helper-numbers": "1.11.6", | ||||
|         "@webassemblyjs/helper-wasm-bytecode": "1.11.6" | ||||
| @@ -3701,22 +3685,26 @@ | ||||
|     "node_modules/@webassemblyjs/floating-point-hex-parser": { | ||||
|       "version": "1.11.6", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz", | ||||
|       "integrity": "sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw==" | ||||
|       "integrity": "sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw==", | ||||
|       "license": "MIT" | ||||
|     }, | ||||
|     "node_modules/@webassemblyjs/helper-api-error": { | ||||
|       "version": "1.11.6", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz", | ||||
|       "integrity": "sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q==" | ||||
|       "integrity": "sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q==", | ||||
|       "license": "MIT" | ||||
|     }, | ||||
|     "node_modules/@webassemblyjs/helper-buffer": { | ||||
|       "version": "1.11.6", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz", | ||||
|       "integrity": "sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA==" | ||||
|       "version": "1.12.1", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz", | ||||
|       "integrity": "sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw==", | ||||
|       "license": "MIT" | ||||
|     }, | ||||
|     "node_modules/@webassemblyjs/helper-numbers": { | ||||
|       "version": "1.11.6", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz", | ||||
|       "integrity": "sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "@webassemblyjs/floating-point-hex-parser": "1.11.6", | ||||
|         "@webassemblyjs/helper-api-error": "1.11.6", | ||||
| @@ -3726,23 +3714,26 @@ | ||||
|     "node_modules/@webassemblyjs/helper-wasm-bytecode": { | ||||
|       "version": "1.11.6", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz", | ||||
|       "integrity": "sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA==" | ||||
|       "integrity": "sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA==", | ||||
|       "license": "MIT" | ||||
|     }, | ||||
|     "node_modules/@webassemblyjs/helper-wasm-section": { | ||||
|       "version": "1.11.6", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz", | ||||
|       "integrity": "sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g==", | ||||
|       "version": "1.12.1", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz", | ||||
|       "integrity": "sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "@webassemblyjs/ast": "1.11.6", | ||||
|         "@webassemblyjs/helper-buffer": "1.11.6", | ||||
|         "@webassemblyjs/ast": "1.12.1", | ||||
|         "@webassemblyjs/helper-buffer": "1.12.1", | ||||
|         "@webassemblyjs/helper-wasm-bytecode": "1.11.6", | ||||
|         "@webassemblyjs/wasm-gen": "1.11.6" | ||||
|         "@webassemblyjs/wasm-gen": "1.12.1" | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@webassemblyjs/ieee754": { | ||||
|       "version": "1.11.6", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz", | ||||
|       "integrity": "sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "@xtuc/ieee754": "^1.2.0" | ||||
|       } | ||||
| @@ -3751,6 +3742,7 @@ | ||||
|       "version": "1.11.6", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.6.tgz", | ||||
|       "integrity": "sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ==", | ||||
|       "license": "Apache-2.0", | ||||
|       "dependencies": { | ||||
|         "@xtuc/long": "4.2.2" | ||||
|       } | ||||
| @@ -3758,29 +3750,32 @@ | ||||
|     "node_modules/@webassemblyjs/utf8": { | ||||
|       "version": "1.11.6", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.6.tgz", | ||||
|       "integrity": "sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA==" | ||||
|       "integrity": "sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA==", | ||||
|       "license": "MIT" | ||||
|     }, | ||||
|     "node_modules/@webassemblyjs/wasm-edit": { | ||||
|       "version": "1.11.6", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz", | ||||
|       "integrity": "sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw==", | ||||
|       "version": "1.12.1", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz", | ||||
|       "integrity": "sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "@webassemblyjs/ast": "1.11.6", | ||||
|         "@webassemblyjs/helper-buffer": "1.11.6", | ||||
|         "@webassemblyjs/ast": "1.12.1", | ||||
|         "@webassemblyjs/helper-buffer": "1.12.1", | ||||
|         "@webassemblyjs/helper-wasm-bytecode": "1.11.6", | ||||
|         "@webassemblyjs/helper-wasm-section": "1.11.6", | ||||
|         "@webassemblyjs/wasm-gen": "1.11.6", | ||||
|         "@webassemblyjs/wasm-opt": "1.11.6", | ||||
|         "@webassemblyjs/wasm-parser": "1.11.6", | ||||
|         "@webassemblyjs/wast-printer": "1.11.6" | ||||
|         "@webassemblyjs/helper-wasm-section": "1.12.1", | ||||
|         "@webassemblyjs/wasm-gen": "1.12.1", | ||||
|         "@webassemblyjs/wasm-opt": "1.12.1", | ||||
|         "@webassemblyjs/wasm-parser": "1.12.1", | ||||
|         "@webassemblyjs/wast-printer": "1.12.1" | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@webassemblyjs/wasm-gen": { | ||||
|       "version": "1.11.6", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz", | ||||
|       "integrity": "sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA==", | ||||
|       "version": "1.12.1", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz", | ||||
|       "integrity": "sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "@webassemblyjs/ast": "1.11.6", | ||||
|         "@webassemblyjs/ast": "1.12.1", | ||||
|         "@webassemblyjs/helper-wasm-bytecode": "1.11.6", | ||||
|         "@webassemblyjs/ieee754": "1.11.6", | ||||
|         "@webassemblyjs/leb128": "1.11.6", | ||||
| @@ -3788,22 +3783,24 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@webassemblyjs/wasm-opt": { | ||||
|       "version": "1.11.6", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz", | ||||
|       "integrity": "sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g==", | ||||
|       "version": "1.12.1", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz", | ||||
|       "integrity": "sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "@webassemblyjs/ast": "1.11.6", | ||||
|         "@webassemblyjs/helper-buffer": "1.11.6", | ||||
|         "@webassemblyjs/wasm-gen": "1.11.6", | ||||
|         "@webassemblyjs/wasm-parser": "1.11.6" | ||||
|         "@webassemblyjs/ast": "1.12.1", | ||||
|         "@webassemblyjs/helper-buffer": "1.12.1", | ||||
|         "@webassemblyjs/wasm-gen": "1.12.1", | ||||
|         "@webassemblyjs/wasm-parser": "1.12.1" | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@webassemblyjs/wasm-parser": { | ||||
|       "version": "1.11.6", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz", | ||||
|       "integrity": "sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ==", | ||||
|       "version": "1.12.1", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz", | ||||
|       "integrity": "sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "@webassemblyjs/ast": "1.11.6", | ||||
|         "@webassemblyjs/ast": "1.12.1", | ||||
|         "@webassemblyjs/helper-api-error": "1.11.6", | ||||
|         "@webassemblyjs/helper-wasm-bytecode": "1.11.6", | ||||
|         "@webassemblyjs/ieee754": "1.11.6", | ||||
| @@ -3812,23 +3809,26 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@webassemblyjs/wast-printer": { | ||||
|       "version": "1.11.6", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz", | ||||
|       "integrity": "sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A==", | ||||
|       "version": "1.12.1", | ||||
|       "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz", | ||||
|       "integrity": "sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "@webassemblyjs/ast": "1.11.6", | ||||
|         "@webassemblyjs/ast": "1.12.1", | ||||
|         "@xtuc/long": "4.2.2" | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@xtuc/ieee754": { | ||||
|       "version": "1.2.0", | ||||
|       "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", | ||||
|       "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==" | ||||
|       "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==", | ||||
|       "license": "BSD-3-Clause" | ||||
|     }, | ||||
|     "node_modules/@xtuc/long": { | ||||
|       "version": "4.2.2", | ||||
|       "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", | ||||
|       "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==" | ||||
|       "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==", | ||||
|       "license": "Apache-2.0" | ||||
|     }, | ||||
|     "node_modules/accepts": { | ||||
|       "version": "1.3.8", | ||||
| @@ -3872,10 +3872,11 @@ | ||||
|         "node": ">=0.4.0" | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/acorn-import-assertions": { | ||||
|       "version": "1.9.0", | ||||
|       "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz", | ||||
|       "integrity": "sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==", | ||||
|     "node_modules/acorn-import-attributes": { | ||||
|       "version": "1.9.5", | ||||
|       "resolved": "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz", | ||||
|       "integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==", | ||||
|       "license": "MIT", | ||||
|       "peerDependencies": { | ||||
|         "acorn": "^8" | ||||
|       } | ||||
| @@ -4241,12 +4242,13 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/body-parser": { | ||||
|       "version": "1.20.1", | ||||
|       "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", | ||||
|       "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", | ||||
|       "version": "1.20.2", | ||||
|       "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", | ||||
|       "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "bytes": "3.1.2", | ||||
|         "content-type": "~1.0.4", | ||||
|         "content-type": "~1.0.5", | ||||
|         "debug": "2.6.9", | ||||
|         "depd": "2.0.0", | ||||
|         "destroy": "1.2.0", | ||||
| @@ -4254,7 +4256,7 @@ | ||||
|         "iconv-lite": "0.4.24", | ||||
|         "on-finished": "2.4.1", | ||||
|         "qs": "6.11.0", | ||||
|         "raw-body": "2.5.1", | ||||
|         "raw-body": "2.5.2", | ||||
|         "type-is": "~1.6.18", | ||||
|         "unpipe": "1.0.0" | ||||
|       }, | ||||
| @@ -4267,6 +4269,7 @@ | ||||
|       "version": "3.1.2", | ||||
|       "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", | ||||
|       "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", | ||||
|       "license": "MIT", | ||||
|       "engines": { | ||||
|         "node": ">= 0.8" | ||||
|       } | ||||
| @@ -4275,6 +4278,7 @@ | ||||
|       "version": "2.6.9", | ||||
|       "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", | ||||
|       "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "ms": "2.0.0" | ||||
|       } | ||||
| @@ -4282,7 +4286,8 @@ | ||||
|     "node_modules/body-parser/node_modules/ms": { | ||||
|       "version": "2.0.0", | ||||
|       "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", | ||||
|       "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" | ||||
|       "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", | ||||
|       "license": "MIT" | ||||
|     }, | ||||
|     "node_modules/bonjour-service": { | ||||
|       "version": "1.1.1", | ||||
| @@ -4331,11 +4336,12 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/braces": { | ||||
|       "version": "3.0.2", | ||||
|       "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", | ||||
|       "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", | ||||
|       "version": "3.0.3", | ||||
|       "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", | ||||
|       "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "fill-range": "^7.0.1" | ||||
|         "fill-range": "^7.1.1" | ||||
|       }, | ||||
|       "engines": { | ||||
|         "node": ">=8" | ||||
| @@ -4422,13 +4428,19 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/call-bind": { | ||||
|       "version": "1.0.5", | ||||
|       "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", | ||||
|       "integrity": "sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==", | ||||
|       "version": "1.0.7", | ||||
|       "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", | ||||
|       "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "es-define-property": "^1.0.0", | ||||
|         "es-errors": "^1.3.0", | ||||
|         "function-bind": "^1.1.2", | ||||
|         "get-intrinsic": "^1.2.1", | ||||
|         "set-function-length": "^1.1.1" | ||||
|         "get-intrinsic": "^1.2.4", | ||||
|         "set-function-length": "^1.2.1" | ||||
|       }, | ||||
|       "engines": { | ||||
|         "node": ">= 0.4" | ||||
|       }, | ||||
|       "funding": { | ||||
|         "url": "https://github.com/sponsors/ljharb" | ||||
| @@ -4922,6 +4934,7 @@ | ||||
|       "version": "1.0.5", | ||||
|       "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", | ||||
|       "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", | ||||
|       "license": "MIT", | ||||
|       "engines": { | ||||
|         "node": ">= 0.6" | ||||
|       } | ||||
| @@ -4932,9 +4945,10 @@ | ||||
|       "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" | ||||
|     }, | ||||
|     "node_modules/cookie": { | ||||
|       "version": "0.5.0", | ||||
|       "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", | ||||
|       "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", | ||||
|       "version": "0.6.0", | ||||
|       "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", | ||||
|       "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", | ||||
|       "license": "MIT", | ||||
|       "engines": { | ||||
|         "node": ">= 0.6" | ||||
|       } | ||||
| @@ -5961,16 +5975,20 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/define-data-property": { | ||||
|       "version": "1.1.1", | ||||
|       "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", | ||||
|       "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", | ||||
|       "version": "1.1.4", | ||||
|       "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", | ||||
|       "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "get-intrinsic": "^1.2.1", | ||||
|         "gopd": "^1.0.1", | ||||
|         "has-property-descriptors": "^1.0.0" | ||||
|         "es-define-property": "^1.0.0", | ||||
|         "es-errors": "^1.3.0", | ||||
|         "gopd": "^1.0.1" | ||||
|       }, | ||||
|       "engines": { | ||||
|         "node": ">= 0.4" | ||||
|       }, | ||||
|       "funding": { | ||||
|         "url": "https://github.com/sponsors/ljharb" | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/define-lazy-prop": { | ||||
| @@ -6301,9 +6319,10 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/enhanced-resolve": { | ||||
|       "version": "5.15.0", | ||||
|       "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz", | ||||
|       "integrity": "sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg==", | ||||
|       "version": "5.17.1", | ||||
|       "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz", | ||||
|       "integrity": "sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "graceful-fs": "^4.2.4", | ||||
|         "tapable": "^2.2.0" | ||||
| @@ -6331,6 +6350,27 @@ | ||||
|         "is-arrayish": "^0.2.1" | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/es-define-property": { | ||||
|       "version": "1.0.0", | ||||
|       "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", | ||||
|       "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "get-intrinsic": "^1.2.4" | ||||
|       }, | ||||
|       "engines": { | ||||
|         "node": ">= 0.4" | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/es-errors": { | ||||
|       "version": "1.3.0", | ||||
|       "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", | ||||
|       "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", | ||||
|       "license": "MIT", | ||||
|       "engines": { | ||||
|         "node": ">= 0.4" | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/es-module-lexer": { | ||||
|       "version": "1.4.1", | ||||
|       "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.4.1.tgz", | ||||
| @@ -6579,16 +6619,17 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/express": { | ||||
|       "version": "4.18.2", | ||||
|       "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", | ||||
|       "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", | ||||
|       "version": "4.19.2", | ||||
|       "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", | ||||
|       "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "accepts": "~1.3.8", | ||||
|         "array-flatten": "1.1.1", | ||||
|         "body-parser": "1.20.1", | ||||
|         "body-parser": "1.20.2", | ||||
|         "content-disposition": "0.5.4", | ||||
|         "content-type": "~1.0.4", | ||||
|         "cookie": "0.5.0", | ||||
|         "cookie": "0.6.0", | ||||
|         "cookie-signature": "1.0.6", | ||||
|         "debug": "2.6.9", | ||||
|         "depd": "2.0.0", | ||||
| @@ -6825,9 +6866,10 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/fill-range": { | ||||
|       "version": "7.0.1", | ||||
|       "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", | ||||
|       "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", | ||||
|       "version": "7.1.1", | ||||
|       "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", | ||||
|       "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "to-regex-range": "^5.0.1" | ||||
|       }, | ||||
| @@ -6904,15 +6946,16 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/follow-redirects": { | ||||
|       "version": "1.15.3", | ||||
|       "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", | ||||
|       "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==", | ||||
|       "version": "1.15.8", | ||||
|       "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.8.tgz", | ||||
|       "integrity": "sha512-xgrmBhBToVKay1q2Tao5LI26B83UhrB/vM1avwVSDzt8rx3rO6AizBAaF46EgksTVr+rFTQaqZZ9MVBfUe4nig==", | ||||
|       "funding": [ | ||||
|         { | ||||
|           "type": "individual", | ||||
|           "url": "https://github.com/sponsors/RubenVerborgh" | ||||
|         } | ||||
|       ], | ||||
|       "license": "MIT", | ||||
|       "engines": { | ||||
|         "node": ">=4.0" | ||||
|       }, | ||||
| @@ -7139,15 +7182,20 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/get-intrinsic": { | ||||
|       "version": "1.2.2", | ||||
|       "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", | ||||
|       "integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", | ||||
|       "version": "1.2.4", | ||||
|       "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", | ||||
|       "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "es-errors": "^1.3.0", | ||||
|         "function-bind": "^1.1.2", | ||||
|         "has-proto": "^1.0.1", | ||||
|         "has-symbols": "^1.0.3", | ||||
|         "hasown": "^2.0.0" | ||||
|       }, | ||||
|       "engines": { | ||||
|         "node": ">= 0.4" | ||||
|       }, | ||||
|       "funding": { | ||||
|         "url": "https://github.com/sponsors/ljharb" | ||||
|       } | ||||
| @@ -7206,7 +7254,8 @@ | ||||
|     "node_modules/glob-to-regexp": { | ||||
|       "version": "0.4.1", | ||||
|       "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", | ||||
|       "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==" | ||||
|       "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", | ||||
|       "license": "BSD-2-Clause" | ||||
|     }, | ||||
|     "node_modules/global-dirs": { | ||||
|       "version": "3.0.1", | ||||
| @@ -7405,11 +7454,12 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/has-property-descriptors": { | ||||
|       "version": "1.0.1", | ||||
|       "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", | ||||
|       "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", | ||||
|       "version": "1.0.2", | ||||
|       "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", | ||||
|       "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "get-intrinsic": "^1.2.2" | ||||
|         "es-define-property": "^1.0.0" | ||||
|       }, | ||||
|       "funding": { | ||||
|         "url": "https://github.com/sponsors/ljharb" | ||||
| @@ -7949,6 +7999,7 @@ | ||||
|       "version": "0.4.24", | ||||
|       "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", | ||||
|       "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "safer-buffer": ">= 2.1.2 < 3" | ||||
|       }, | ||||
| @@ -8258,6 +8309,7 @@ | ||||
|       "version": "7.0.0", | ||||
|       "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", | ||||
|       "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", | ||||
|       "license": "MIT", | ||||
|       "engines": { | ||||
|         "node": ">=0.12.0" | ||||
|       } | ||||
| @@ -9105,6 +9157,7 @@ | ||||
|       "version": "0.3.0", | ||||
|       "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", | ||||
|       "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", | ||||
|       "license": "MIT", | ||||
|       "engines": { | ||||
|         "node": ">= 0.6" | ||||
|       } | ||||
| @@ -11285,11 +11338,12 @@ | ||||
|       ] | ||||
|     }, | ||||
|     "node_modules/micromatch": { | ||||
|       "version": "4.0.5", | ||||
|       "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", | ||||
|       "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", | ||||
|       "version": "4.0.8", | ||||
|       "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", | ||||
|       "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "braces": "^3.0.2", | ||||
|         "braces": "^3.0.3", | ||||
|         "picomatch": "^2.3.1" | ||||
|       }, | ||||
|       "engines": { | ||||
| @@ -11543,9 +11597,13 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/object-inspect": { | ||||
|       "version": "1.13.1", | ||||
|       "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", | ||||
|       "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", | ||||
|       "version": "1.13.2", | ||||
|       "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz", | ||||
|       "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==", | ||||
|       "license": "MIT", | ||||
|       "engines": { | ||||
|         "node": ">= 0.4" | ||||
|       }, | ||||
|       "funding": { | ||||
|         "url": "https://github.com/sponsors/ljharb" | ||||
|       } | ||||
| @@ -12684,6 +12742,7 @@ | ||||
|       "version": "6.11.0", | ||||
|       "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", | ||||
|       "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", | ||||
|       "license": "BSD-3-Clause", | ||||
|       "dependencies": { | ||||
|         "side-channel": "^1.0.4" | ||||
|       }, | ||||
| @@ -12749,9 +12808,10 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/raw-body": { | ||||
|       "version": "2.5.1", | ||||
|       "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", | ||||
|       "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", | ||||
|       "version": "2.5.2", | ||||
|       "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", | ||||
|       "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "bytes": "3.1.2", | ||||
|         "http-errors": "2.0.0", | ||||
| @@ -12766,6 +12826,7 @@ | ||||
|       "version": "3.1.2", | ||||
|       "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", | ||||
|       "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", | ||||
|       "license": "MIT", | ||||
|       "engines": { | ||||
|         "node": ">= 0.8" | ||||
|       } | ||||
| @@ -13886,14 +13947,17 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/set-function-length": { | ||||
|       "version": "1.1.1", | ||||
|       "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.1.1.tgz", | ||||
|       "integrity": "sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ==", | ||||
|       "version": "1.2.2", | ||||
|       "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", | ||||
|       "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "define-data-property": "^1.1.1", | ||||
|         "get-intrinsic": "^1.2.1", | ||||
|         "define-data-property": "^1.1.4", | ||||
|         "es-errors": "^1.3.0", | ||||
|         "function-bind": "^1.1.2", | ||||
|         "get-intrinsic": "^1.2.4", | ||||
|         "gopd": "^1.0.1", | ||||
|         "has-property-descriptors": "^1.0.0" | ||||
|         "has-property-descriptors": "^1.0.2" | ||||
|       }, | ||||
|       "engines": { | ||||
|         "node": ">= 0.4" | ||||
| @@ -13964,13 +14028,18 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/side-channel": { | ||||
|       "version": "1.0.4", | ||||
|       "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", | ||||
|       "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", | ||||
|       "version": "1.0.6", | ||||
|       "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", | ||||
|       "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "call-bind": "^1.0.0", | ||||
|         "get-intrinsic": "^1.0.2", | ||||
|         "object-inspect": "^1.9.0" | ||||
|         "call-bind": "^1.0.7", | ||||
|         "es-errors": "^1.3.0", | ||||
|         "get-intrinsic": "^1.2.4", | ||||
|         "object-inspect": "^1.13.1" | ||||
|       }, | ||||
|       "engines": { | ||||
|         "node": ">= 0.4" | ||||
|       }, | ||||
|       "funding": { | ||||
|         "url": "https://github.com/sponsors/ljharb" | ||||
| @@ -14393,15 +14462,16 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/terser-webpack-plugin": { | ||||
|       "version": "5.3.9", | ||||
|       "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.9.tgz", | ||||
|       "integrity": "sha512-ZuXsqE07EcggTWQjXUj+Aot/OMcD0bMKGgF63f7UxYcu5/AJF53aIpK1YoP5xR9l6s/Hy2b+t1AM0bLNPRuhwA==", | ||||
|       "version": "5.3.10", | ||||
|       "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz", | ||||
|       "integrity": "sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "@jridgewell/trace-mapping": "^0.3.17", | ||||
|         "@jridgewell/trace-mapping": "^0.3.20", | ||||
|         "jest-worker": "^27.4.5", | ||||
|         "schema-utils": "^3.1.1", | ||||
|         "serialize-javascript": "^6.0.1", | ||||
|         "terser": "^5.16.8" | ||||
|         "terser": "^5.26.0" | ||||
|       }, | ||||
|       "engines": { | ||||
|         "node": ">= 10.13.0" | ||||
| @@ -14534,6 +14604,7 @@ | ||||
|       "version": "5.0.1", | ||||
|       "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", | ||||
|       "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "is-number": "^7.0.0" | ||||
|       }, | ||||
| @@ -14603,6 +14674,7 @@ | ||||
|       "version": "1.6.18", | ||||
|       "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", | ||||
|       "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "media-typer": "0.3.0", | ||||
|         "mime-types": "~2.1.24" | ||||
| @@ -14615,6 +14687,7 @@ | ||||
|       "version": "1.52.0", | ||||
|       "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", | ||||
|       "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", | ||||
|       "license": "MIT", | ||||
|       "engines": { | ||||
|         "node": ">= 0.6" | ||||
|       } | ||||
| @@ -14623,6 +14696,7 @@ | ||||
|       "version": "2.1.35", | ||||
|       "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", | ||||
|       "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "mime-db": "1.52.0" | ||||
|       }, | ||||
| @@ -15154,9 +15228,10 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/watchpack": { | ||||
|       "version": "2.4.0", | ||||
|       "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.0.tgz", | ||||
|       "integrity": "sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==", | ||||
|       "version": "2.4.2", | ||||
|       "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.2.tgz", | ||||
|       "integrity": "sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "glob-to-regexp": "^0.4.1", | ||||
|         "graceful-fs": "^4.1.2" | ||||
| @@ -15188,33 +15263,33 @@ | ||||
|       "integrity": "sha512-PgF341avzqyx60neE9DD+XS26MMNMoUQRz9NOZwW32nPQrF6p77f1htcnjBSEV8BGMKZ16choqUG4hyI0Hx7mA==" | ||||
|     }, | ||||
|     "node_modules/webpack": { | ||||
|       "version": "5.89.0", | ||||
|       "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.89.0.tgz", | ||||
|       "integrity": "sha512-qyfIC10pOr70V+jkmud8tMfajraGCZMBWJtrmuBymQKCrLTRejBI8STDp1MCyZu/QTdZSeacCQYpYNQVOzX5kw==", | ||||
|       "version": "5.94.0", | ||||
|       "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.94.0.tgz", | ||||
|       "integrity": "sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "@types/eslint-scope": "^3.7.3", | ||||
|         "@types/estree": "^1.0.0", | ||||
|         "@webassemblyjs/ast": "^1.11.5", | ||||
|         "@webassemblyjs/wasm-edit": "^1.11.5", | ||||
|         "@webassemblyjs/wasm-parser": "^1.11.5", | ||||
|         "@types/estree": "^1.0.5", | ||||
|         "@webassemblyjs/ast": "^1.12.1", | ||||
|         "@webassemblyjs/wasm-edit": "^1.12.1", | ||||
|         "@webassemblyjs/wasm-parser": "^1.12.1", | ||||
|         "acorn": "^8.7.1", | ||||
|         "acorn-import-assertions": "^1.9.0", | ||||
|         "browserslist": "^4.14.5", | ||||
|         "acorn-import-attributes": "^1.9.5", | ||||
|         "browserslist": "^4.21.10", | ||||
|         "chrome-trace-event": "^1.0.2", | ||||
|         "enhanced-resolve": "^5.15.0", | ||||
|         "enhanced-resolve": "^5.17.1", | ||||
|         "es-module-lexer": "^1.2.1", | ||||
|         "eslint-scope": "5.1.1", | ||||
|         "events": "^3.2.0", | ||||
|         "glob-to-regexp": "^0.4.1", | ||||
|         "graceful-fs": "^4.2.9", | ||||
|         "graceful-fs": "^4.2.11", | ||||
|         "json-parse-even-better-errors": "^2.3.1", | ||||
|         "loader-runner": "^4.2.0", | ||||
|         "mime-types": "^2.1.27", | ||||
|         "neo-async": "^2.6.2", | ||||
|         "schema-utils": "^3.2.0", | ||||
|         "tapable": "^2.1.1", | ||||
|         "terser-webpack-plugin": "^5.3.7", | ||||
|         "watchpack": "^2.4.0", | ||||
|         "terser-webpack-plugin": "^5.3.10", | ||||
|         "watchpack": "^2.4.1", | ||||
|         "webpack-sources": "^3.2.3" | ||||
|       }, | ||||
|       "bin": { | ||||
| @@ -15268,9 +15343,10 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/webpack-dev-middleware": { | ||||
|       "version": "5.3.3", | ||||
|       "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz", | ||||
|       "integrity": "sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA==", | ||||
|       "version": "5.3.4", | ||||
|       "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz", | ||||
|       "integrity": "sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q==", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "colorette": "^2.0.10", | ||||
|         "memfs": "^3.4.3", | ||||
| @@ -15375,9 +15451,10 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/webpack-dev-server/node_modules/ws": { | ||||
|       "version": "8.15.1", | ||||
|       "resolved": "https://registry.npmjs.org/ws/-/ws-8.15.1.tgz", | ||||
|       "integrity": "sha512-W5OZiCjXEmk0yZ66ZN82beM5Sz7l7coYxpRkzS+p9PP+ToQry8szKh+61eNktr7EA9DOwvFGhfC605jDHbP6QQ==", | ||||
|       "version": "8.18.0", | ||||
|       "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", | ||||
|       "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", | ||||
|       "license": "MIT", | ||||
|       "engines": { | ||||
|         "node": ">=10.0.0" | ||||
|       }, | ||||
| @@ -15619,9 +15696,10 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/ws": { | ||||
|       "version": "7.5.9", | ||||
|       "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz", | ||||
|       "integrity": "sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==", | ||||
|       "version": "7.5.10", | ||||
|       "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz", | ||||
|       "integrity": "sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==", | ||||
|       "license": "MIT", | ||||
|       "engines": { | ||||
|         "node": ">=8.3.0" | ||||
|       }, | ||||
|   | ||||
| @@ -15,6 +15,7 @@ | ||||
|   }, | ||||
|   "dependencies": { | ||||
|     "@docusaurus/core": "^3.4.0", | ||||
|     "@docusaurus/plugin-google-gtag": "^3.4.0", | ||||
|     "@docusaurus/preset-classic": "^3.4.0", | ||||
|     "@docusaurus/theme-mermaid": "^3.4.0", | ||||
|     "@mdx-js/react": "^3.0.0", | ||||
|   | ||||
							
								
								
									
										41
									
								
								docs/src/components/BuyMeACoffee/index.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										41
									
								
								docs/src/components/BuyMeACoffee/index.js
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,41 @@ | ||||
| import React from "react"; | ||||
|  | ||||
| class BuyMeACoffee extends React.Component { | ||||
|   constructor(props) { | ||||
|     super(props) | ||||
|     let script = document.createElement("script"); | ||||
|     script.src = 'https://cdnjs.buymeacoffee.com/1.0.0/widget.prod.min.js'; | ||||
|     script.dataset.name = 'BMC-Widget'; | ||||
|     script.dataset.cfasync = 'false'; | ||||
|     script.dataset.id = 'amithkoujalgi'; | ||||
|     script.dataset.description = 'Support me on Buy me a coffee!'; | ||||
|     script.dataset.message = 'If you like my work and want to say thanks, or encourage me to do more, you can buy me a coffee! 😊'; | ||||
|     script.dataset.color = '#2e8555'; | ||||
|     script.dataset.position = 'Right'; | ||||
|     script.dataset.x_margin = '18'; | ||||
|     script.dataset.y_margin = '18'; | ||||
|     script.async = true | ||||
|  | ||||
|     script.onload = function () { | ||||
|       let evt = document.createEvent('Event'); | ||||
|       evt.initEvent('DOMContentLoaded', false, false); | ||||
|       window.dispatchEvent(evt); | ||||
|     } | ||||
|     this.script = script | ||||
|   } | ||||
|  | ||||
|   componentDidMount() { | ||||
|     document.head.appendChild(this.script) | ||||
|   } | ||||
|  | ||||
|   // componentWillUnmount() { | ||||
|   //   document.head.removeChild(this.script); | ||||
|   //   document.body.removeChild(document.getElementById("bmc-wbtn")) | ||||
|   // } | ||||
|  | ||||
|   render() { | ||||
|     return null | ||||
|   } | ||||
| } | ||||
|  | ||||
| export default BuyMeACoffee; | ||||
| @@ -37,4 +37,12 @@ div > h1, | ||||
| header > h1, | ||||
| h2 > a { | ||||
|     font-size: 2rem !important; | ||||
| } | ||||
|  | ||||
| #bmc-wbtn{ | ||||
|     bottom: 15px; | ||||
| } | ||||
|  | ||||
| #bmc-wbtn + div{ | ||||
|     bottom:15px; | ||||
| } | ||||
| @@ -3,38 +3,43 @@ import Link from '@docusaurus/Link'; | ||||
| import useDocusaurusContext from '@docusaurus/useDocusaurusContext'; | ||||
| import Layout from '@theme/Layout'; | ||||
| import HomepageFeatures from '@site/src/components/HomepageFeatures'; | ||||
|  | ||||
| import BuyMeACoffee from '@site/src/components/BuyMeACoffee'; | ||||
| import Heading from '@theme/Heading'; | ||||
| import styles from './index.module.css'; | ||||
| import BrowserOnly from '@docusaurus/BrowserOnly'; | ||||
|  | ||||
| function HomepageHeader() { | ||||
|     const {siteConfig} = useDocusaurusContext(); | ||||
|     return (<header className={clsx('hero hero--primary', styles.heroBanner)}> | ||||
|         <div className="container"> | ||||
|             <Heading as="h1" className="hero__title"> | ||||
|                 {siteConfig.title} | ||||
|             </Heading> | ||||
|             <img src="img/logo.svg" alt="Ollama4j Logo" className={styles.logo} style={{maxWidth: '20vh'}}/> | ||||
|             <p className="hero__subtitle">{siteConfig.tagline}</p> | ||||
|             <div className={styles.buttons}> | ||||
|                 <Link | ||||
|                     className="button button--secondary button--lg" | ||||
|                     to="/intro"> | ||||
|                     Getting Started | ||||
|                 </Link> | ||||
|             </div> | ||||
|         </div> | ||||
|     </header>); | ||||
|   const {siteConfig} = useDocusaurusContext(); | ||||
|   return (<header className={clsx('hero hero--primary', styles.heroBanner)}> | ||||
|     <div className="container"> | ||||
|       <Heading as="h1" className="hero__title"> | ||||
|         {siteConfig.title} | ||||
|       </Heading> | ||||
|       <img src="img/logo.svg" alt="Ollama4j Logo" className={styles.logo} | ||||
|            style={{maxWidth: '20vh'}}/> | ||||
|       <p className="hero__subtitle">{siteConfig.tagline}</p> | ||||
|       <div className={styles.buttons}> | ||||
|         <Link | ||||
|             className="button button--secondary button--lg" | ||||
|             to="/intro"> | ||||
|           Getting Started | ||||
|         </Link> | ||||
|       </div> | ||||
|     </div> | ||||
|   </header>); | ||||
| } | ||||
|  | ||||
| export default function Home() { | ||||
|     const {siteConfig} = useDocusaurusContext(); | ||||
|     return (<Layout | ||||
|         title={`Hello from ${siteConfig.title}`} | ||||
|         description="Description will go into a meta tag in <head />"> | ||||
|         <HomepageHeader/> | ||||
|         <main> | ||||
|             <HomepageFeatures/> | ||||
|         </main> | ||||
|     </Layout>); | ||||
| } | ||||
|   const {siteConfig} = useDocusaurusContext(); | ||||
|   return (<Layout | ||||
|       title={`Hello from ${siteConfig.title}`} | ||||
|       description="Description will go into a meta tag in <head />"> | ||||
|     <HomepageHeader/> | ||||
|     <main> | ||||
|       <HomepageFeatures/> | ||||
|       <BrowserOnly> | ||||
|         {() => <BuyMeACoffee />} | ||||
|       </BrowserOnly> | ||||
|     </main> | ||||
|   </Layout>); | ||||
| } | ||||
| @@ -1,68 +0,0 @@ | ||||
| ## This workflow will build a package using Maven and then publish it to GitHub packages when a release is created | ||||
| ## For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path | ||||
| # | ||||
| #name: Test and Publish Package | ||||
| # | ||||
| ##on: | ||||
| ##  release: | ||||
| ##    types: [ "created" ] | ||||
| # | ||||
| #on: | ||||
| #  push: | ||||
| #    branches: [ "main" ] | ||||
| #  workflow_dispatch: | ||||
| # | ||||
| #jobs: | ||||
| #  build: | ||||
| #    runs-on: ubuntu-latest | ||||
| #    permissions: | ||||
| #      contents: write | ||||
| #      packages: write | ||||
| #    steps: | ||||
| #      - uses: actions/checkout@v3 | ||||
| #      - name: Set up JDK 11 | ||||
| #        uses: actions/setup-java@v3 | ||||
| #        with: | ||||
| #          java-version: '11' | ||||
| #          distribution: 'adopt-hotspot' | ||||
| #          server-id: github # Value of the distributionManagement/repository/id field of the pom.xml | ||||
| #          settings-path: ${{ github.workspace }} # location for the settings.xml file | ||||
| #      - name: Build with Maven | ||||
| #        run: mvn --file pom.xml -U clean package -Punit-tests | ||||
| #      - name: Set up Apache Maven Central (Overwrite settings.xml) | ||||
| #        uses: actions/setup-java@v3 | ||||
| #        with: # running setup-java again overwrites the settings.xml | ||||
| #          java-version: '11' | ||||
| #          distribution: 'adopt-hotspot' | ||||
| #          cache: 'maven' | ||||
| #          server-id: ossrh | ||||
| #          server-username: MAVEN_USERNAME | ||||
| #          server-password: MAVEN_PASSWORD | ||||
| #          gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }} | ||||
| #          gpg-passphrase: MAVEN_GPG_PASSPHRASE | ||||
| #      - name: Set up Maven cache | ||||
| #        uses: actions/cache@v3 | ||||
| #        with: | ||||
| #          path: ~/.m2/repository | ||||
| #          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} | ||||
| #          restore-keys: | | ||||
| #            ${{ runner.os }}-maven- | ||||
| #      - name: Build | ||||
| #        run: mvn -B -ntp clean install | ||||
| #      - name: Upload coverage reports to Codecov | ||||
| #        uses: codecov/codecov-action@v3 | ||||
| #        env: | ||||
| #          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} | ||||
| #      - name: Publish to GitHub Packages Apache Maven | ||||
| #        #        if: > | ||||
| #        #          github.event_name != 'pull_request' && | ||||
| #        #          github.ref_name == 'main' && | ||||
| #        #          contains(github.event.head_commit.message, 'release') | ||||
| #        run: | | ||||
| #          git config --global user.email "koujalgi.amith@gmail.com" | ||||
| #          git config --global user.name "amithkoujalgi" | ||||
| #          mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform | ||||
| #        env: | ||||
| #          MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }} | ||||
| #          MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }} | ||||
| #          MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} | ||||
							
								
								
									
										181
									
								
								pom.xml
									
									
									
									
									
								
							
							
						
						
									
										181
									
								
								pom.xml
									
									
									
									
									
								
							| @@ -3,13 +3,13 @@ | ||||
|          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> | ||||
|     <modelVersion>4.0.0</modelVersion> | ||||
|  | ||||
|     <groupId>io.github.amithkoujalgi</groupId> | ||||
|     <groupId>io.github.ollama4j</groupId> | ||||
|     <artifactId>ollama4j</artifactId> | ||||
|     <version>ollama4j-revision</version> | ||||
|  | ||||
|     <name>Ollama4j</name> | ||||
|     <description>Java library for interacting with Ollama API.</description> | ||||
|     <url>https://github.com/amithkoujalgi/ollama4j</url> | ||||
|     <url>https://github.com/ollama4j/ollama4j</url> | ||||
|     <packaging>jar</packaging> | ||||
|  | ||||
|     <properties> | ||||
| @@ -33,15 +33,15 @@ | ||||
|     <licenses> | ||||
|         <license> | ||||
|             <name>MIT License</name> | ||||
|             <url>https://raw.githubusercontent.com/amithkoujalgi/ollama4j/main/LICENSE</url> | ||||
|             <url>https://raw.githubusercontent.com/ollama4j/ollama4j/main/LICENSE</url> | ||||
|         </license> | ||||
|     </licenses> | ||||
|  | ||||
|     <scm> | ||||
|         <connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection> | ||||
|         <developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection> | ||||
|         <url>https://github.com/amithkoujalgi/ollama4j</url> | ||||
|         <tag>v1.0.16</tag> | ||||
|         <connection>scm:git:git@github.com:ollama4j/ollama4j.git</connection> | ||||
|         <developerConnection>scm:git:https://github.com/ollama4j/ollama4j.git</developerConnection> | ||||
|         <url>https://github.com/ollama4j/ollama4j</url> | ||||
|         <tag>ollama4j-revision</tag> | ||||
|     </scm> | ||||
|  | ||||
|     <build> | ||||
| @@ -63,6 +63,10 @@ | ||||
|                 <groupId>org.apache.maven.plugins</groupId> | ||||
|                 <artifactId>maven-javadoc-plugin</artifactId> | ||||
|                 <version>3.5.0</version> | ||||
|                 <configuration> | ||||
|                     <!-- to disable the "missing" warnings. Remove the doclint to enable warnings--> | ||||
|                     <doclint>all,-missing</doclint> | ||||
|                 </configuration> | ||||
|                 <executions> | ||||
|                     <execution> | ||||
|                         <id>attach-javadocs</id> | ||||
| @@ -72,27 +76,7 @@ | ||||
|                     </execution> | ||||
|                 </executions> | ||||
|             </plugin> | ||||
|             <!--            <plugin>--> | ||||
|             <!--                <groupId>org.apache.maven.plugins</groupId>--> | ||||
|             <!--                <artifactId>maven-gpg-plugin</artifactId>--> | ||||
|             <!--                <version>1.5</version>--> | ||||
|             <!--                <executions>--> | ||||
|             <!--                    <execution>--> | ||||
|             <!--                        <id>sign-artifacts</id>--> | ||||
|             <!--                        <phase>verify</phase>--> | ||||
|             <!--                        <goals>--> | ||||
|             <!--                            <goal>sign</goal>--> | ||||
|             <!--                        </goals>--> | ||||
|             <!--                        <configuration>--> | ||||
|             <!--                            <!– This is necessary for gpg to not try to use the pinentry programs –>--> | ||||
|             <!--                            <gpgArguments>--> | ||||
|             <!--                                <arg>--pinentry-mode</arg>--> | ||||
|             <!--                                <arg>loopback</arg>--> | ||||
|             <!--                            </gpgArguments>--> | ||||
|             <!--                        </configuration>--> | ||||
|             <!--                    </execution>--> | ||||
|             <!--                </executions>--> | ||||
|             <!--            </plugin>--> | ||||
|  | ||||
|             <!-- Surefire Plugin for Unit Tests --> | ||||
|             <plugin> | ||||
|                 <groupId>org.apache.maven.plugins</groupId> | ||||
| @@ -129,15 +113,23 @@ | ||||
|                     </execution> | ||||
|                 </executions> | ||||
|             </plugin> | ||||
|             <!--            <plugin>--> | ||||
|             <!--                <groupId>org.apache.maven.plugins</groupId>--> | ||||
|             <!--                <artifactId>maven-release-plugin</artifactId>--> | ||||
|             <!--                <version>3.0.1</version>--> | ||||
|             <!--                <configuration>--> | ||||
|             <!--                    <!–                    <goals>install</goals>–>--> | ||||
|             <!--                    <tagNameFormat>v@{project.version}</tagNameFormat>--> | ||||
|             <!--                </configuration>--> | ||||
|             <!--            </plugin>--> | ||||
|  | ||||
|  | ||||
|             <plugin> | ||||
|                 <groupId>org.apache.maven.plugins</groupId> | ||||
|                 <artifactId>maven-gpg-plugin</artifactId> | ||||
|                 <version>1.5</version> | ||||
|                 <executions> | ||||
|                     <execution> | ||||
|                         <id>sign-artifacts</id> | ||||
|                         <phase>verify</phase> | ||||
|                         <goals> | ||||
|                             <goal>sign</goal> | ||||
|                         </goals> | ||||
|                     </execution> | ||||
|                 </executions> | ||||
|             </plugin> | ||||
|  | ||||
|         </plugins> | ||||
|     </build> | ||||
|  | ||||
| @@ -148,6 +140,11 @@ | ||||
|             <version>${lombok.version}</version> | ||||
|             <scope>provided</scope> | ||||
|         </dependency> | ||||
|         <dependency> | ||||
|             <groupId>org.jsoup</groupId> | ||||
|             <artifactId>jsoup</artifactId> | ||||
|             <version>1.18.1</version> | ||||
|         </dependency> | ||||
|         <dependency> | ||||
|             <groupId>com.fasterxml.jackson.core</groupId> | ||||
|             <artifactId>jackson-databind</artifactId> | ||||
| @@ -189,27 +186,39 @@ | ||||
|         </dependency> | ||||
|     </dependencies> | ||||
|  | ||||
|     <!--    <distributionManagement>--> | ||||
|     <!--        <snapshotRepository>--> | ||||
|     <!--            <id>ossrh</id>--> | ||||
|     <!--            <url>https://s01.oss.sonatype.org/content/repositories/snapshots</url>--> | ||||
|     <!--        </snapshotRepository>--> | ||||
|     <!--        <repository>--> | ||||
|     <!--            <id>ossrh</id>--> | ||||
|     <!--            <url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2</url>--> | ||||
|     <!--        </repository>--> | ||||
|     <!--    </distributionManagement>--> | ||||
|  | ||||
|     <!--    Replaced publishing packages to GitHub Packages instead of Maven central --> | ||||
|     <distributionManagement> | ||||
|         <repository> | ||||
|             <id>github</id> | ||||
|             <name>GitHub Packages</name> | ||||
|             <url>https://maven.pkg.github.com/amithkoujalgi/ollama4j</url> | ||||
|             <id>mvn-repo-id</id> | ||||
|         </repository> | ||||
|     </distributionManagement> | ||||
|  | ||||
|     <profiles> | ||||
|         <profile> | ||||
|             <id>ossrh</id> | ||||
|             <activation> | ||||
|                 <activeByDefault>true</activeByDefault> | ||||
|             </activation> | ||||
|             <properties> | ||||
|                 <gpg.executable>gpg2</gpg.executable> | ||||
|                 <test.env>unit</test.env> | ||||
|                 <skipUnitTests>false</skipUnitTests> | ||||
|                 <skipIntegrationTests>true</skipIntegrationTests> | ||||
|             </properties> | ||||
|             <build> | ||||
|                 <plugins> | ||||
|                     <plugin> | ||||
|                         <groupId>org.sonatype.central</groupId> | ||||
|                         <artifactId>central-publishing-maven-plugin</artifactId> | ||||
|                         <version>0.5.0</version> | ||||
|                         <extensions>true</extensions> | ||||
|                         <configuration> | ||||
|                             <publishingServerId>mvn-repo-id</publishingServerId> | ||||
|                             <autoPublish>true</autoPublish> | ||||
|                         </configuration> | ||||
|                     </plugin> | ||||
|                 </plugins> | ||||
|             </build> | ||||
|         </profile> | ||||
|         <profile> | ||||
|             <id>unit-tests</id> | ||||
|             <properties> | ||||
| @@ -218,7 +227,7 @@ | ||||
|                 <skipIntegrationTests>true</skipIntegrationTests> | ||||
|             </properties> | ||||
|             <activation> | ||||
|                 <activeByDefault>true</activeByDefault> | ||||
|                 <activeByDefault>false</activeByDefault> | ||||
|             </activation> | ||||
|             <build> | ||||
|                 <plugins> | ||||
| @@ -261,39 +270,39 @@ | ||||
|             </properties> | ||||
|             <build> | ||||
|                 <plugins> | ||||
|                     <!--                    <plugin>--> | ||||
|                     <!--                        <groupId>org.apache.maven.plugins</groupId>--> | ||||
|                     <!--                        <artifactId>maven-gpg-plugin</artifactId>--> | ||||
|                     <!--                        <version>3.1.0</version>--> | ||||
|                     <!--                        <executions>--> | ||||
|                     <!--                            <execution>--> | ||||
|                     <!--                                <id>sign-artifacts</id>--> | ||||
|                     <!--                                <phase>verify</phase>--> | ||||
|                     <!--                                <goals>--> | ||||
|                     <!--                                    <goal>sign</goal>--> | ||||
|                     <!--                                </goals>--> | ||||
|                     <!--                                <configuration>--> | ||||
|                     <!--                                    <!– Prevent gpg from using pinentry programs. Fixes:--> | ||||
|                     <!--                                         gpg: signing failed: Inappropriate ioctl for device –>--> | ||||
|                     <!--                                    <gpgArguments>--> | ||||
|                     <!--                                        <arg>--pinentry-mode</arg>--> | ||||
|                     <!--                                        <arg>loopback</arg>--> | ||||
|                     <!--                                    </gpgArguments>--> | ||||
|                     <!--                                </configuration>--> | ||||
|                     <!--                            </execution>--> | ||||
|                     <!--                        </executions>--> | ||||
|                     <!--                    </plugin>--> | ||||
|                     <!--                    <plugin>--> | ||||
|                     <!--                        <groupId>org.sonatype.plugins</groupId>--> | ||||
|                     <!--                        <artifactId>nexus-staging-maven-plugin</artifactId>--> | ||||
|                     <!--                        <version>1.6.13</version>--> | ||||
|                     <!--                        <extensions>true</extensions>--> | ||||
|                     <!--                        <configuration>--> | ||||
|                     <!--                            <serverId>ossrh</serverId>--> | ||||
|                     <!--                            <nexusUrl>https://s01.oss.sonatype.org/</nexusUrl>--> | ||||
|                     <!--                            <autoReleaseAfterClose>true</autoReleaseAfterClose>--> | ||||
|                     <!--                        </configuration>--> | ||||
|                     <!--                    </plugin>--> | ||||
|                     <plugin> | ||||
|                         <groupId>org.apache.maven.plugins</groupId> | ||||
|                         <artifactId>maven-gpg-plugin</artifactId> | ||||
|                         <version>3.1.0</version> | ||||
|                         <executions> | ||||
|                             <execution> | ||||
|                                 <id>sign-artifacts</id> | ||||
|                                 <phase>verify</phase> | ||||
|                                 <goals> | ||||
|                                     <goal>sign</goal> | ||||
|                                 </goals> | ||||
|                                 <configuration> | ||||
|                                     <!-- Prevent gpg from using pinentry programs. Fixes: | ||||
|                                          gpg: signing failed: Inappropriate ioctl for device --> | ||||
|                                     <gpgArguments> | ||||
|                                         <arg>--pinentry-mode</arg> | ||||
|                                         <arg>loopback</arg> | ||||
|                                     </gpgArguments> | ||||
|                                 </configuration> | ||||
|                             </execution> | ||||
|                         </executions> | ||||
|                     </plugin> | ||||
|                     <plugin> | ||||
|                         <groupId>org.sonatype.plugins</groupId> | ||||
|                         <artifactId>nexus-staging-maven-plugin</artifactId> | ||||
|                         <version>1.6.13</version> | ||||
|                         <extensions>true</extensions> | ||||
|                         <configuration> | ||||
|                             <serverId>ossrh</serverId> | ||||
|                             <nexusUrl>https://s01.oss.sonatype.org/</nexusUrl> | ||||
|                             <autoReleaseAfterClose>true</autoReleaseAfterClose> | ||||
|                         </configuration> | ||||
|                     </plugin> | ||||
|  | ||||
|                     <plugin> | ||||
|                         <groupId>org.jacoco</groupId> | ||||
|   | ||||
| @@ -1,624 +0,0 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core; | ||||
|  | ||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.*; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.*; | ||||
| import io.github.amithkoujalgi.ollama4j.core.tools.*; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||
| import org.slf4j.Logger; | ||||
| import org.slf4j.LoggerFactory; | ||||
|  | ||||
| import java.io.*; | ||||
| import java.net.URI; | ||||
| import java.net.URISyntaxException; | ||||
| import java.net.http.HttpClient; | ||||
| import java.net.http.HttpConnectTimeoutException; | ||||
| import java.net.http.HttpRequest; | ||||
| import java.net.http.HttpResponse; | ||||
| import java.nio.charset.StandardCharsets; | ||||
| import java.nio.file.Files; | ||||
| import java.time.Duration; | ||||
| import java.util.*; | ||||
|  | ||||
| /** | ||||
|  * The base Ollama API class. | ||||
|  */ | ||||
| @SuppressWarnings("DuplicatedCode") | ||||
| public class OllamaAPI { | ||||
|  | ||||
|     private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class); | ||||
|     private final String host; | ||||
|     private long requestTimeoutSeconds = 10; | ||||
|     private boolean verbose = true; | ||||
|     private BasicAuth basicAuth; | ||||
|  | ||||
|     /** | ||||
|      * Instantiates the Ollama API. | ||||
|      * | ||||
|      * @param host the host address of Ollama server | ||||
|      */ | ||||
|     public OllamaAPI(String host) { | ||||
|         if (host.endsWith("/")) { | ||||
|             this.host = host.substring(0, host.length() - 1); | ||||
|         } else { | ||||
|             this.host = host; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Set request timeout in seconds. Default is 3 seconds. | ||||
|      * | ||||
|      * @param requestTimeoutSeconds the request timeout in seconds | ||||
|      */ | ||||
|     public void setRequestTimeoutSeconds(long requestTimeoutSeconds) { | ||||
|         this.requestTimeoutSeconds = requestTimeoutSeconds; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Set/unset logging of responses | ||||
|      * | ||||
|      * @param verbose true/false | ||||
|      */ | ||||
|     public void setVerbose(boolean verbose) { | ||||
|         this.verbose = verbose; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Set basic authentication for accessing Ollama server that's behind a reverse-proxy/gateway. | ||||
|      * | ||||
|      * @param username the username | ||||
|      * @param password the password | ||||
|      */ | ||||
|     public void setBasicAuth(String username, String password) { | ||||
|         this.basicAuth = new BasicAuth(username, password); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * API to check the reachability of Ollama server. | ||||
|      * | ||||
|      * @return true if the server is reachable, false otherwise. | ||||
|      */ | ||||
|     public boolean ping() { | ||||
|         String url = this.host + "/api/tags"; | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|         HttpRequest httpRequest = null; | ||||
|         try { | ||||
|             httpRequest = | ||||
|                     getRequestBuilderDefault(new URI(url)) | ||||
|                             .header("Accept", "application/json") | ||||
|                             .header("Content-type", "application/json") | ||||
|                             .GET() | ||||
|                             .build(); | ||||
|         } catch (URISyntaxException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|         HttpResponse<String> response = null; | ||||
|         try { | ||||
|             response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); | ||||
|         } catch (HttpConnectTimeoutException e) { | ||||
|             return false; | ||||
|         } catch (IOException | InterruptedException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|         int statusCode = response.statusCode(); | ||||
|         return statusCode == 200; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * List available models from Ollama server. | ||||
|      * | ||||
|      * @return the list | ||||
|      */ | ||||
|     public List<Model> listModels() | ||||
|             throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||
|         String url = this.host + "/api/tags"; | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|         HttpRequest httpRequest = | ||||
|                 getRequestBuilderDefault(new URI(url)) | ||||
|                         .header("Accept", "application/json") | ||||
|                         .header("Content-type", "application/json") | ||||
|                         .GET() | ||||
|                         .build(); | ||||
|         HttpResponse<String> response = | ||||
|                 httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseString = response.body(); | ||||
|         if (statusCode == 200) { | ||||
|             return Utils.getObjectMapper() | ||||
|                     .readValue(responseString, ListModelsResponse.class) | ||||
|                     .getModels(); | ||||
|         } else { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseString); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Pull a model on the Ollama server from the list of <a | ||||
|      * href="https://ollama.ai/library">available models</a>. | ||||
|      * | ||||
|      * @param modelName the name of the model | ||||
|      */ | ||||
|     public void pullModel(String modelName) | ||||
|             throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { | ||||
|         String url = this.host + "/api/pull"; | ||||
|         String jsonData = new ModelRequest(modelName).toString(); | ||||
|         HttpRequest request = | ||||
|                 getRequestBuilderDefault(new URI(url)) | ||||
|                         .POST(HttpRequest.BodyPublishers.ofString(jsonData)) | ||||
|                         .header("Accept", "application/json") | ||||
|                         .header("Content-type", "application/json") | ||||
|                         .build(); | ||||
|         HttpClient client = HttpClient.newHttpClient(); | ||||
|         HttpResponse<InputStream> response = | ||||
|                 client.send(request, HttpResponse.BodyHandlers.ofInputStream()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         InputStream responseBodyStream = response.body(); | ||||
|         String responseString = ""; | ||||
|         try (BufferedReader reader = | ||||
|                      new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { | ||||
|             String line; | ||||
|             while ((line = reader.readLine()) != null) { | ||||
|                 ModelPullResponse modelPullResponse = | ||||
|                         Utils.getObjectMapper().readValue(line, ModelPullResponse.class); | ||||
|                 if (verbose) { | ||||
|                     logger.info(modelPullResponse.getStatus()); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|         if (statusCode != 200) { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseString); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Gets model details from the Ollama server. | ||||
|      * | ||||
|      * @param modelName the model | ||||
|      * @return the model details | ||||
|      */ | ||||
|     public ModelDetail getModelDetails(String modelName) | ||||
|             throws IOException, OllamaBaseException, InterruptedException, URISyntaxException { | ||||
|         String url = this.host + "/api/show"; | ||||
|         String jsonData = new ModelRequest(modelName).toString(); | ||||
|         HttpRequest request = | ||||
|                 getRequestBuilderDefault(new URI(url)) | ||||
|                         .header("Accept", "application/json") | ||||
|                         .header("Content-type", "application/json") | ||||
|                         .POST(HttpRequest.BodyPublishers.ofString(jsonData)) | ||||
|                         .build(); | ||||
|         HttpClient client = HttpClient.newHttpClient(); | ||||
|         HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseBody = response.body(); | ||||
|         if (statusCode == 200) { | ||||
|             return Utils.getObjectMapper().readValue(responseBody, ModelDetail.class); | ||||
|         } else { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseBody); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Create a custom model from a model file. Read more about custom model file creation <a | ||||
|      * href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md">here</a>. | ||||
|      * | ||||
|      * @param modelName     the name of the custom model to be created. | ||||
|      * @param modelFilePath the path to model file that exists on the Ollama server. | ||||
|      */ | ||||
|     public void createModelWithFilePath(String modelName, String modelFilePath) | ||||
|             throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { | ||||
|         String url = this.host + "/api/create"; | ||||
|         String jsonData = new CustomModelFilePathRequest(modelName, modelFilePath).toString(); | ||||
|         HttpRequest request = | ||||
|                 getRequestBuilderDefault(new URI(url)) | ||||
|                         .header("Accept", "application/json") | ||||
|                         .header("Content-Type", "application/json") | ||||
|                         .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)) | ||||
|                         .build(); | ||||
|         HttpClient client = HttpClient.newHttpClient(); | ||||
|         HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseString = response.body(); | ||||
|         if (statusCode != 200) { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseString); | ||||
|         } | ||||
|         // FIXME: Ollama API returns HTTP status code 200 for model creation failure cases. Correct this | ||||
|         // if the issue is fixed in the Ollama API server. | ||||
|         if (responseString.contains("error")) { | ||||
|             throw new OllamaBaseException(responseString); | ||||
|         } | ||||
|         if (verbose) { | ||||
|             logger.info(responseString); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Create a custom model from a model file. Read more about custom model file creation <a | ||||
|      * href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md">here</a>. | ||||
|      * | ||||
|      * @param modelName         the name of the custom model to be created. | ||||
|      * @param modelFileContents the path to model file that exists on the Ollama server. | ||||
|      */ | ||||
|     public void createModelWithModelFileContents(String modelName, String modelFileContents) | ||||
|             throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { | ||||
|         String url = this.host + "/api/create"; | ||||
|         String jsonData = new CustomModelFileContentsRequest(modelName, modelFileContents).toString(); | ||||
|         HttpRequest request = | ||||
|                 getRequestBuilderDefault(new URI(url)) | ||||
|                         .header("Accept", "application/json") | ||||
|                         .header("Content-Type", "application/json") | ||||
|                         .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)) | ||||
|                         .build(); | ||||
|         HttpClient client = HttpClient.newHttpClient(); | ||||
|         HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseString = response.body(); | ||||
|         if (statusCode != 200) { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseString); | ||||
|         } | ||||
|         if (responseString.contains("error")) { | ||||
|             throw new OllamaBaseException(responseString); | ||||
|         } | ||||
|         if (verbose) { | ||||
|             logger.info(responseString); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Delete a model from Ollama server. | ||||
|      * | ||||
|      * @param modelName          the name of the model to be deleted. | ||||
|      * @param ignoreIfNotPresent ignore errors if the specified model is not present on Ollama server. | ||||
|      */ | ||||
|     public void deleteModel(String modelName, boolean ignoreIfNotPresent) | ||||
|             throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { | ||||
|         String url = this.host + "/api/delete"; | ||||
|         String jsonData = new ModelRequest(modelName).toString(); | ||||
|         HttpRequest request = | ||||
|                 getRequestBuilderDefault(new URI(url)) | ||||
|                         .method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)) | ||||
|                         .header("Accept", "application/json") | ||||
|                         .header("Content-type", "application/json") | ||||
|                         .build(); | ||||
|         HttpClient client = HttpClient.newHttpClient(); | ||||
|         HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseBody = response.body(); | ||||
|         if (statusCode == 404 && responseBody.contains("model") && responseBody.contains("not found")) { | ||||
|             return; | ||||
|         } | ||||
|         if (statusCode != 200) { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseBody); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Generate embeddings for a given text from a model | ||||
|      * | ||||
|      * @param model  name of model to generate embeddings from | ||||
|      * @param prompt text to generate embeddings for | ||||
|      * @return embeddings | ||||
|      */ | ||||
|     public List<Double> generateEmbeddings(String model, String prompt) | ||||
|             throws IOException, InterruptedException, OllamaBaseException { | ||||
|         return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt)); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Generate embeddings using a {@link OllamaEmbeddingsRequestModel}. | ||||
|      * | ||||
|      * @param modelRequest request for '/api/embeddings' endpoint | ||||
|      * @return embeddings | ||||
|      */ | ||||
|     public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException { | ||||
|         URI uri = URI.create(this.host + "/api/embeddings"); | ||||
|         String jsonData = modelRequest.toString(); | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|         HttpRequest.Builder requestBuilder = | ||||
|                 getRequestBuilderDefault(uri) | ||||
|                         .header("Accept", "application/json") | ||||
|                         .POST(HttpRequest.BodyPublishers.ofString(jsonData)); | ||||
|         HttpRequest request = requestBuilder.build(); | ||||
|         HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseBody = response.body(); | ||||
|         if (statusCode == 200) { | ||||
|             OllamaEmbeddingResponseModel embeddingResponse = | ||||
|                     Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class); | ||||
|             return embeddingResponse.getEmbedding(); | ||||
|         } else { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseBody); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|  | ||||
|     /** | ||||
|      * Generate response for a question to a model running on Ollama server. This is a sync/blocking | ||||
|      * call. | ||||
|      * | ||||
|      * @param model         the ollama model to ask the question to | ||||
|      * @param prompt        the prompt/question text | ||||
|      * @param options       the Options object - <a | ||||
|      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More | ||||
|      *                      details on the options</a> | ||||
|      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||
|      * @return OllamaResult that includes response text and time taken for response | ||||
|      */ | ||||
|     public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler) | ||||
|             throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); | ||||
|         ollamaRequestModel.setRaw(raw); | ||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||
|         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Convenience method to call Ollama API without streaming responses. | ||||
|      * <p> | ||||
|      * Uses {@link #generate(String, String, boolean, Options, OllamaStreamHandler)} | ||||
|      * | ||||
|      * @param model   Model to use | ||||
|      * @param prompt  Prompt text | ||||
|      * @param raw     In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the raw parameter to disable templating. Also note that raw mode will not return a context. | ||||
|      * @param options Additional Options | ||||
|      * @return OllamaResult | ||||
|      */ | ||||
|     public OllamaResult generate(String model, String prompt, boolean raw, Options options) | ||||
|             throws OllamaBaseException, IOException, InterruptedException { | ||||
|         return generate(model, prompt, raw, options, null); | ||||
|     } | ||||
|  | ||||
|  | ||||
    /**
     * Generates a response and treats it as a list of tool invocations: the model's raw response
     * is parsed as a JSON array of {@link ToolDef} entries, and each named tool registered via
     * {@link #registerTool} is invoked with its arguments.
     * <p>
     * NOTE(review): assumes the model has been prompted to emit a JSON array of tool calls;
     * parsing fails with an exception from Jackson otherwise — confirm against callers.
     *
     * @param model   the ollama model to ask the question to
     * @param prompt  the prompt/question text
     * @param raw     if true, bypass the prompt templating system and send the prompt as-is
     * @param options additional options for the generate call
     * @return an {@link OllamaToolsResult} holding the raw model result plus each tool's result
     * @throws OllamaBaseException  if the server responds with a non-200 status code
     * @throws IOException          if the request fails or the response cannot be parsed as tool calls
     * @throws InterruptedException if the HTTP call is interrupted
     */
    public OllamaToolsResult generateWithTools(String model, String prompt, boolean raw, Options options)
            throws OllamaBaseException, IOException, InterruptedException {
        OllamaToolsResult toolResult = new OllamaToolsResult();
        Map<ToolDef, Object> toolResults = new HashMap<>();

        OllamaResult result = generate(model, prompt, raw, options, null);
        toolResult.setModelResult(result);

        List<ToolDef> toolDefs = Utils.getObjectMapper().readValue(result.getResponse(), Utils.getObjectMapper().getTypeFactory().constructCollectionType(List.class, ToolDef.class));
        for (ToolDef toolDef : toolDefs) {
            toolResults.put(toolDef, invokeTool(toolDef));
        }
        toolResult.setToolResults(toolResults);
        return toolResult;
    }
|  | ||||
|  | ||||
|     /** | ||||
|      * Generate response for a question to a model running on Ollama server and get a callback handle | ||||
|      * that can be used to check for status and get the response from the model later. This would be | ||||
|      * an async/non-blocking call. | ||||
|      * | ||||
|      * @param model  the ollama model to ask the question to | ||||
|      * @param prompt the prompt/question text | ||||
|      * @return the ollama async result callback handle | ||||
|      */ | ||||
|     public OllamaAsyncResultCallback generateAsync(String model, String prompt, boolean raw) { | ||||
|         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); | ||||
|         ollamaRequestModel.setRaw(raw); | ||||
|         URI uri = URI.create(this.host + "/api/generate"); | ||||
|         OllamaAsyncResultCallback ollamaAsyncResultCallback = | ||||
|                 new OllamaAsyncResultCallback( | ||||
|                         getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds); | ||||
|         ollamaAsyncResultCallback.start(); | ||||
|         return ollamaAsyncResultCallback; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * With one or more image files, ask a question to a model running on Ollama server. This is a | ||||
|      * sync/blocking call. | ||||
|      * | ||||
|      * @param model         the ollama model to ask the question to | ||||
|      * @param prompt        the prompt/question text | ||||
|      * @param imageFiles    the list of image files to use for the question | ||||
|      * @param options       the Options object - <a | ||||
|      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More | ||||
|      *                      details on the options</a> | ||||
|      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||
|      * @return OllamaResult that includes response text and time taken for response | ||||
|      */ | ||||
|     public OllamaResult generateWithImageFiles( | ||||
|             String model, String prompt, List<File> imageFiles, Options options, OllamaStreamHandler streamHandler) | ||||
|             throws OllamaBaseException, IOException, InterruptedException { | ||||
|         List<String> images = new ArrayList<>(); | ||||
|         for (File imageFile : imageFiles) { | ||||
|             images.add(encodeFileToBase64(imageFile)); | ||||
|         } | ||||
|         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); | ||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||
|         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Convenience method to call Ollama API without streaming responses. | ||||
|      * <p> | ||||
|      * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)} | ||||
|      */ | ||||
|     public OllamaResult generateWithImageFiles( | ||||
|             String model, String prompt, List<File> imageFiles, Options options) | ||||
|             throws OllamaBaseException, IOException, InterruptedException { | ||||
|         return generateWithImageFiles(model, prompt, imageFiles, options, null); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * With one or more image URLs, ask a question to a model running on Ollama server. This is a | ||||
|      * sync/blocking call. | ||||
|      * | ||||
|      * @param model         the ollama model to ask the question to | ||||
|      * @param prompt        the prompt/question text | ||||
|      * @param imageURLs     the list of image URLs to use for the question | ||||
|      * @param options       the Options object - <a | ||||
|      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More | ||||
|      *                      details on the options</a> | ||||
|      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||
|      * @return OllamaResult that includes response text and time taken for response | ||||
|      */ | ||||
|     public OllamaResult generateWithImageURLs( | ||||
|             String model, String prompt, List<String> imageURLs, Options options, OllamaStreamHandler streamHandler) | ||||
|             throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||
|         List<String> images = new ArrayList<>(); | ||||
|         for (String imageURL : imageURLs) { | ||||
|             images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL))); | ||||
|         } | ||||
|         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); | ||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||
|         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Convenience method to call Ollama API without streaming responses. | ||||
|      * <p> | ||||
|      * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)} | ||||
|      */ | ||||
|     public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs, | ||||
|                                               Options options) | ||||
|             throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||
|         return generateWithImageURLs(model, prompt, imageURLs, options, null); | ||||
|     } | ||||
|  | ||||
|  | ||||
|     /** | ||||
|      * Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the api | ||||
|      * 'api/chat'. | ||||
|      * | ||||
|      * @param model    the ollama model to ask the question to | ||||
|      * @param messages chat history / message stack to send to the model | ||||
|      * @return {@link OllamaChatResult} containing the api response and the message history including the newly aqcuired assistant response. | ||||
|      * @throws OllamaBaseException  any response code than 200 has been returned | ||||
|      * @throws IOException          in case the responseStream can not be read | ||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||
|      */ | ||||
|     public OllamaChatResult chat(String model, List<OllamaChatMessage> messages) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model); | ||||
|         return chat(builder.withMessages(messages).build()); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. | ||||
|      * <p> | ||||
|      * Hint: the OllamaChatRequestModel#getStream() property is not implemented. | ||||
|      * | ||||
|      * @param request request object to be sent to the server | ||||
|      * @return | ||||
|      * @throws OllamaBaseException  any response code than 200 has been returned | ||||
|      * @throws IOException          in case the responseStream can not be read | ||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||
|      */ | ||||
|     public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         return chat(request, null); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. | ||||
|      * <p> | ||||
|      * Hint: the OllamaChatRequestModel#getStream() property is not implemented. | ||||
|      * | ||||
|      * @param request       request object to be sent to the server | ||||
|      * @param streamHandler callback handler to handle the last message from stream (caution: all previous messages from stream will be concatenated) | ||||
|      * @return | ||||
|      * @throws OllamaBaseException  any response code than 200 has been returned | ||||
|      * @throws IOException          in case the responseStream can not be read | ||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||
|      */ | ||||
|     public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); | ||||
|         OllamaResult result; | ||||
|         if (streamHandler != null) { | ||||
|             request.setStream(true); | ||||
|             result = requestCaller.call(request, streamHandler); | ||||
|         } else { | ||||
|             result = requestCaller.callSync(request); | ||||
|         } | ||||
|         return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages()); | ||||
|     } | ||||
|  | ||||
|     // technical private methods // | ||||
|  | ||||
|     private static String encodeFileToBase64(File file) throws IOException { | ||||
|         return Base64.getEncoder().encodeToString(Files.readAllBytes(file.toPath())); | ||||
|     } | ||||
|  | ||||
|     private static String encodeByteArrayToBase64(byte[] bytes) { | ||||
|         return Base64.getEncoder().encodeToString(bytes); | ||||
|     } | ||||
|  | ||||
|     private OllamaResult generateSyncForOllamaRequestModel( | ||||
|             OllamaGenerateRequestModel ollamaRequestModel, OllamaStreamHandler streamHandler) | ||||
|             throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaGenerateEndpointCaller requestCaller = | ||||
|                 new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); | ||||
|         OllamaResult result; | ||||
|         if (streamHandler != null) { | ||||
|             ollamaRequestModel.setStream(true); | ||||
|             result = requestCaller.call(ollamaRequestModel, streamHandler); | ||||
|         } else { | ||||
|             result = requestCaller.callSync(ollamaRequestModel); | ||||
|         } | ||||
|         return result; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Get default request builder. | ||||
|      * | ||||
|      * @param uri URI to get a HttpRequest.Builder | ||||
|      * @return HttpRequest.Builder | ||||
|      */ | ||||
|     private HttpRequest.Builder getRequestBuilderDefault(URI uri) { | ||||
|         HttpRequest.Builder requestBuilder = | ||||
|                 HttpRequest.newBuilder(uri) | ||||
|                         .header("Content-Type", "application/json") | ||||
|                         .timeout(Duration.ofSeconds(requestTimeoutSeconds)); | ||||
|         if (isBasicAuthCredentialsSet()) { | ||||
|             requestBuilder.header("Authorization", getBasicAuthHeaderValue()); | ||||
|         } | ||||
|         return requestBuilder; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Get basic authentication header value. | ||||
|      * | ||||
|      * @return basic authentication header value (encoded credentials) | ||||
|      */ | ||||
|     private String getBasicAuthHeaderValue() { | ||||
|         String credentialsToEncode = basicAuth.getUsername() + ":" + basicAuth.getPassword(); | ||||
|         return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes()); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Check if Basic Auth credentials set. | ||||
|      * | ||||
|      * @return true when Basic Auth credentials set | ||||
|      */ | ||||
|     private boolean isBasicAuthCredentialsSet() { | ||||
|         return basicAuth != null; | ||||
|     } | ||||
|  | ||||
|  | ||||
    /**
     * Registers a tool so it can be resolved by name and invoked from
     * {@code generateWithTools}.
     *
     * @param toolSpecification the tool's function name and its callable definition
     */
    public void registerTool(MistralTools.ToolSpecification toolSpecification) {
        ToolRegistry.addFunction(toolSpecification.getFunctionName(), toolSpecification.getToolDefinition());
    }
|  | ||||
|     private Object invokeTool(ToolDef toolDef) { | ||||
|         try { | ||||
|             String methodName = toolDef.getName(); | ||||
|             Map<String, Object> arguments = toolDef.getArguments(); | ||||
|             DynamicFunction function = ToolRegistry.getFunction(methodName); | ||||
|             if (function == null) { | ||||
|                 throw new IllegalArgumentException("No such tool: " + methodName); | ||||
|             } | ||||
|             return function.apply(arguments); | ||||
|         } catch (Exception e) { | ||||
|             e.printStackTrace(); | ||||
|             return "Error calling tool: " + e.getMessage(); | ||||
|         } | ||||
|     } | ||||
| } | ||||
| @@ -1,7 +0,0 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core; | ||||
|  | ||||
| import java.util.function.Consumer; | ||||
|  | ||||
/**
 * Callback invoked for each streamed response chunk received from the Ollama server.
 * Extends {@link Consumer} so existing {@code Consumer<String>} lambdas can be used directly.
 */
@FunctionalInterface
public interface OllamaStreamHandler extends Consumer<String> {
    /**
     * Handles a single streamed message chunk.
     *
     * @param message the streamed response text received so far
     */
    void accept(String message);
}
| @@ -1,143 +0,0 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models; | ||||
|  | ||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||
| import java.io.BufferedReader; | ||||
| import java.io.IOException; | ||||
| import java.io.InputStream; | ||||
| import java.io.InputStreamReader; | ||||
| import java.net.http.HttpClient; | ||||
| import java.net.http.HttpRequest; | ||||
| import java.net.http.HttpResponse; | ||||
| import java.nio.charset.StandardCharsets; | ||||
| import java.time.Duration; | ||||
| import java.util.LinkedList; | ||||
| import java.util.Queue; | ||||
| import lombok.Data; | ||||
| import lombok.EqualsAndHashCode; | ||||
| import lombok.Getter; | ||||
|  | ||||
| @Data | ||||
| @EqualsAndHashCode(callSuper = true) | ||||
| @SuppressWarnings("unused") | ||||
| public class OllamaAsyncResultCallback extends Thread { | ||||
|   private final HttpRequest.Builder requestBuilder; | ||||
|   private final OllamaGenerateRequestModel ollamaRequestModel; | ||||
|   private final Queue<String> queue = new LinkedList<>(); | ||||
|   private String result; | ||||
|   private boolean isDone; | ||||
|  | ||||
|   /** | ||||
|    * -- GETTER -- Returns the status of the request. Indicates if the request was successful or a | ||||
|    * failure. If the request was a failure, the `getResponse()` method will return the error | ||||
|    * message. | ||||
|    */ | ||||
|   @Getter private boolean succeeded; | ||||
|  | ||||
|   private long requestTimeoutSeconds; | ||||
|  | ||||
|   /** | ||||
|    * -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama | ||||
|    * server. | ||||
|    */ | ||||
|   @Getter private int httpStatusCode; | ||||
|  | ||||
|   /** -- GETTER -- Returns the response time in milliseconds. */ | ||||
|   @Getter private long responseTime = 0; | ||||
|  | ||||
|   public OllamaAsyncResultCallback( | ||||
|       HttpRequest.Builder requestBuilder, | ||||
|       OllamaGenerateRequestModel ollamaRequestModel, | ||||
|       long requestTimeoutSeconds) { | ||||
|     this.requestBuilder = requestBuilder; | ||||
|     this.ollamaRequestModel = ollamaRequestModel; | ||||
|     this.isDone = false; | ||||
|     this.result = ""; | ||||
|     this.queue.add(""); | ||||
|     this.requestTimeoutSeconds = requestTimeoutSeconds; | ||||
|   } | ||||
|  | ||||
|   @Override | ||||
|   public void run() { | ||||
|     HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|     try { | ||||
|       long startTime = System.currentTimeMillis(); | ||||
|       HttpRequest request = | ||||
|           requestBuilder | ||||
|               .POST( | ||||
|                   HttpRequest.BodyPublishers.ofString( | ||||
|                       Utils.getObjectMapper().writeValueAsString(ollamaRequestModel))) | ||||
|               .header("Content-Type", "application/json") | ||||
|               .timeout(Duration.ofSeconds(requestTimeoutSeconds)) | ||||
|               .build(); | ||||
|       HttpResponse<InputStream> response = | ||||
|           httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); | ||||
|       int statusCode = response.statusCode(); | ||||
|       this.httpStatusCode = statusCode; | ||||
|  | ||||
|       InputStream responseBodyStream = response.body(); | ||||
|       try (BufferedReader reader = | ||||
|           new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { | ||||
|         String line; | ||||
|         StringBuilder responseBuffer = new StringBuilder(); | ||||
|         while ((line = reader.readLine()) != null) { | ||||
|           if (statusCode == 404) { | ||||
|             OllamaErrorResponseModel ollamaResponseModel = | ||||
|                 Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class); | ||||
|             queue.add(ollamaResponseModel.getError()); | ||||
|             responseBuffer.append(ollamaResponseModel.getError()); | ||||
|           } else { | ||||
|             OllamaGenerateResponseModel ollamaResponseModel = | ||||
|                 Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); | ||||
|             queue.add(ollamaResponseModel.getResponse()); | ||||
|             if (!ollamaResponseModel.isDone()) { | ||||
|               responseBuffer.append(ollamaResponseModel.getResponse()); | ||||
|             } | ||||
|           } | ||||
|         } | ||||
|  | ||||
|         this.isDone = true; | ||||
|         this.succeeded = true; | ||||
|         this.result = responseBuffer.toString(); | ||||
|         long endTime = System.currentTimeMillis(); | ||||
|         responseTime = endTime - startTime; | ||||
|       } | ||||
|       if (statusCode != 200) { | ||||
|         throw new OllamaBaseException(this.result); | ||||
|       } | ||||
|     } catch (IOException | InterruptedException | OllamaBaseException e) { | ||||
|       this.isDone = true; | ||||
|       this.succeeded = false; | ||||
|       this.result = "[FAILED] " + e.getMessage(); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   /** | ||||
|    * Returns the status of the thread. This does not indicate that the request was successful or a | ||||
|    * failure, rather it is just a status flag to indicate if the thread is active or ended. | ||||
|    * | ||||
|    * @return boolean - status | ||||
|    */ | ||||
|   public boolean isComplete() { | ||||
|     return isDone; | ||||
|   } | ||||
|  | ||||
|   /** | ||||
|    * Returns the final completion/response when the execution completes. Does not return intermediate results. | ||||
|    * | ||||
|    * @return String completion/response text | ||||
|    */ | ||||
|   public String getResponse() { | ||||
|     return result; | ||||
|   } | ||||
|  | ||||
  /**
   * Returns the live queue that response chunks are appended to as they stream in.
   * Callers can poll this while {@code isComplete()} is still false to consume
   * intermediate results.
   *
   * @return the queue of streamed response chunks
   */
  public Queue<String> getStream() {
    return queue;
  }
|  | ||||
  /**
   * Sets the request timeout, in seconds, applied to the underlying HTTP request.
   *
   * @param requestTimeoutSeconds timeout in seconds
   */
  public void setRequestTimeoutSeconds(long requestTimeoutSeconds) {
    this.requestTimeoutSeconds = requestTimeoutSeconds;
  }
| } | ||||
| @@ -1,19 +0,0 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||
|  | ||||
| import com.fasterxml.jackson.annotation.JsonValue; | ||||
|  | ||||
| /** | ||||
|  * Defines the possible Chat Message roles. | ||||
|  */ | ||||
| public enum OllamaChatMessageRole { | ||||
|     SYSTEM("system"), | ||||
|     USER("user"), | ||||
|     ASSISTANT("assistant"); | ||||
|  | ||||
|     @JsonValue | ||||
|     private String roleName; | ||||
|  | ||||
|     private OllamaChatMessageRole(String roleName){ | ||||
|         this.roleName = roleName; | ||||
|     } | ||||
| } | ||||
| @@ -1,46 +0,0 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.generate; | ||||
|  | ||||
|  | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
| import lombok.Getter; | ||||
| import lombok.Setter; | ||||
|  | ||||
| @Getter | ||||
| @Setter | ||||
| public class OllamaGenerateRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody{ | ||||
|  | ||||
|   private String prompt; | ||||
|   private List<String> images; | ||||
|  | ||||
|   private String system; | ||||
|   private String context; | ||||
|   private boolean raw; | ||||
|  | ||||
|   public OllamaGenerateRequestModel() { | ||||
|   } | ||||
|  | ||||
|   public OllamaGenerateRequestModel(String model, String prompt) { | ||||
|     this.model = model; | ||||
|     this.prompt = prompt; | ||||
|   } | ||||
|  | ||||
|   public OllamaGenerateRequestModel(String model, String prompt, List<String> images) { | ||||
|     this.model = model; | ||||
|     this.prompt = prompt; | ||||
|     this.images = images; | ||||
|   } | ||||
|  | ||||
|     @Override | ||||
|   public boolean equals(Object o) { | ||||
|     if (!(o instanceof OllamaGenerateRequestModel)) { | ||||
|       return false; | ||||
|     } | ||||
|  | ||||
|     return this.toString().equals(o.toString()); | ||||
|   } | ||||
|  | ||||
| } | ||||
| @@ -1,55 +0,0 @@ | ||||
package io.github.amithkoujalgi.ollama4j.core.models.request;

import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver;
import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;

/**
 * Endpoint caller specialized for the chat API ({@code /api/chat}). Parses each
 * streamed line as a chat response, accumulates message content, and optionally
 * forwards intermediate responses to a stream observer.
 */
public class OllamaChatEndpointCaller extends OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class);

    private OllamaChatStreamObserver streamObserver;

    public OllamaChatEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        super(host, basicAuth, requestTimeoutSeconds, verbose);
    }

    @Override
    protected String getEndpointSuffix() {
        return "/api/chat";
    }

    /**
     * Parses one streamed response line, appends its message content to the buffer,
     * and notifies the observer (if any). Returns the model's "done" flag; a parse
     * failure is logged and treated as done to stop the stream.
     */
    @Override
    protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
        try {
            OllamaChatResponseModel chatResponse =
                    Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
            responseBuffer.append(chatResponse.getMessage().getContent());
            if (streamObserver != null) {
                streamObserver.notify(chatResponse);
            }
            return chatResponse.isDone();
        } catch (JsonProcessingException e) {
            LOG.error("Error parsing the Ollama chat response!", e);
            return true;
        }
    }

    /**
     * Performs a synchronous chat call, streaming intermediate responses to the
     * given handler.
     */
    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
            throws OllamaBaseException, IOException, InterruptedException {
        streamObserver = new OllamaChatStreamObserver(streamHandler);
        return super.callSync(body);
    }
}
| @@ -1,16 +0,0 @@ | ||||
package io.github.amithkoujalgi.ollama4j.core.tools;

import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.Map;

/**
 * Pairs the raw model response with the results of any tool/function invocations
 * it triggered. Getters, setters, equals/hashCode and toString are generated by
 * Lombok's {@code @Data}.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class OllamaToolsResult {
    // The model's own response for the request.
    private OllamaResult modelResult;
    // Result of each invoked tool, keyed by its definition.
    private Map<ToolDef, Object> toolResults;
}
| @@ -1,17 +0,0 @@ | ||||
package io.github.amithkoujalgi.ollama4j.core.tools;

import java.util.HashMap;
import java.util.Map;

/**
 * Static registry mapping tool/function names to their {@link DynamicFunction}
 * implementations.
 *
 * <p>NOTE(review): the backing HashMap is not synchronized; if functions can be
 * registered and looked up from multiple threads concurrently this needs a
 * concurrent map — confirm usage before relying on it across threads.
 */
public class ToolRegistry {
    private static final Map<String, DynamicFunction> functionMap = new HashMap<>();

    // Utility holder of static state only; not meant to be instantiated.
    private ToolRegistry() {
    }

    /**
     * Looks up a previously registered function.
     *
     * @param name the registration name
     * @return the function, or {@code null} if nothing is registered under that name
     */
    public static DynamicFunction getFunction(String name) {
        return functionMap.get(name);
    }

    /**
     * Registers a function under the given name, replacing any previous registration.
     *
     * @param name     the registration name
     * @param function the callable implementation
     */
    public static void addFunction(String name, DynamicFunction function) {
        functionMap.put(name, function);
    }
}
							
								
								
									
										886
									
								
								src/main/java/io/github/ollama4j/OllamaAPI.java
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										886
									
								
								src/main/java/io/github/ollama4j/OllamaAPI.java
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,886 @@ | ||||
| package io.github.ollama4j; | ||||
|  | ||||
| import io.github.ollama4j.exceptions.OllamaBaseException; | ||||
| import io.github.ollama4j.exceptions.RoleNotFoundException; | ||||
| import io.github.ollama4j.exceptions.ToolInvocationException; | ||||
| import io.github.ollama4j.exceptions.ToolNotFoundException; | ||||
| import io.github.ollama4j.models.chat.*; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbeddingResponseModel; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel; | ||||
| import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel; | ||||
| import io.github.ollama4j.models.generate.OllamaGenerateRequest; | ||||
| import io.github.ollama4j.models.generate.OllamaStreamHandler; | ||||
| import io.github.ollama4j.models.ps.ModelsProcessResponse; | ||||
| import io.github.ollama4j.models.request.*; | ||||
| import io.github.ollama4j.models.response.*; | ||||
| import io.github.ollama4j.tools.*; | ||||
| import io.github.ollama4j.utils.Options; | ||||
| import io.github.ollama4j.utils.Utils; | ||||
| import lombok.Setter; | ||||
|  | ||||
| import java.io.*; | ||||
| import java.net.URI; | ||||
| import java.net.URISyntaxException; | ||||
| import java.net.http.HttpClient; | ||||
| import java.net.http.HttpConnectTimeoutException; | ||||
| import java.net.http.HttpRequest; | ||||
| import java.net.http.HttpResponse; | ||||
| import java.nio.charset.StandardCharsets; | ||||
| import java.nio.file.Files; | ||||
| import java.time.Duration; | ||||
| import java.util.*; | ||||
| import java.util.stream.Collectors; | ||||
|  | ||||
| import org.slf4j.Logger; | ||||
| import org.slf4j.LoggerFactory; | ||||
| import org.jsoup.Jsoup; | ||||
| import org.jsoup.nodes.Document; | ||||
| import org.jsoup.nodes.Element; | ||||
| import org.jsoup.select.Elements; | ||||
|  | ||||
| /** | ||||
|  * The base Ollama API class. | ||||
|  */ | ||||
| @SuppressWarnings({"DuplicatedCode", "resource"}) | ||||
| public class OllamaAPI { | ||||
|  | ||||
|     private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class); | ||||
|     private final String host; | ||||
|     /** | ||||
|      * -- SETTER -- | ||||
|      * Set request timeout in seconds. Default is 3 seconds. | ||||
|      */ | ||||
|     @Setter | ||||
|     private long requestTimeoutSeconds = 10; | ||||
|     /** | ||||
|      * -- SETTER -- | ||||
|      * Set/unset logging of responses | ||||
|      */ | ||||
|     @Setter | ||||
|     private boolean verbose = true; | ||||
|     private BasicAuth basicAuth; | ||||
|  | ||||
|     private final ToolRegistry toolRegistry = new ToolRegistry(); | ||||
|  | ||||
    /**
     * Instantiates the Ollama API with default Ollama host: <a href="http://localhost:11434">http://localhost:11434</a>
     **/
    public OllamaAPI() {
        // Default matches the Ollama server's standard local listen address/port.
        this.host = "http://localhost:11434";
    }
|  | ||||
|     /** | ||||
|      * Instantiates the Ollama API with specified Ollama host address. | ||||
|      * | ||||
|      * @param host the host address of Ollama server | ||||
|      */ | ||||
|     public OllamaAPI(String host) { | ||||
|         if (host.endsWith("/")) { | ||||
|             this.host = host.substring(0, host.length() - 1); | ||||
|         } else { | ||||
|             this.host = host; | ||||
|         } | ||||
|     } | ||||
|  | ||||
    /**
     * Set basic authentication for accessing Ollama server that's behind a reverse-proxy/gateway.
     *
     * @param username the username
     * @param password the password
     */
    public void setBasicAuth(String username, String password) {
        // Credentials are presumably attached to outgoing requests by getRequestBuilderDefault — confirm.
        this.basicAuth = new BasicAuth(username, password);
    }
|  | ||||
|     /** | ||||
|      * API to check the reachability of Ollama server. | ||||
|      * | ||||
|      * @return true if the server is reachable, false otherwise. | ||||
|      */ | ||||
|     public boolean ping() { | ||||
|         String url = this.host + "/api/tags"; | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|         HttpRequest httpRequest = null; | ||||
|         try { | ||||
|             httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build(); | ||||
|         } catch (URISyntaxException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|         HttpResponse<String> response = null; | ||||
|         try { | ||||
|             response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); | ||||
|         } catch (HttpConnectTimeoutException e) { | ||||
|             return false; | ||||
|         } catch (IOException | InterruptedException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|         int statusCode = response.statusCode(); | ||||
|         return statusCode == 200; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Provides a list of running models and details about each model currently loaded into memory. | ||||
|      * | ||||
|      * @return ModelsProcessResponse containing details about the running models | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      */ | ||||
|     public ModelsProcessResponse ps() throws IOException, InterruptedException, OllamaBaseException { | ||||
|         String url = this.host + "/api/ps"; | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|         HttpRequest httpRequest = null; | ||||
|         try { | ||||
|             httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build(); | ||||
|         } catch (URISyntaxException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|         HttpResponse<String> response = null; | ||||
|         response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseString = response.body(); | ||||
|         if (statusCode == 200) { | ||||
|             return Utils.getObjectMapper().readValue(responseString, ModelsProcessResponse.class); | ||||
|         } else { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseString); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Lists available models from the Ollama server. | ||||
|      * | ||||
|      * @return a list of models available on the server | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public List<Model> listModels() throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||
|         String url = this.host + "/api/tags"; | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|         HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build(); | ||||
|         HttpResponse<String> response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseString = response.body(); | ||||
|         if (statusCode == 200) { | ||||
|             return Utils.getObjectMapper().readValue(responseString, ListModelsResponse.class).getModels(); | ||||
|         } else { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseString); | ||||
|         } | ||||
|     } | ||||
|  | ||||
    /**
     * Retrieves a list of models from the Ollama library. This method fetches the available models directly from Ollama
     * library page, including model details such as the name, pull count, popular tags, tag count, and the time when model was updated.
     *
     * <p>NOTE(review): this scrapes ollama.com HTML with hard-coded CSS/XPath selectors,
     * so it is inherently brittle — a site redesign silently breaks extraction.
     *
     * @return A list of {@link LibraryModel} objects representing the models available in the Ollama library.
     * @throws OllamaBaseException  If the HTTP request fails or the response is not successful (non-200 status code).
     * @throws IOException          If an I/O error occurs during the HTTP request or response processing.
     * @throws InterruptedException If the thread executing the request is interrupted.
     * @throws URISyntaxException   If there is an error creating the URI for the HTTP request.
     */
    public List<LibraryModel> listModelsFromLibrary() throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
        String url = "https://ollama.com/library";
        HttpClient httpClient = HttpClient.newHttpClient();
        HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build();
        HttpResponse<String> response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
        int statusCode = response.statusCode();
        String responseString = response.body();
        List<LibraryModel> models = new ArrayList<>();
        if (statusCode == 200) {
            Document doc = Jsoup.parse(responseString);
            // One anchor per model card on the library index page.
            Elements modelSections = doc.selectXpath("//*[@id='repo']/ul/li/a");
            for (Element e : modelSections) {
                LibraryModel model = new LibraryModel();
                // Sub-selectors into each model card: name, description, pull count, tags, last-updated.
                Elements names = e.select("div > h2 > div > span");
                Elements desc = e.select("div > p");
                Elements pullCounts = e.select("div:nth-of-type(2) > p > span:first-of-type > span:first-of-type");
                Elements popularTags = e.select("div > div > span");
                Elements totalTags = e.select("div:nth-of-type(2) > p > span:nth-of-type(2) > span:first-of-type");
                Elements lastUpdatedTime = e.select("div:nth-of-type(2) > p > span:nth-of-type(3) > span:nth-of-type(2)");

                if (names.first() == null || names.isEmpty()) {
                    // if name cannot be extracted, skip.
                    continue;
                }
                // Every other field falls back to an empty/zero default when absent.
                Optional.ofNullable(names.first()).map(Element::text).ifPresent(model::setName);
                model.setDescription(Optional.ofNullable(desc.first()).map(Element::text).orElse(""));
                model.setPopularTags(Optional.of(popularTags).map(tags -> tags.stream().map(Element::text).collect(Collectors.toList())).orElse(new ArrayList<>()));
                model.setPullCount(Optional.ofNullable(pullCounts.first()).map(Element::text).orElse(""));
                model.setTotalTags(Optional.ofNullable(totalTags.first()).map(Element::text).map(Integer::parseInt).orElse(0));
                model.setLastUpdated(Optional.ofNullable(lastUpdatedTime.first()).map(Element::text).orElse(""));

                models.add(model);
            }
            return models;
        } else {
            throw new OllamaBaseException(statusCode + " - " + responseString);
        }
    }
|  | ||||
|     /** | ||||
|      * Fetches the tags associated with a specific model from Ollama library. | ||||
|      * This method fetches the available model tags directly from Ollama library model page, including model tag name, size and time when model was last updated | ||||
|      * into a list of {@link LibraryModelTag} objects. | ||||
|      * | ||||
|      * @param libraryModel the {@link LibraryModel} object which contains the name of the library model | ||||
|      *                     for which the tags need to be fetched. | ||||
|      * @return a list of {@link LibraryModelTag} objects containing the extracted tags and their associated metadata. | ||||
|      * @throws OllamaBaseException  if the HTTP response status code indicates an error (i.e., not 200 OK), | ||||
|      *                              or if there is any other issue during the request or response processing. | ||||
|      * @throws IOException          if an input/output exception occurs during the HTTP request or response handling. | ||||
|      * @throws InterruptedException if the thread is interrupted while waiting for the HTTP response. | ||||
|      * @throws URISyntaxException   if the URI format is incorrect or invalid. | ||||
|      */ | ||||
|     public LibraryModelDetail getLibraryModelDetails(LibraryModel libraryModel) throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||
|         String url = String.format("https://ollama.com/library/%s/tags", libraryModel.getName()); | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|         HttpRequest httpRequest = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").GET().build(); | ||||
|         HttpResponse<String> response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseString = response.body(); | ||||
|  | ||||
|         List<LibraryModelTag> libraryModelTags = new ArrayList<>(); | ||||
|         if (statusCode == 200) { | ||||
|             Document doc = Jsoup.parse(responseString); | ||||
|             Elements tagSections = doc.select("html > body > main > div > section > div > div > div:nth-child(n+2) > div"); | ||||
|             for (Element e : tagSections) { | ||||
|                 Elements tags = e.select("div > a > div"); | ||||
|                 Elements tagsMetas = e.select("div > span"); | ||||
|  | ||||
|                 LibraryModelTag libraryModelTag = new LibraryModelTag(); | ||||
|  | ||||
|                 if (tags.first() == null || tags.isEmpty()) { | ||||
|                     // if tag cannot be extracted, skip. | ||||
|                     continue; | ||||
|                 } | ||||
|                 libraryModelTag.setName(libraryModel.getName()); | ||||
|                 Optional.ofNullable(tags.first()).map(Element::text).ifPresent(libraryModelTag::setTag); | ||||
|                 libraryModelTag.setSize(Optional.ofNullable(tagsMetas.first()).map(element -> element.text().split("•")).filter(parts -> parts.length > 1).map(parts -> parts[1].trim()).orElse("")); | ||||
|                 libraryModelTag.setLastUpdated(Optional.ofNullable(tagsMetas.first()).map(element -> element.text().split("•")).filter(parts -> parts.length > 1).map(parts -> parts[2].trim()).orElse("")); | ||||
|                 libraryModelTags.add(libraryModelTag); | ||||
|             } | ||||
|             LibraryModelDetail libraryModelDetail = new LibraryModelDetail(); | ||||
|             libraryModelDetail.setModel(libraryModel); | ||||
|             libraryModelDetail.setTags(libraryModelTags); | ||||
|             return libraryModelDetail; | ||||
|         } else { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseString); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Finds a specific model using model name and tag from Ollama library. | ||||
|      * <p> | ||||
|      * This method retrieves the model from the Ollama library by its name, then fetches its tags. | ||||
|      * It searches through the tags of the model to find one that matches the specified tag name. | ||||
|      * If the model or the tag is not found, it throws a {@link NoSuchElementException}. | ||||
|      * | ||||
|      * @param modelName The name of the model to search for in the library. | ||||
|      * @param tag       The tag name to search for within the specified model. | ||||
|      * @return The {@link LibraryModelTag} associated with the specified model and tag. | ||||
|      * @throws OllamaBaseException    If there is a problem with the Ollama library operations. | ||||
|      * @throws IOException            If an I/O error occurs during the operation. | ||||
|      * @throws URISyntaxException     If there is an error with the URI syntax. | ||||
|      * @throws InterruptedException   If the operation is interrupted. | ||||
|      * @throws NoSuchElementException If the model or the tag is not found. | ||||
|      */ | ||||
|     public LibraryModelTag findModelTagFromLibrary(String modelName, String tag) throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { | ||||
|         List<LibraryModel> libraryModels = this.listModelsFromLibrary(); | ||||
|         LibraryModel libraryModel = libraryModels.stream().filter(model -> model.getName().equals(modelName)).findFirst().orElseThrow(() -> new NoSuchElementException(String.format("Model by name '%s' not found", modelName))); | ||||
|         LibraryModelDetail libraryModelDetail = this.getLibraryModelDetails(libraryModel); | ||||
|         LibraryModelTag libraryModelTag = libraryModelDetail.getTags().stream().filter(tagName -> tagName.getTag().equals(tag)).findFirst().orElseThrow(() -> new NoSuchElementException(String.format("Tag '%s' for model '%s' not found", tag, modelName))); | ||||
|         return libraryModelTag; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Pull a model on the Ollama server from the list of <a | ||||
|      * href="https://ollama.ai/library">available models</a>. | ||||
|      * | ||||
|      * @param modelName the name of the model | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public void pullModel(String modelName) throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { | ||||
|         String url = this.host + "/api/pull"; | ||||
|         String jsonData = new ModelRequest(modelName).toString(); | ||||
|         HttpRequest request = getRequestBuilderDefault(new URI(url)).POST(HttpRequest.BodyPublishers.ofString(jsonData)).header("Accept", "application/json").header("Content-type", "application/json").build(); | ||||
|         HttpClient client = HttpClient.newHttpClient(); | ||||
|         HttpResponse<InputStream> response = client.send(request, HttpResponse.BodyHandlers.ofInputStream()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         InputStream responseBodyStream = response.body(); | ||||
|         String responseString = ""; | ||||
|         try (BufferedReader reader = new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { | ||||
|             String line; | ||||
|             while ((line = reader.readLine()) != null) { | ||||
|                 ModelPullResponse modelPullResponse = Utils.getObjectMapper().readValue(line, ModelPullResponse.class); | ||||
|                 if (verbose) { | ||||
|                     logger.info(modelPullResponse.getStatus()); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|         if (statusCode != 200) { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseString); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Pulls a model using the specified Ollama library model tag. | ||||
|      * The model is identified by a name and a tag, which are combined into a single identifier | ||||
|      * in the format "name:tag" to pull the corresponding model. | ||||
|      * | ||||
|      * @param libraryModelTag the {@link LibraryModelTag} object containing the name and tag | ||||
|      *                        of the model to be pulled. | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public void pullModel(LibraryModelTag libraryModelTag) throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { | ||||
|         String tagToPull = String.format("%s:%s", libraryModelTag.getName(), libraryModelTag.getTag()); | ||||
|         pullModel(tagToPull); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Gets model details from the Ollama server. | ||||
|      * | ||||
|      * @param modelName the model | ||||
|      * @return the model details | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public ModelDetail getModelDetails(String modelName) throws IOException, OllamaBaseException, InterruptedException, URISyntaxException { | ||||
|         String url = this.host + "/api/show"; | ||||
|         String jsonData = new ModelRequest(modelName).toString(); | ||||
|         HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-type", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData)).build(); | ||||
|         HttpClient client = HttpClient.newHttpClient(); | ||||
|         HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseBody = response.body(); | ||||
|         if (statusCode == 200) { | ||||
|             return Utils.getObjectMapper().readValue(responseBody, ModelDetail.class); | ||||
|         } else { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseBody); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Create a custom model from a model file. Read more about custom model file creation <a | ||||
|      * href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md">here</a>. | ||||
|      * | ||||
|      * @param modelName     the name of the custom model to be created. | ||||
|      * @param modelFilePath the path to model file that exists on the Ollama server. | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public void createModelWithFilePath(String modelName, String modelFilePath) throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { | ||||
|         String url = this.host + "/api/create"; | ||||
|         String jsonData = new CustomModelFilePathRequest(modelName, modelFilePath).toString(); | ||||
|         HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-Type", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build(); | ||||
|         HttpClient client = HttpClient.newHttpClient(); | ||||
|         HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseString = response.body(); | ||||
|         if (statusCode != 200) { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseString); | ||||
|         } | ||||
|         // FIXME: Ollama API returns HTTP status code 200 for model creation failure cases. Correct this | ||||
|         // if the issue is fixed in the Ollama API server. | ||||
|         if (responseString.contains("error")) { | ||||
|             throw new OllamaBaseException(responseString); | ||||
|         } | ||||
|         if (verbose) { | ||||
|             logger.info(responseString); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Create a custom model from a model file. Read more about custom model file creation <a | ||||
|      * href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md">here</a>. | ||||
|      * | ||||
|      * @param modelName         the name of the custom model to be created. | ||||
|      * @param modelFileContents the path to model file that exists on the Ollama server. | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public void createModelWithModelFileContents(String modelName, String modelFileContents) throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { | ||||
|         String url = this.host + "/api/create"; | ||||
|         String jsonData = new CustomModelFileContentsRequest(modelName, modelFileContents).toString(); | ||||
|         HttpRequest request = getRequestBuilderDefault(new URI(url)).header("Accept", "application/json").header("Content-Type", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).build(); | ||||
|         HttpClient client = HttpClient.newHttpClient(); | ||||
|         HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseString = response.body(); | ||||
|         if (statusCode != 200) { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseString); | ||||
|         } | ||||
|         if (responseString.contains("error")) { | ||||
|             throw new OllamaBaseException(responseString); | ||||
|         } | ||||
|         if (verbose) { | ||||
|             logger.info(responseString); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Delete a model from Ollama server. | ||||
|      * | ||||
|      * @param modelName          the name of the model to be deleted. | ||||
|      * @param ignoreIfNotPresent ignore errors if the specified model is not present on Ollama server. | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public void deleteModel(String modelName, boolean ignoreIfNotPresent) throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { | ||||
|         String url = this.host + "/api/delete"; | ||||
|         String jsonData = new ModelRequest(modelName).toString(); | ||||
|         HttpRequest request = getRequestBuilderDefault(new URI(url)).method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)).header("Accept", "application/json").header("Content-type", "application/json").build(); | ||||
|         HttpClient client = HttpClient.newHttpClient(); | ||||
|         HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseBody = response.body(); | ||||
|         if (statusCode == 404 && responseBody.contains("model") && responseBody.contains("not found")) { | ||||
|             return; | ||||
|         } | ||||
|         if (statusCode != 200) { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseBody); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Generate embeddings for a given text from a model | ||||
|      * | ||||
|      * @param model  name of model to generate embeddings from | ||||
|      * @param prompt text to generate embeddings for | ||||
|      * @return embeddings | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @deprecated Use {@link #embed(String, List)} instead. | ||||
|      */ | ||||
|     @Deprecated | ||||
|     public List<Double> generateEmbeddings(String model, String prompt) throws IOException, InterruptedException, OllamaBaseException { | ||||
|         return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt)); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Generate embeddings using a {@link OllamaEmbeddingsRequestModel}. | ||||
|      * | ||||
|      * @param modelRequest request for '/api/embeddings' endpoint | ||||
|      * @return embeddings | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @deprecated Use {@link #embed(OllamaEmbedRequestModel)} instead. | ||||
|      */ | ||||
|     @Deprecated | ||||
|     public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException { | ||||
|         URI uri = URI.create(this.host + "/api/embeddings"); | ||||
|         String jsonData = modelRequest.toString(); | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|         HttpRequest.Builder requestBuilder = getRequestBuilderDefault(uri).header("Accept", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData)); | ||||
|         HttpRequest request = requestBuilder.build(); | ||||
|         HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseBody = response.body(); | ||||
|         if (statusCode == 200) { | ||||
|             OllamaEmbeddingResponseModel embeddingResponse = Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class); | ||||
|             return embeddingResponse.getEmbedding(); | ||||
|         } else { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseBody); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Generate embeddings for a given text from a model | ||||
|      * | ||||
|      * @param model  name of model to generate embeddings from | ||||
|      * @param inputs text/s to generate embeddings for | ||||
|      * @return embeddings | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaEmbedResponseModel embed(String model, List<String> inputs) throws IOException, InterruptedException, OllamaBaseException { | ||||
|         return embed(new OllamaEmbedRequestModel(model, inputs)); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Generate embeddings using a {@link OllamaEmbedRequestModel}. | ||||
|      * | ||||
|      * @param modelRequest request for '/api/embed' endpoint | ||||
|      * @return embeddings | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaEmbedResponseModel embed(OllamaEmbedRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException { | ||||
|         URI uri = URI.create(this.host + "/api/embed"); | ||||
|         String jsonData = Utils.getObjectMapper().writeValueAsString(modelRequest); | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|  | ||||
|         HttpRequest request = HttpRequest.newBuilder(uri).header("Accept", "application/json").POST(HttpRequest.BodyPublishers.ofString(jsonData)).build(); | ||||
|  | ||||
|         HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); | ||||
|         int statusCode = response.statusCode(); | ||||
|         String responseBody = response.body(); | ||||
|  | ||||
|         if (statusCode == 200) { | ||||
|             return Utils.getObjectMapper().readValue(responseBody, OllamaEmbedResponseModel.class); | ||||
|         } else { | ||||
|             throw new OllamaBaseException(statusCode + " - " + responseBody); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Generate response for a question to a model running on Ollama server. This is a sync/blocking | ||||
|      * call. | ||||
|      * | ||||
|      * @param model         the ollama model to ask the question to | ||||
|      * @param prompt        the prompt/question text | ||||
|      * @param options       the Options object - <a | ||||
|      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More | ||||
|      *                      details on the options</a> | ||||
|      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||
|      * @return OllamaResult that includes response text and time taken for response | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt); | ||||
|         ollamaRequestModel.setRaw(raw); | ||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||
|         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Generates response using the specified AI model and prompt (in blocking mode). | ||||
|      * <p> | ||||
|      * Uses {@link #generate(String, String, boolean, Options, OllamaStreamHandler)} | ||||
|      * | ||||
|      * @param model   The name or identifier of the AI model to use for generating the response. | ||||
|      * @param prompt  The input text or prompt to provide to the AI model. | ||||
|      * @param raw     In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the raw parameter to disable templating. Also note that raw mode will not return a context. | ||||
|      * @param options Additional options or configurations to use when generating the response. | ||||
|      * @return {@link OllamaResult} | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaResult generate(String model, String prompt, boolean raw, Options options) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         return generate(model, prompt, raw, options, null); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Generates response using the specified AI model and prompt (in blocking mode), and then invokes a set of tools | ||||
|      * on the generated response. | ||||
|      * | ||||
|      * @param model   The name or identifier of the AI model to use for generating the response. | ||||
|      * @param prompt  The input text or prompt to provide to the AI model. | ||||
|      * @param options Additional options or configurations to use when generating the response. | ||||
|      * @return {@link OllamaToolsResult} An OllamaToolsResult object containing the response from the AI model and the results of invoking the tools on that output. | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaToolsResult generateWithTools(String model, String prompt, Options options) throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException { | ||||
|         boolean raw = true; | ||||
|         OllamaToolsResult toolResult = new OllamaToolsResult(); | ||||
|         Map<ToolFunctionCallSpec, Object> toolResults = new HashMap<>(); | ||||
|  | ||||
|         OllamaResult result = generate(model, prompt, raw, options, null); | ||||
|         toolResult.setModelResult(result); | ||||
|  | ||||
|         String toolsResponse = result.getResponse(); | ||||
|         if (toolsResponse.contains("[TOOL_CALLS]")) { | ||||
|             toolsResponse = toolsResponse.replace("[TOOL_CALLS]", ""); | ||||
|         } | ||||
|  | ||||
|         List<ToolFunctionCallSpec> toolFunctionCallSpecs = Utils.getObjectMapper().readValue(toolsResponse, Utils.getObjectMapper().getTypeFactory().constructCollectionType(List.class, ToolFunctionCallSpec.class)); | ||||
|         for (ToolFunctionCallSpec toolFunctionCallSpec : toolFunctionCallSpecs) { | ||||
|             toolResults.put(toolFunctionCallSpec, invokeTool(toolFunctionCallSpec)); | ||||
|         } | ||||
|         toolResult.setToolResults(toolResults); | ||||
|         return toolResult; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Generate response for a question to a model running on Ollama server and get a callback handle | ||||
|      * that can be used to check for status and get the response from the model later. This would be | ||||
|      * an async/non-blocking call. | ||||
|      * | ||||
|      * @param model  the ollama model to ask the question to | ||||
|      * @param prompt the prompt/question text | ||||
|      * @return the ollama async result callback handle | ||||
|      */ | ||||
|     public OllamaAsyncResultStreamer generateAsync(String model, String prompt, boolean raw) { | ||||
|         OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt); | ||||
|         ollamaRequestModel.setRaw(raw); | ||||
|         URI uri = URI.create(this.host + "/api/generate"); | ||||
|         OllamaAsyncResultStreamer ollamaAsyncResultStreamer = new OllamaAsyncResultStreamer(getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds); | ||||
|         ollamaAsyncResultStreamer.start(); | ||||
|         return ollamaAsyncResultStreamer; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * With one or more image files, ask a question to a model running on Ollama server. This is a | ||||
|      * sync/blocking call. | ||||
|      * | ||||
|      * @param model         the ollama model to ask the question to | ||||
|      * @param prompt        the prompt/question text | ||||
|      * @param imageFiles    the list of image files to use for the question | ||||
|      * @param options       the Options object - <a | ||||
|      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More | ||||
|      *                      details on the options</a> | ||||
|      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||
|      * @return OllamaResult that includes response text and time taken for response | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaResult generateWithImageFiles(String model, String prompt, List<File> imageFiles, Options options, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         List<String> images = new ArrayList<>(); | ||||
|         for (File imageFile : imageFiles) { | ||||
|             images.add(encodeFileToBase64(imageFile)); | ||||
|         } | ||||
|         OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images); | ||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||
|         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Convenience method to call Ollama API without streaming responses. | ||||
|      * <p> | ||||
|      * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)} | ||||
|      * | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaResult generateWithImageFiles(String model, String prompt, List<File> imageFiles, Options options) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         return generateWithImageFiles(model, prompt, imageFiles, options, null); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * With one or more image URLs, ask a question to a model running on Ollama server. This is a | ||||
|      * sync/blocking call. | ||||
|      * | ||||
|      * @param model         the ollama model to ask the question to | ||||
|      * @param prompt        the prompt/question text | ||||
|      * @param imageURLs     the list of image URLs to use for the question | ||||
|      * @param options       the Options object - <a | ||||
|      *                      href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More | ||||
|      *                      details on the options</a> | ||||
|      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||
|      * @return OllamaResult that includes response text and time taken for response | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs, Options options, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||
|         List<String> images = new ArrayList<>(); | ||||
|         for (String imageURL : imageURLs) { | ||||
|             images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL))); | ||||
|         } | ||||
|         OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt, images); | ||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||
|         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Convenience method to call Ollama API without streaming responses. | ||||
|      * <p> | ||||
|      * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)} | ||||
|      * | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      * @throws URISyntaxException   if the URI for the request is malformed | ||||
|      */ | ||||
|     public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs, Options options) throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { | ||||
|         return generateWithImageURLs(model, prompt, imageURLs, options, null); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the api | ||||
|      * 'api/chat'. | ||||
|      * | ||||
|      * @param model    the ollama model to ask the question to | ||||
|      * @param messages chat history / message stack to send to the model | ||||
|      * @return {@link OllamaChatResult} containing the api response and the message history including the newly aqcuired assistant response. | ||||
|      * @throws OllamaBaseException  any response code than 200 has been returned | ||||
|      * @throws IOException          in case the responseStream can not be read | ||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaChatResult chat(String model, List<OllamaChatMessage> messages) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model); | ||||
|         return chat(builder.withMessages(messages).build()); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}. | ||||
|      * <p> | ||||
|      * Hint: the OllamaChatRequestModel#getStream() property is not implemented. | ||||
|      * | ||||
|      * @param request request object to be sent to the server | ||||
|      * @return {@link OllamaChatResult} | ||||
|      * @throws OllamaBaseException  any response code than 200 has been returned | ||||
|      * @throws IOException          in case the responseStream can not be read | ||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaChatResult chat(OllamaChatRequest request) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         return chat(request, null); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Ask a question to a model using an {@link OllamaChatRequest}. This can be constructed using an {@link OllamaChatRequestBuilder}. | ||||
|      * <p> | ||||
|      * Hint: the OllamaChatRequestModel#getStream() property is not implemented. | ||||
|      * | ||||
|      * @param request       request object to be sent to the server | ||||
|      * @param streamHandler callback handler to handle the last message from stream (caution: all previous messages from stream will be concatenated) | ||||
|      * @return {@link OllamaChatResult} | ||||
|      * @throws OllamaBaseException  any response code than 200 has been returned | ||||
|      * @throws IOException          in case the responseStream can not be read | ||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||
|      * @throws OllamaBaseException  if the response indicates an error status | ||||
|      * @throws IOException          if an I/O error occurs during the HTTP request | ||||
|      * @throws InterruptedException if the operation is interrupted | ||||
|      */ | ||||
|     public OllamaChatResult chat(OllamaChatRequest request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); | ||||
|         OllamaResult result; | ||||
|         if (streamHandler != null) { | ||||
|             request.setStream(true); | ||||
|             result = requestCaller.call(request, streamHandler); | ||||
|         } else { | ||||
|             result = requestCaller.callSync(request); | ||||
|         } | ||||
|         return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages()); | ||||
|     } | ||||
|  | ||||
    /**
     * Registers a tool (function) in the tool registry so it can later be invoked via
     * {@link #generateWithTools(String, String, Options)}.
     *
     * @param toolSpecification specification carrying the function name and its definition
     */
    public void registerTool(Tools.ToolSpecification toolSpecification) {
        toolRegistry.addFunction(toolSpecification.getFunctionName(), toolSpecification.getToolDefinition());
    }
|  | ||||
|     /** | ||||
|      * Adds a custom role. | ||||
|      * | ||||
|      * @param roleName the name of the custom role to be added | ||||
|      * @return the newly created OllamaChatMessageRole | ||||
|      */ | ||||
|     public OllamaChatMessageRole addCustomRole(String roleName) { | ||||
|         return OllamaChatMessageRole.newCustomRole(roleName); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Lists all available roles. | ||||
|      * | ||||
|      * @return a list of available OllamaChatMessageRole objects | ||||
|      */ | ||||
|     public List<OllamaChatMessageRole> listRoles() { | ||||
|         return OllamaChatMessageRole.getRoles(); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Retrieves a specific role by name. | ||||
|      * | ||||
|      * @param roleName the name of the role to retrieve | ||||
|      * @return the OllamaChatMessageRole associated with the given name | ||||
|      * @throws RoleNotFoundException if the role with the specified name does not exist | ||||
|      */ | ||||
|     public OllamaChatMessageRole getRole(String roleName) throws RoleNotFoundException { | ||||
|         return OllamaChatMessageRole.getRole(roleName); | ||||
|     } | ||||
|  | ||||
|  | ||||
|     // technical private methods // | ||||
|  | ||||
|     private static String encodeFileToBase64(File file) throws IOException { | ||||
|         return Base64.getEncoder().encodeToString(Files.readAllBytes(file.toPath())); | ||||
|     } | ||||
|  | ||||
|     private static String encodeByteArrayToBase64(byte[] bytes) { | ||||
|         return Base64.getEncoder().encodeToString(bytes); | ||||
|     } | ||||
|  | ||||
|     private OllamaResult generateSyncForOllamaRequestModel(OllamaGenerateRequest ollamaRequestModel, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { | ||||
|         OllamaGenerateEndpointCaller requestCaller = new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); | ||||
|         OllamaResult result; | ||||
|         if (streamHandler != null) { | ||||
|             ollamaRequestModel.setStream(true); | ||||
|             result = requestCaller.call(ollamaRequestModel, streamHandler); | ||||
|         } else { | ||||
|             result = requestCaller.callSync(ollamaRequestModel); | ||||
|         } | ||||
|         return result; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Get default request builder. | ||||
|      * | ||||
|      * @param uri URI to get a HttpRequest.Builder | ||||
|      * @return HttpRequest.Builder | ||||
|      */ | ||||
|     private HttpRequest.Builder getRequestBuilderDefault(URI uri) { | ||||
|         HttpRequest.Builder requestBuilder = HttpRequest.newBuilder(uri).header("Content-Type", "application/json").timeout(Duration.ofSeconds(requestTimeoutSeconds)); | ||||
|         if (isBasicAuthCredentialsSet()) { | ||||
|             requestBuilder.header("Authorization", getBasicAuthHeaderValue()); | ||||
|         } | ||||
|         return requestBuilder; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Get basic authentication header value. | ||||
|      * | ||||
|      * @return basic authentication header value (encoded credentials) | ||||
|      */ | ||||
|     private String getBasicAuthHeaderValue() { | ||||
|         String credentialsToEncode = basicAuth.getUsername() + ":" + basicAuth.getPassword(); | ||||
|         return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes()); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Check if Basic Auth credentials set. | ||||
|      * | ||||
|      * @return true when Basic Auth credentials set | ||||
|      */ | ||||
|     private boolean isBasicAuthCredentialsSet() { | ||||
|         return basicAuth != null; | ||||
|     } | ||||
|  | ||||
|     private Object invokeTool(ToolFunctionCallSpec toolFunctionCallSpec) throws ToolInvocationException { | ||||
|         try { | ||||
|             String methodName = toolFunctionCallSpec.getName(); | ||||
|             Map<String, Object> arguments = toolFunctionCallSpec.getArguments(); | ||||
|             ToolFunction function = toolRegistry.getFunction(methodName); | ||||
|             if (verbose) { | ||||
|                 logger.debug("Invoking function {} with arguments {}", methodName, arguments); | ||||
|             } | ||||
|             if (function == null) { | ||||
|                 throw new ToolNotFoundException("No such tool: " + methodName); | ||||
|             } | ||||
|             return function.apply(arguments); | ||||
|         } catch (Exception e) { | ||||
|             throw new ToolInvocationException("Failed to invoke tool: " + toolFunctionCallSpec.getName(), e); | ||||
|         } | ||||
|     } | ||||
| } | ||||
| @@ -1,4 +1,4 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.exceptions; | ||||
| package io.github.ollama4j.exceptions; | ||||
| 
 | ||||
| public class OllamaBaseException extends Exception { | ||||
| 
 | ||||
| @@ -0,0 +1,8 @@ | ||||
| package io.github.ollama4j.exceptions; | ||||
|  | ||||
| public class RoleNotFoundException extends Exception { | ||||
|  | ||||
|     public RoleNotFoundException(String s) { | ||||
|         super(s); | ||||
|     } | ||||
| } | ||||
| @@ -0,0 +1,8 @@ | ||||
| package io.github.ollama4j.exceptions; | ||||
|  | ||||
| public class ToolInvocationException extends Exception { | ||||
|  | ||||
|     public ToolInvocationException(String s, Exception e) { | ||||
|         super(s, e); | ||||
|     } | ||||
| } | ||||
| @@ -0,0 +1,8 @@ | ||||
| package io.github.ollama4j.exceptions; | ||||
|  | ||||
| public class ToolNotFoundException extends Exception { | ||||
|  | ||||
|     public ToolNotFoundException(String s) { | ||||
|         super(s); | ||||
|     } | ||||
| } | ||||
| @@ -1,6 +1,6 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.impl; | ||||
| package io.github.ollama4j.impl; | ||||
| 
 | ||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; | ||||
| import io.github.ollama4j.models.generate.OllamaStreamHandler; | ||||
| 
 | ||||
| public class ConsoleOutputStreamHandler implements OllamaStreamHandler { | ||||
|     private final StringBuffer response = new StringBuffer(); | ||||
| @@ -1,13 +1,14 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||
| package io.github.ollama4j.models.chat; | ||||
| 
 | ||||
| import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; | ||||
| import static io.github.ollama4j.utils.Utils.getObjectMapper; | ||||
| 
 | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import com.fasterxml.jackson.databind.annotation.JsonSerialize; | ||||
| 
 | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.FileToBase64Serializer; | ||||
| import io.github.ollama4j.utils.FileToBase64Serializer; | ||||
| 
 | ||||
| import java.util.List; | ||||
| 
 | ||||
| import lombok.AllArgsConstructor; | ||||
| import lombok.Data; | ||||
| import lombok.NoArgsConstructor; | ||||
| @@ -33,13 +34,13 @@ public class OllamaChatMessage { | ||||
| 
 | ||||
|     @JsonSerialize(using = FileToBase64Serializer.class) | ||||
|     private List<byte[]> images; | ||||
|      | ||||
|       @Override | ||||
|   public String toString() { | ||||
|     try { | ||||
|       return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); | ||||
|     } catch (JsonProcessingException e) { | ||||
|       throw new RuntimeException(e); | ||||
| 
 | ||||
|     @Override | ||||
|     public String toString() { | ||||
|         try { | ||||
|             return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); | ||||
|         } catch (JsonProcessingException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|     } | ||||
|   } | ||||
| } | ||||
| @@ -0,0 +1,53 @@ | ||||
| package io.github.ollama4j.models.chat; | ||||
|  | ||||
| import com.fasterxml.jackson.annotation.JsonValue; | ||||
| import io.github.ollama4j.exceptions.RoleNotFoundException; | ||||
| import lombok.Getter; | ||||
|  | ||||
| import java.util.ArrayList; | ||||
| import java.util.List; | ||||
|  | ||||
| /** | ||||
|  * Defines the possible Chat Message roles. | ||||
|  */ | ||||
| @Getter | ||||
| public class OllamaChatMessageRole { | ||||
|     private static final List<OllamaChatMessageRole> roles = new ArrayList<>(); | ||||
|  | ||||
|     public static final OllamaChatMessageRole SYSTEM = new OllamaChatMessageRole("system"); | ||||
|     public static final OllamaChatMessageRole USER = new OllamaChatMessageRole("user"); | ||||
|     public static final OllamaChatMessageRole ASSISTANT = new OllamaChatMessageRole("assistant"); | ||||
|     public static final OllamaChatMessageRole TOOL = new OllamaChatMessageRole("tool"); | ||||
|  | ||||
|     @JsonValue | ||||
|     private final String roleName; | ||||
|  | ||||
|     private OllamaChatMessageRole(String roleName) { | ||||
|         this.roleName = roleName; | ||||
|         roles.add(this); | ||||
|     } | ||||
|  | ||||
|     public static OllamaChatMessageRole newCustomRole(String roleName) { | ||||
|         OllamaChatMessageRole customRole = new OllamaChatMessageRole(roleName); | ||||
|         roles.add(customRole); | ||||
|         return customRole; | ||||
|     } | ||||
|  | ||||
|     public static List<OllamaChatMessageRole> getRoles() { | ||||
|         return new ArrayList<>(roles); | ||||
|     } | ||||
|  | ||||
|     public static OllamaChatMessageRole getRole(String roleName) throws RoleNotFoundException { | ||||
|         for (OllamaChatMessageRole role : roles) { | ||||
|             if (role.roleName.equals(roleName)) { | ||||
|                 return role; | ||||
|             } | ||||
|         } | ||||
|         throw new RoleNotFoundException("Invalid role name: " + roleName); | ||||
|     } | ||||
|  | ||||
|     @Override | ||||
|     public String toString() { | ||||
|         return roleName; | ||||
|     } | ||||
| } | ||||
| @@ -1,8 +1,9 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||
| package io.github.ollama4j.models.chat; | ||||
| 
 | ||||
| import java.util.List; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||
| 
 | ||||
| import io.github.ollama4j.models.request.OllamaCommonRequest; | ||||
| import io.github.ollama4j.utils.OllamaRequestBody; | ||||
| 
 | ||||
| import lombok.Getter; | ||||
| import lombok.Setter; | ||||
| @@ -16,20 +17,20 @@ import lombok.Setter; | ||||
|  */ | ||||
| @Getter | ||||
| @Setter | ||||
| public class OllamaChatRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody { | ||||
| public class OllamaChatRequest extends OllamaCommonRequest implements OllamaRequestBody { | ||||
| 
 | ||||
|   private List<OllamaChatMessage> messages; | ||||
| 
 | ||||
|   public OllamaChatRequestModel() {} | ||||
|   public OllamaChatRequest() {} | ||||
| 
 | ||||
|   public OllamaChatRequestModel(String model, List<OllamaChatMessage> messages) { | ||||
|   public OllamaChatRequest(String model, List<OllamaChatMessage> messages) { | ||||
|     this.model = model; | ||||
|     this.messages = messages; | ||||
|   } | ||||
| 
 | ||||
|   @Override | ||||
|   public boolean equals(Object o) { | ||||
|     if (!(o instanceof OllamaChatRequestModel)) { | ||||
|     if (!(o instanceof OllamaChatRequest)) { | ||||
|       return false; | ||||
|     } | ||||
| 
 | ||||
| @@ -1,4 +1,9 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||
| package io.github.ollama4j.models.chat; | ||||
| 
 | ||||
| import io.github.ollama4j.utils.Options; | ||||
| import io.github.ollama4j.utils.Utils; | ||||
| import org.slf4j.Logger; | ||||
| import org.slf4j.LoggerFactory; | ||||
| 
 | ||||
| import java.io.File; | ||||
| import java.io.IOException; | ||||
| @@ -8,101 +13,92 @@ import java.util.ArrayList; | ||||
| import java.util.List; | ||||
| import java.util.stream.Collectors; | ||||
| 
 | ||||
| import org.slf4j.Logger; | ||||
| import org.slf4j.LoggerFactory; | ||||
| 
 | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||
| 
 | ||||
| /** | ||||
|  * Helper class for creating {@link OllamaChatRequestModel} objects using the builder-pattern. | ||||
|  * Helper class for creating {@link OllamaChatRequest} objects using the builder-pattern. | ||||
|  */ | ||||
| public class OllamaChatRequestBuilder { | ||||
| 
 | ||||
|     private static final Logger LOG = LoggerFactory.getLogger(OllamaChatRequestBuilder.class); | ||||
| 
 | ||||
|     private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages){ | ||||
|         request = new OllamaChatRequestModel(model, messages); | ||||
|     private OllamaChatRequestBuilder(String model, List<OllamaChatMessage> messages) { | ||||
|         request = new OllamaChatRequest(model, messages); | ||||
|     } | ||||
| 
 | ||||
|     private OllamaChatRequestModel request; | ||||
|     private OllamaChatRequest request; | ||||
| 
 | ||||
|     public static OllamaChatRequestBuilder getInstance(String model){ | ||||
|     public static OllamaChatRequestBuilder getInstance(String model) { | ||||
|         return new OllamaChatRequestBuilder(model, new ArrayList<>()); | ||||
|     } | ||||
| 
 | ||||
|     public OllamaChatRequestModel build(){ | ||||
|     public OllamaChatRequest build() { | ||||
|         return request; | ||||
|     } | ||||
| 
 | ||||
|     public void reset(){ | ||||
|         request = new OllamaChatRequestModel(request.getModel(), new ArrayList<>()); | ||||
|     public void reset() { | ||||
|         request = new OllamaChatRequest(request.getModel(), new ArrayList<>()); | ||||
|     } | ||||
| 
 | ||||
|     public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images){ | ||||
|     public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, List<File> images) { | ||||
|         List<OllamaChatMessage> messages = this.request.getMessages(); | ||||
| 
 | ||||
|         List<byte[]> binaryImages = images.stream().map(file -> { | ||||
|             try { | ||||
|                 return Files.readAllBytes(file.toPath()); | ||||
|             } catch (IOException e) { | ||||
|                 LOG.warn(String.format("File '%s' could not be accessed, will not add to message!",file.toPath()), e); | ||||
|                 LOG.warn("File '{}' could not be accessed, will not add to message!", file.toPath(), e); | ||||
|                 return new byte[0]; | ||||
|             } | ||||
|         }).collect(Collectors.toList()); | ||||
| 
 | ||||
|         messages.add(new OllamaChatMessage(role,content,binaryImages)); | ||||
|         messages.add(new OllamaChatMessage(role, content, binaryImages)); | ||||
|         return this; | ||||
|     } | ||||
| 
 | ||||
|     public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, String... imageUrls){ | ||||
|     public OllamaChatRequestBuilder withMessage(OllamaChatMessageRole role, String content, String... imageUrls) { | ||||
|         List<OllamaChatMessage> messages = this.request.getMessages(); | ||||
|         List<byte[]> binaryImages = null; | ||||
|         if(imageUrls.length>0){ | ||||
|         if (imageUrls.length > 0) { | ||||
|             binaryImages = new ArrayList<>(); | ||||
|             for (String imageUrl : imageUrls) { | ||||
|                 try{ | ||||
|                 try { | ||||
|                     binaryImages.add(Utils.loadImageBytesFromUrl(imageUrl)); | ||||
|                 } | ||||
|                     catch (URISyntaxException e){ | ||||
|                         LOG.warn(String.format("URL '%s' could not be accessed, will not add to message!",imageUrl), e); | ||||
|                 } | ||||
|                 catch (IOException e){ | ||||
|                     LOG.warn(String.format("Content of URL '%s' could not be read, will not add to message!",imageUrl), e); | ||||
|                 } catch (URISyntaxException e) { | ||||
|                     LOG.warn("URL '{}' could not be accessed, will not add to message!", imageUrl, e); | ||||
|                 } catch (IOException e) { | ||||
|                     LOG.warn("Content of URL '{}' could not be read, will not add to message!", imageUrl, e); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|          | ||||
|         messages.add(new OllamaChatMessage(role,content,binaryImages)); | ||||
| 
 | ||||
|         messages.add(new OllamaChatMessage(role, content, binaryImages)); | ||||
|         return this; | ||||
|     } | ||||
| 
 | ||||
|     public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages){ | ||||
|         this.request.getMessages().addAll(messages); | ||||
|         return this; | ||||
|     public OllamaChatRequestBuilder withMessages(List<OllamaChatMessage> messages) { | ||||
|         return new OllamaChatRequestBuilder(request.getModel(), messages); | ||||
|     } | ||||
| 
 | ||||
|     public OllamaChatRequestBuilder withOptions(Options options){ | ||||
|     public OllamaChatRequestBuilder withOptions(Options options) { | ||||
|         this.request.setOptions(options.getOptionsMap()); | ||||
|         return this; | ||||
|     } | ||||
| 
 | ||||
|     public OllamaChatRequestBuilder withGetJsonResponse(){ | ||||
|     public OllamaChatRequestBuilder withGetJsonResponse() { | ||||
|         this.request.setReturnFormatJson(true); | ||||
|         return this; | ||||
|     } | ||||
| 
 | ||||
|     public OllamaChatRequestBuilder withTemplate(String template){ | ||||
|     public OllamaChatRequestBuilder withTemplate(String template) { | ||||
|         this.request.setTemplate(template); | ||||
|         return this; | ||||
|     } | ||||
| 
 | ||||
|     public OllamaChatRequestBuilder withStreaming(){ | ||||
|     public OllamaChatRequestBuilder withStreaming() { | ||||
|         this.request.setStream(true); | ||||
|         return this; | ||||
|     } | ||||
| 
 | ||||
|     public OllamaChatRequestBuilder withKeepAlive(String keepAlive){ | ||||
|     public OllamaChatRequestBuilder withKeepAlive(String keepAlive) { | ||||
|         this.request.setKeepAlive(keepAlive); | ||||
|         return this; | ||||
|     } | ||||
| @@ -1,4 +1,4 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||
| package io.github.ollama4j.models.chat; | ||||
| 
 | ||||
| import com.fasterxml.jackson.annotation.JsonProperty; | ||||
| import lombok.Data; | ||||
| @@ -1,19 +1,18 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||
| package io.github.ollama4j.models.chat; | ||||
| 
 | ||||
| import java.util.List; | ||||
| 
 | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||
| import io.github.ollama4j.models.response.OllamaResult; | ||||
| 
 | ||||
| /** | ||||
|  * Specific chat-API result that contains the chat history sent to the model and appends the answer as {@link OllamaChatResult} given by the | ||||
|  * {@link OllamaChatMessageRole#ASSISTANT} role. | ||||
|  */ | ||||
| public class OllamaChatResult extends OllamaResult{ | ||||
| public class OllamaChatResult extends OllamaResult { | ||||
| 
 | ||||
|     private List<OllamaChatMessage> chatHistory; | ||||
| 
 | ||||
|     public OllamaChatResult(String response, long responseTime, int httpStatusCode, | ||||
|             List<OllamaChatMessage> chatHistory) { | ||||
|     public OllamaChatResult(String response, long responseTime, int httpStatusCode, List<OllamaChatMessage> chatHistory) { | ||||
|         super(response, responseTime, httpStatusCode); | ||||
|         this.chatHistory = chatHistory; | ||||
|         appendAnswerToChatHistory(response); | ||||
| @@ -21,12 +20,10 @@ public class OllamaChatResult extends OllamaResult{ | ||||
| 
 | ||||
|     public List<OllamaChatMessage> getChatHistory() { | ||||
|         return chatHistory; | ||||
|     }  | ||||
|     } | ||||
| 
 | ||||
|     private void appendAnswerToChatHistory(String answer){ | ||||
|     private void appendAnswerToChatHistory(String answer) { | ||||
|         OllamaChatMessage assistantMessage = new OllamaChatMessage(OllamaChatMessageRole.ASSISTANT, answer); | ||||
|         this.chatHistory.add(assistantMessage); | ||||
|     } | ||||
|      | ||||
|      | ||||
| } | ||||
| @@ -1,10 +1,10 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||
| package io.github.ollama4j.models.chat; | ||||
| 
 | ||||
| import io.github.ollama4j.models.generate.OllamaStreamHandler; | ||||
| 
 | ||||
| import java.util.ArrayList; | ||||
| import java.util.List; | ||||
| 
 | ||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; | ||||
| 
 | ||||
| public class OllamaChatStreamObserver { | ||||
| 
 | ||||
|     private OllamaStreamHandler streamHandler; | ||||
| @@ -17,12 +17,12 @@ public class OllamaChatStreamObserver { | ||||
|         this.streamHandler = streamHandler; | ||||
|     } | ||||
| 
 | ||||
|     public void notify(OllamaChatResponseModel currentResponsePart){ | ||||
|     public void notify(OllamaChatResponseModel currentResponsePart) { | ||||
|         responseParts.add(currentResponsePart); | ||||
|         handleCurrentResponsePart(currentResponsePart); | ||||
|     } | ||||
|      | ||||
|     protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart){ | ||||
| 
 | ||||
|     protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart) { | ||||
|         message = message + currentResponsePart.getMessage().getContent(); | ||||
|         streamHandler.accept(message); | ||||
|     } | ||||
| @@ -0,0 +1,40 @@ | ||||
| package io.github.ollama4j.models.embeddings; | ||||
|  | ||||
| import io.github.ollama4j.utils.Options; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
| /** | ||||
|  * Builderclass to easily create Requests for Embedding models using ollama. | ||||
|  */ | ||||
| public class OllamaEmbedRequestBuilder { | ||||
|  | ||||
|     private final OllamaEmbedRequestModel request; | ||||
|  | ||||
|     private OllamaEmbedRequestBuilder(String model, List<String> input) { | ||||
|         this.request = new OllamaEmbedRequestModel(model,input); | ||||
|     } | ||||
|  | ||||
|     public static OllamaEmbedRequestBuilder getInstance(String model, String... input){ | ||||
|         return new OllamaEmbedRequestBuilder(model, List.of(input)); | ||||
|     } | ||||
|  | ||||
|     public OllamaEmbedRequestBuilder withOptions(Options options){ | ||||
|         this.request.setOptions(options.getOptionsMap()); | ||||
|         return this; | ||||
|     } | ||||
|  | ||||
|     public OllamaEmbedRequestBuilder withKeepAlive(String keepAlive){ | ||||
|         this.request.setKeepAlive(keepAlive); | ||||
|         return this; | ||||
|     } | ||||
|  | ||||
|     public OllamaEmbedRequestBuilder withoutTruncate(){ | ||||
|         this.request.setTruncate(false); | ||||
|         return this; | ||||
|     } | ||||
|  | ||||
|     public OllamaEmbedRequestModel build() { | ||||
|         return this.request; | ||||
|     } | ||||
| } | ||||
| @@ -0,0 +1,41 @@ | ||||
| package io.github.ollama4j.models.embeddings; | ||||
|  | ||||
| import com.fasterxml.jackson.annotation.JsonProperty; | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import lombok.Data; | ||||
| import lombok.NoArgsConstructor; | ||||
| import lombok.NonNull; | ||||
| import lombok.RequiredArgsConstructor; | ||||
|  | ||||
| import java.util.List; | ||||
| import java.util.Map; | ||||
|  | ||||
| import static io.github.ollama4j.utils.Utils.getObjectMapper; | ||||
|  | ||||
| @Data | ||||
| @RequiredArgsConstructor | ||||
| @NoArgsConstructor | ||||
| public class OllamaEmbedRequestModel { | ||||
|     @NonNull | ||||
|     private String model; | ||||
|  | ||||
|     @NonNull | ||||
|     private List<String> input; | ||||
|  | ||||
|     private Map<String, Object> options; | ||||
|  | ||||
|     @JsonProperty(value = "keep_alive") | ||||
|     private String keepAlive; | ||||
|  | ||||
|     @JsonProperty(value = "truncate") | ||||
|     private Boolean truncate = true; | ||||
|  | ||||
|     @Override | ||||
|     public String toString() { | ||||
|         try { | ||||
|             return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); | ||||
|         } catch (JsonProcessingException e) { | ||||
|             throw new RuntimeException(e); | ||||
|         } | ||||
|     } | ||||
| } | ||||
| @@ -0,0 +1,25 @@ | ||||
| package io.github.ollama4j.models.embeddings; | ||||
|  | ||||
| import com.fasterxml.jackson.annotation.JsonProperty; | ||||
| import lombok.Data; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
| @SuppressWarnings("unused") | ||||
| @Data | ||||
| public class OllamaEmbedResponseModel { | ||||
|     @JsonProperty("model") | ||||
|     private String model; | ||||
|  | ||||
|     @JsonProperty("embeddings") | ||||
|     private List<List<Double>> embeddings; | ||||
|  | ||||
|     @JsonProperty("total_duration") | ||||
|     private long totalDuration; | ||||
|  | ||||
|     @JsonProperty("load_duration") | ||||
|     private long loadDuration; | ||||
|  | ||||
|     @JsonProperty("prompt_eval_count") | ||||
|     private int promptEvalCount; | ||||
| } | ||||
| @@ -1,4 +1,4 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.embeddings; | ||||
| package io.github.ollama4j.models.embeddings; | ||||
| 
 | ||||
| import com.fasterxml.jackson.annotation.JsonProperty; | ||||
| 
 | ||||
| @@ -7,6 +7,7 @@ import lombok.Data; | ||||
| 
 | ||||
| @SuppressWarnings("unused") | ||||
| @Data | ||||
| @Deprecated(since="1.0.90") | ||||
| public class OllamaEmbeddingResponseModel { | ||||
|     @JsonProperty("embedding") | ||||
|     private List<Double> embedding; | ||||
| @@ -1,7 +1,8 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.embeddings; | ||||
| package io.github.ollama4j.models.embeddings; | ||||
| 
 | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||
| import io.github.ollama4j.utils.Options; | ||||
| 
 | ||||
| @Deprecated(since="1.0.90") | ||||
| public class OllamaEmbeddingsRequestBuilder { | ||||
| 
 | ||||
|     private OllamaEmbeddingsRequestBuilder(String model, String prompt){ | ||||
| @@ -1,6 +1,6 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.embeddings; | ||||
| package io.github.ollama4j.models.embeddings; | ||||
| 
 | ||||
| import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; | ||||
| import static io.github.ollama4j.utils.Utils.getObjectMapper; | ||||
| import java.util.Map; | ||||
| import com.fasterxml.jackson.annotation.JsonProperty; | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| @@ -12,6 +12,7 @@ import lombok.RequiredArgsConstructor; | ||||
| @Data | ||||
| @RequiredArgsConstructor | ||||
| @NoArgsConstructor | ||||
| @Deprecated(since="1.0.90") | ||||
| public class OllamaEmbeddingsRequestModel { | ||||
|   @NonNull | ||||
|   private String model; | ||||
| @@ -0,0 +1,46 @@ | ||||
| package io.github.ollama4j.models.generate; | ||||
|  | ||||
|  | ||||
| import io.github.ollama4j.models.request.OllamaCommonRequest; | ||||
| import io.github.ollama4j.utils.OllamaRequestBody; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
| import lombok.Getter; | ||||
| import lombok.Setter; | ||||
|  | ||||
| @Getter | ||||
| @Setter | ||||
| public class OllamaGenerateRequest extends OllamaCommonRequest implements OllamaRequestBody{ | ||||
|  | ||||
|   private String prompt; | ||||
|   private List<String> images; | ||||
|  | ||||
|   private String system; | ||||
|   private String context; | ||||
|   private boolean raw; | ||||
|  | ||||
|   public OllamaGenerateRequest() { | ||||
|   } | ||||
|  | ||||
|   public OllamaGenerateRequest(String model, String prompt) { | ||||
|     this.model = model; | ||||
|     this.prompt = prompt; | ||||
|   } | ||||
|  | ||||
|   public OllamaGenerateRequest(String model, String prompt, List<String> images) { | ||||
|     this.model = model; | ||||
|     this.prompt = prompt; | ||||
|     this.images = images; | ||||
|   } | ||||
|  | ||||
|     @Override | ||||
|   public boolean equals(Object o) { | ||||
|     if (!(o instanceof OllamaGenerateRequest)) { | ||||
|       return false; | ||||
|     } | ||||
|  | ||||
|     return this.toString().equals(o.toString()); | ||||
|   } | ||||
|  | ||||
| } | ||||
| @@ -1,24 +1,24 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.generate; | ||||
| package io.github.ollama4j.models.generate; | ||||
| 
 | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||
| import io.github.ollama4j.utils.Options; | ||||
| 
 | ||||
| /** | ||||
|  * Helper class for creating {@link io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel}  | ||||
|  * Helper class for creating {@link OllamaGenerateRequest} | ||||
|  * objects using the builder-pattern. | ||||
|  */ | ||||
| public class OllamaGenerateRequestBuilder { | ||||
| 
 | ||||
|     private OllamaGenerateRequestBuilder(String model, String prompt){ | ||||
|         request = new OllamaGenerateRequestModel(model, prompt); | ||||
|         request = new OllamaGenerateRequest(model, prompt); | ||||
|     } | ||||
| 
 | ||||
|     private OllamaGenerateRequestModel request; | ||||
|     private OllamaGenerateRequest request; | ||||
| 
 | ||||
|     public static OllamaGenerateRequestBuilder getInstance(String model){ | ||||
|         return new OllamaGenerateRequestBuilder(model,""); | ||||
|     } | ||||
| 
 | ||||
|     public OllamaGenerateRequestModel build(){ | ||||
|     public OllamaGenerateRequest build(){ | ||||
|         return request; | ||||
|     } | ||||
| 
 | ||||
| @@ -1,4 +1,4 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.generate; | ||||
| package io.github.ollama4j.models.generate; | ||||
| 
 | ||||
| import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | ||||
| import com.fasterxml.jackson.annotation.JsonProperty; | ||||
| @@ -1,10 +1,8 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.generate; | ||||
| package io.github.ollama4j.models.generate; | ||||
| 
 | ||||
| import java.util.ArrayList; | ||||
| import java.util.List; | ||||
| 
 | ||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; | ||||
| 
 | ||||
| public class OllamaGenerateStreamObserver { | ||||
| 
 | ||||
|     private OllamaStreamHandler streamHandler; | ||||
| @@ -17,12 +15,12 @@ public class OllamaGenerateStreamObserver { | ||||
|         this.streamHandler = streamHandler; | ||||
|     } | ||||
| 
 | ||||
|     public void notify(OllamaGenerateResponseModel currentResponsePart){ | ||||
|     public void notify(OllamaGenerateResponseModel currentResponsePart) { | ||||
|         responseParts.add(currentResponsePart); | ||||
|         handleCurrentResponsePart(currentResponsePart); | ||||
|     } | ||||
|      | ||||
|     protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart){ | ||||
| 
 | ||||
|     protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart) { | ||||
|         message = message + currentResponsePart.getResponse(); | ||||
|         streamHandler.accept(message); | ||||
|     } | ||||
| @@ -0,0 +1,7 @@ | ||||
| package io.github.ollama4j.models.generate; | ||||
|  | ||||
| import java.util.function.Consumer; | ||||
|  | ||||
| public interface OllamaStreamHandler extends Consumer<String> { | ||||
|     void accept(String message); | ||||
| } | ||||
| @@ -0,0 +1,63 @@ | ||||
| package io.github.ollama4j.models.ps; | ||||
|  | ||||
| import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | ||||
| import com.fasterxml.jackson.annotation.JsonProperty; | ||||
| import lombok.Data; | ||||
| import lombok.NoArgsConstructor; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
| @Data | ||||
| @NoArgsConstructor | ||||
| @JsonIgnoreProperties(ignoreUnknown = true) | ||||
| public class ModelsProcessResponse { | ||||
|     @JsonProperty("models") | ||||
|     private List<ModelProcess> models; | ||||
|  | ||||
|     @Data | ||||
|     @NoArgsConstructor | ||||
|     public static class ModelProcess { | ||||
|         @JsonProperty("name") | ||||
|         private String name; | ||||
|  | ||||
|         @JsonProperty("model") | ||||
|         private String model; | ||||
|  | ||||
|         @JsonProperty("size") | ||||
|         private long size; | ||||
|  | ||||
|         @JsonProperty("digest") | ||||
|         private String digest; | ||||
|  | ||||
|         @JsonProperty("details") | ||||
|         private ModelDetails details; | ||||
|  | ||||
|         @JsonProperty("expires_at") | ||||
|         private String expiresAt; // Consider using LocalDateTime if you need to process date/time | ||||
|  | ||||
|         @JsonProperty("size_vram") | ||||
|         private long sizeVram; | ||||
|     } | ||||
|  | ||||
|     @Data | ||||
|     @NoArgsConstructor | ||||
|     public static class ModelDetails { | ||||
|         @JsonProperty("parent_model") | ||||
|         private String parentModel; | ||||
|  | ||||
|         @JsonProperty("format") | ||||
|         private String format; | ||||
|  | ||||
|         @JsonProperty("family") | ||||
|         private String family; | ||||
|  | ||||
|         @JsonProperty("families") | ||||
|         private List<String> families; | ||||
|  | ||||
|         @JsonProperty("parameter_size") | ||||
|         private String parameterSize; | ||||
|  | ||||
|         @JsonProperty("quantization_level") | ||||
|         private String quantizationLevel; | ||||
|     } | ||||
| } | ||||
| @@ -1,4 +1,4 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models; | ||||
| package io.github.ollama4j.models.request; | ||||
| 
 | ||||
| import lombok.AllArgsConstructor; | ||||
| import lombok.Data; | ||||
| @@ -1,6 +1,6 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.request; | ||||
| package io.github.ollama4j.models.request; | ||||
| 
 | ||||
| import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; | ||||
| import static io.github.ollama4j.utils.Utils.getObjectMapper; | ||||
| 
 | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import lombok.AllArgsConstructor; | ||||
| @@ -1,6 +1,6 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.request; | ||||
| package io.github.ollama4j.models.request; | ||||
| 
 | ||||
| import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; | ||||
| import static io.github.ollama4j.utils.Utils.getObjectMapper; | ||||
| 
 | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import lombok.AllArgsConstructor; | ||||
| @@ -1,6 +1,6 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.request; | ||||
| package io.github.ollama4j.models.request; | ||||
| 
 | ||||
| import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; | ||||
| import static io.github.ollama4j.utils.Utils.getObjectMapper; | ||||
| 
 | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import lombok.AllArgsConstructor; | ||||
| @@ -0,0 +1,72 @@ | ||||
| package io.github.ollama4j.models.request; | ||||
|  | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import com.fasterxml.jackson.core.type.TypeReference; | ||||
| import io.github.ollama4j.exceptions.OllamaBaseException; | ||||
| import io.github.ollama4j.models.chat.OllamaChatMessage; | ||||
| import io.github.ollama4j.models.response.OllamaResult; | ||||
| import io.github.ollama4j.models.chat.OllamaChatResponseModel; | ||||
| import io.github.ollama4j.models.chat.OllamaChatStreamObserver; | ||||
| import io.github.ollama4j.models.generate.OllamaStreamHandler; | ||||
| import io.github.ollama4j.utils.OllamaRequestBody; | ||||
| import io.github.ollama4j.utils.Utils; | ||||
| import org.slf4j.Logger; | ||||
| import org.slf4j.LoggerFactory; | ||||
|  | ||||
| import java.io.IOException; | ||||
|  | ||||
/**
 * Specialization of {@code OllamaEndpointCaller} for chat requests against the
 * Ollama {@code /api/chat} endpoint. Parses each streamed JSON line into an
 * {@link OllamaChatResponseModel} and optionally forwards it to a stream observer.
 */
public class OllamaChatEndpointCaller extends OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class);

    // Observer notified with each successfully parsed streamed response part.
    // Re-assigned on every call(...), so a single instance is not safe for
    // concurrent calls — NOTE(review): confirm callers never share one instance.
    private OllamaChatStreamObserver streamObserver;

    public OllamaChatEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        super(host, basicAuth, requestTimeoutSeconds, verbose);
    }

    @Override
    protected String getEndpointSuffix() {
        return "/api/chat";
    }

    /**
     * Parses a streamed response line from the Ollama chat endpoint.
     * Using {@link com.fasterxml.jackson.databind.ObjectMapper#readValue(String, TypeReference)} should throw
     * {@link IllegalArgumentException} in case of a null line or {@link com.fasterxml.jackson.core.JsonParseException}
     * in case the JSON object cannot be parsed to a {@link OllamaChatResponseModel}. Thus, the response model should
     * never be null.
     *
     * @param line streamed line of the Ollama stream response
     * @param responseBuffer {@link StringBuilder} to append the latest response message part to
     * @return TRUE, if the Ollama response has 'done' state
     */
    @Override
    protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
        try {
            OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
            // it seems that under heavy load ollama responds with an empty chat message part in the streamed response
            // thus, we null check the message and hope that the next streamed response has some message content again
            OllamaChatMessage message = ollamaResponseModel.getMessage();
            if(message != null) {
                responseBuffer.append(message.getContent());
                if (streamObserver != null) {
                    streamObserver.notify(ollamaResponseModel);
                }
            }
            return ollamaResponseModel.isDone();
        } catch (JsonProcessingException e) {
            // NOTE(review): a malformed line terminates the stream by reporting "done";
            // the parse error is only logged, never propagated to the caller.
            LOG.error("Error parsing the Ollama chat response!", e);
            return true;
        }
    }

    /**
     * Synchronously calls the chat endpoint, registering the given handler to be
     * notified with every streamed message part.
     *
     * @param body the request payload to POST to {@code /api/chat}
     * @param streamHandler handler wrapped in an {@link OllamaChatStreamObserver}
     *                      and invoked for each streamed response part
     * @return the aggregated result of the synchronous call
     * @throws OllamaBaseException if the server reports an error response
     * @throws IOException if the HTTP exchange fails
     * @throws InterruptedException if the HTTP exchange is interrupted
     */
    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
            throws OllamaBaseException, IOException, InterruptedException {
        streamObserver = new OllamaChatStreamObserver(streamHandler);
        return super.callSync(body);
    }
}
| @@ -1,4 +1,4 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models; | ||||
| package io.github.ollama4j.models.request; | ||||
| 
 | ||||
| import java.util.Map; | ||||
| import com.fasterxml.jackson.annotation.JsonInclude; | ||||
| @@ -6,13 +6,13 @@ import com.fasterxml.jackson.annotation.JsonProperty; | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import com.fasterxml.jackson.databind.annotation.JsonSerialize; | ||||
| 
 | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.BooleanToJsonFormatFlagSerializer; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||
| import io.github.ollama4j.utils.BooleanToJsonFormatFlagSerializer; | ||||
| import io.github.ollama4j.utils.Utils; | ||||
| import lombok.Data; | ||||
| 
 | ||||
| @Data | ||||
| @JsonInclude(JsonInclude.Include.NON_NULL) | ||||
| public abstract class OllamaCommonRequestModel { | ||||
| public abstract class OllamaCommonRequest { | ||||
|    | ||||
|   protected String model;   | ||||
|   @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class) | ||||
| @@ -1,12 +1,11 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.request; | ||||
| package io.github.ollama4j.models.request; | ||||
| 
 | ||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||
| import io.github.ollama4j.OllamaAPI; | ||||
| import io.github.ollama4j.exceptions.OllamaBaseException; | ||||
| import io.github.ollama4j.models.response.OllamaErrorResponse; | ||||
| import io.github.ollama4j.models.response.OllamaResult; | ||||
| import io.github.ollama4j.utils.OllamaRequestBody; | ||||
| import io.github.ollama4j.utils.Utils; | ||||
| import org.slf4j.Logger; | ||||
| import org.slf4j.LoggerFactory; | ||||
| 
 | ||||
| @@ -78,19 +77,19 @@ public abstract class OllamaEndpointCaller { | ||||
|             while ((line = reader.readLine()) != null) { | ||||
|                 if (statusCode == 404) { | ||||
|                     LOG.warn("Status code: 404 (Not Found)"); | ||||
|                     OllamaErrorResponseModel ollamaResponseModel = | ||||
|                             Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class); | ||||
|                     OllamaErrorResponse ollamaResponseModel = | ||||
|                             Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class); | ||||
|                     responseBuffer.append(ollamaResponseModel.getError()); | ||||
|                 } else if (statusCode == 401) { | ||||
|                     LOG.warn("Status code: 401 (Unauthorized)"); | ||||
|                     OllamaErrorResponseModel ollamaResponseModel = | ||||
|                     OllamaErrorResponse ollamaResponseModel = | ||||
|                             Utils.getObjectMapper() | ||||
|                                     .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class); | ||||
|                                     .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponse.class); | ||||
|                     responseBuffer.append(ollamaResponseModel.getError()); | ||||
|                 } else if (statusCode == 400) { | ||||
|                     LOG.warn("Status code: 400 (Bad Request)"); | ||||
|                     OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, | ||||
|                             OllamaErrorResponseModel.class); | ||||
|                     OllamaErrorResponse ollamaResponseModel = Utils.getObjectMapper().readValue(line, | ||||
|                             OllamaErrorResponse.class); | ||||
|                     responseBuffer.append(ollamaResponseModel.getError()); | ||||
|                 } else { | ||||
|                     boolean finished = parseResponseAndAddToBuffer(line, responseBuffer); | ||||
| @@ -1,14 +1,13 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models.request; | ||||
| package io.github.ollama4j.models.request; | ||||
| 
 | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; | ||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel; | ||||
| import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||
| import io.github.ollama4j.exceptions.OllamaBaseException; | ||||
| import io.github.ollama4j.models.response.OllamaResult; | ||||
| import io.github.ollama4j.models.generate.OllamaGenerateResponseModel; | ||||
| import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver; | ||||
| import io.github.ollama4j.models.generate.OllamaStreamHandler; | ||||
| import io.github.ollama4j.utils.OllamaRequestBody; | ||||
| import io.github.ollama4j.utils.Utils; | ||||
| import org.slf4j.Logger; | ||||
| import org.slf4j.LoggerFactory; | ||||
| 
 | ||||
| @@ -0,0 +1,16 @@ | ||||
| package io.github.ollama4j.models.response; | ||||
| import java.util.ArrayList; | ||||
| import java.util.List; | ||||
|  | ||||
| import lombok.Data; | ||||
|  | ||||
/**
 * Represents one model entry from the Ollama model library listing.
 * Plain data holder; accessors generated by Lombok's {@code @Data}.
 */
@Data
public class LibraryModel {

    // Model identifier as listed in the library (e.g. "llama3") — TODO confirm exact form.
    private String name;
    // Short human-readable description of the model.
    private String description;
    // Pull count kept as display text (presumably values like "4.6M") — verify against producer.
    private String pullCount;
    // Total number of tags published for this model.
    private int totalTags;
    // Highlighted/popular tags; initialized so it is never null.
    private List<String> popularTags = new ArrayList<>();
    // "Last updated" kept as display text, not a parsed date — NOTE(review): confirm format.
    private String lastUpdated;
}
| @@ -0,0 +1,12 @@ | ||||
| package io.github.ollama4j.models.response; | ||||
|  | ||||
| import lombok.Data; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
/**
 * Detailed view of a library model: the model summary together with all of its
 * published tags. Plain data holder; accessors generated by Lombok's {@code @Data}.
 */
@Data
public class LibraryModelDetail {

    // The model this detail record describes.
    private LibraryModel model;
    // Tags (variants) available for the model.
    private List<LibraryModelTag> tags;
}
| @@ -0,0 +1,13 @@ | ||||
| package io.github.ollama4j.models.response; | ||||
|  | ||||
| import lombok.Data; | ||||
|  | ||||
| import java.util.List; | ||||
|  | ||||
/**
 * A single published tag (variant) of a library model.
 * Plain data holder; accessors generated by Lombok's {@code @Data}.
 */
@Data
public class LibraryModelTag {
    // Presumably the parent model's name — TODO confirm against the populating code.
    private String name;
    // The tag identifier itself (e.g. a size/quantization variant) — verify with producer.
    private String tag;
    // Size kept as display text rather than a byte count.
    private String size;
    // "Last updated" kept as display text, not a parsed date.
    private String lastUpdated;
}
| @@ -1,6 +1,7 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models; | ||||
| package io.github.ollama4j.models.response; | ||||
| 
 | ||||
| import java.util.List; | ||||
| 
 | ||||
| import lombok.Data; | ||||
| 
 | ||||
| @Data | ||||
| @@ -1,11 +1,10 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models; | ||||
| package io.github.ollama4j.models.response; | ||||
| 
 | ||||
| import java.time.LocalDateTime; | ||||
| import java.time.OffsetDateTime; | ||||
| 
 | ||||
| import com.fasterxml.jackson.annotation.JsonProperty; | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||
| import io.github.ollama4j.utils.Utils; | ||||
| import lombok.Data; | ||||
| 
 | ||||
| @Data | ||||
| @@ -1,9 +1,9 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models; | ||||
| package io.github.ollama4j.models.response; | ||||
| 
 | ||||
| import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | ||||
| import com.fasterxml.jackson.annotation.JsonProperty; | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||
| import io.github.ollama4j.utils.Utils; | ||||
| import lombok.Data; | ||||
| 
 | ||||
| @Data | ||||
| @@ -1,9 +1,9 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models; | ||||
| package io.github.ollama4j.models.response; | ||||
| 
 | ||||
| import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | ||||
| import com.fasterxml.jackson.annotation.JsonProperty; | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||
| import io.github.ollama4j.utils.Utils; | ||||
| import lombok.Data; | ||||
| 
 | ||||
| @Data | ||||
| @@ -1,4 +1,4 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models; | ||||
| package io.github.ollama4j.models.response; | ||||
| 
 | ||||
| import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | ||||
| import lombok.Data; | ||||
| @@ -0,0 +1,123 @@ | ||||
| package io.github.ollama4j.models.response; | ||||
|  | ||||
| import io.github.ollama4j.exceptions.OllamaBaseException; | ||||
| import io.github.ollama4j.models.generate.OllamaGenerateRequest; | ||||
| import io.github.ollama4j.models.generate.OllamaGenerateResponseModel; | ||||
| import io.github.ollama4j.utils.Utils; | ||||
| import lombok.Data; | ||||
| import lombok.EqualsAndHashCode; | ||||
| import lombok.Getter; | ||||
| import lombok.Setter; | ||||
|  | ||||
| import java.io.BufferedReader; | ||||
| import java.io.IOException; | ||||
| import java.io.InputStream; | ||||
| import java.io.InputStreamReader; | ||||
| import java.net.http.HttpClient; | ||||
| import java.net.http.HttpRequest; | ||||
| import java.net.http.HttpResponse; | ||||
| import java.nio.charset.StandardCharsets; | ||||
| import java.time.Duration; | ||||
|  | ||||
| @Data | ||||
| @EqualsAndHashCode(callSuper = true) | ||||
| @SuppressWarnings("unused") | ||||
| public class OllamaAsyncResultStreamer extends Thread { | ||||
|     private final HttpRequest.Builder requestBuilder; | ||||
|     private final OllamaGenerateRequest ollamaRequestModel; | ||||
|     private final OllamaResultStream stream = new OllamaResultStream(); | ||||
|     private String completeResponse; | ||||
|  | ||||
|  | ||||
|     /** | ||||
|      * -- GETTER -- Returns the status of the request. Indicates if the request was successful or a | ||||
|      * failure. If the request was a failure, the `getResponse()` method will return the error | ||||
|      * message. | ||||
|      */ | ||||
|     @Getter | ||||
|     private boolean succeeded; | ||||
|  | ||||
|     @Setter | ||||
|     private long requestTimeoutSeconds; | ||||
|  | ||||
|     /** | ||||
|      * -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama | ||||
|      * server. | ||||
|      */ | ||||
|     @Getter | ||||
|     private int httpStatusCode; | ||||
|  | ||||
|     /** | ||||
|      * -- GETTER -- Returns the response time in milliseconds. | ||||
|      */ | ||||
|     @Getter | ||||
|     private long responseTime = 0; | ||||
|  | ||||
|     public OllamaAsyncResultStreamer( | ||||
|             HttpRequest.Builder requestBuilder, | ||||
|             OllamaGenerateRequest ollamaRequestModel, | ||||
|             long requestTimeoutSeconds) { | ||||
|         this.requestBuilder = requestBuilder; | ||||
|         this.ollamaRequestModel = ollamaRequestModel; | ||||
|         this.completeResponse = ""; | ||||
|         this.stream.add(""); | ||||
|         this.requestTimeoutSeconds = requestTimeoutSeconds; | ||||
|     } | ||||
|  | ||||
|     @Override | ||||
|     public void run() { | ||||
|         ollamaRequestModel.setStream(true); | ||||
|         HttpClient httpClient = HttpClient.newHttpClient(); | ||||
|         try { | ||||
|             long startTime = System.currentTimeMillis(); | ||||
|             HttpRequest request = | ||||
|                     requestBuilder | ||||
|                             .POST( | ||||
|                                     HttpRequest.BodyPublishers.ofString( | ||||
|                                             Utils.getObjectMapper().writeValueAsString(ollamaRequestModel))) | ||||
|                             .header("Content-Type", "application/json") | ||||
|                             .timeout(Duration.ofSeconds(requestTimeoutSeconds)) | ||||
|                             .build(); | ||||
|             HttpResponse<InputStream> response = | ||||
|                     httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); | ||||
|             int statusCode = response.statusCode(); | ||||
|             this.httpStatusCode = statusCode; | ||||
|  | ||||
|             InputStream responseBodyStream = response.body(); | ||||
|             try (BufferedReader reader = | ||||
|                          new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { | ||||
|                 String line; | ||||
|                 StringBuilder responseBuffer = new StringBuilder(); | ||||
|                 while ((line = reader.readLine()) != null) { | ||||
|                     if (statusCode == 404) { | ||||
|                         OllamaErrorResponse ollamaResponseModel = | ||||
|                                 Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class); | ||||
|                         stream.add(ollamaResponseModel.getError()); | ||||
|                         responseBuffer.append(ollamaResponseModel.getError()); | ||||
|                     } else { | ||||
|                         OllamaGenerateResponseModel ollamaResponseModel = | ||||
|                                 Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); | ||||
|                         String res = ollamaResponseModel.getResponse(); | ||||
|                         stream.add(res); | ||||
|                         if (!ollamaResponseModel.isDone()) { | ||||
|                             responseBuffer.append(res); | ||||
|                         } | ||||
|                     } | ||||
|                 } | ||||
|  | ||||
|                 this.succeeded = true; | ||||
|                 this.completeResponse = responseBuffer.toString(); | ||||
|                 long endTime = System.currentTimeMillis(); | ||||
|                 responseTime = endTime - startTime; | ||||
|             } | ||||
|             if (statusCode != 200) { | ||||
|                 throw new OllamaBaseException(this.completeResponse); | ||||
|             } | ||||
|         } catch (IOException | InterruptedException | OllamaBaseException e) { | ||||
|             this.succeeded = false; | ||||
|             this.completeResponse = "[FAILED] " + e.getMessage(); | ||||
|         } | ||||
|     } | ||||
|  | ||||
| } | ||||
|  | ||||
| @@ -1,11 +1,11 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models; | ||||
| package io.github.ollama4j.models.response; | ||||
| 
 | ||||
| import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | ||||
| import lombok.Data; | ||||
| 
 | ||||
| @Data | ||||
| @JsonIgnoreProperties(ignoreUnknown = true) | ||||
| public class OllamaErrorResponseModel { | ||||
| public class OllamaErrorResponse { | ||||
| 
 | ||||
|   private String error; | ||||
| } | ||||
| @@ -1,6 +1,6 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.models; | ||||
| package io.github.ollama4j.models.response; | ||||
| 
 | ||||
| import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; | ||||
| import static io.github.ollama4j.utils.Utils.getObjectMapper; | ||||
| 
 | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import lombok.Data; | ||||
| @@ -0,0 +1,18 @@ | ||||
| package io.github.ollama4j.models.response; | ||||
|  | ||||
| import java.util.Iterator; | ||||
| import java.util.LinkedList; | ||||
| import java.util.Queue; | ||||
|  | ||||
/**
 * A FIFO buffer of streamed response tokens. Producers {@code add(...)} tokens as
 * they arrive; {@link #poll()} drains everything queued so far into one string.
 */
public class OllamaResultStream extends LinkedList<String> implements Queue<String> {
    /**
     * Removes all currently queued tokens and returns them concatenated in
     * insertion order.
     *
     * @return the joined tokens, or the empty string when nothing is queued
     */
    @Override
    public String poll() {
        StringBuilder drained = new StringBuilder();
        while (!this.isEmpty()) {
            drained.append(this.removeFirst());
        }
        return drained.toString();
    }
}
| @@ -0,0 +1,35 @@ | ||||
| package io.github.ollama4j.tools; | ||||
|  | ||||
| import io.github.ollama4j.models.response.OllamaResult; | ||||
| import lombok.AllArgsConstructor; | ||||
| import lombok.Data; | ||||
| import lombok.NoArgsConstructor; | ||||
|  | ||||
| import java.util.ArrayList; | ||||
| import java.util.List; | ||||
| import java.util.Map; | ||||
|  | ||||
| @Data | ||||
| @NoArgsConstructor | ||||
| @AllArgsConstructor | ||||
| public class OllamaToolsResult { | ||||
|     private OllamaResult modelResult; | ||||
|     private Map<ToolFunctionCallSpec, Object> toolResults; | ||||
|  | ||||
|     public List<ToolResult> getToolResults() { | ||||
|         List<ToolResult> results = new ArrayList<>(); | ||||
|         for (Map.Entry<ToolFunctionCallSpec, Object> r : this.toolResults.entrySet()) { | ||||
|             results.add(new ToolResult(r.getKey().getName(), r.getKey().getArguments(), r.getValue())); | ||||
|         } | ||||
|         return results; | ||||
|     } | ||||
|  | ||||
|     @Data | ||||
|     @NoArgsConstructor | ||||
|     @AllArgsConstructor | ||||
|     public static class ToolResult { | ||||
|         private String functionName; | ||||
|         private Map<String, Object> functionArguments; | ||||
|         private Object result; | ||||
|     } | ||||
| } | ||||
| @@ -1,8 +1,8 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.tools; | ||||
| package io.github.ollama4j.tools; | ||||
| 
 | ||||
| import java.util.Map; | ||||
| 
 | ||||
| @FunctionalInterface | ||||
| public interface DynamicFunction { | ||||
| public interface ToolFunction { | ||||
|     Object apply(Map<String, Object> arguments); | ||||
| } | ||||
| @@ -1,4 +1,4 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.tools; | ||||
| package io.github.ollama4j.tools; | ||||
| 
 | ||||
| import lombok.AllArgsConstructor; | ||||
| import lombok.Data; | ||||
| @@ -9,10 +9,8 @@ import java.util.Map; | ||||
| @Data | ||||
| @AllArgsConstructor | ||||
| @NoArgsConstructor | ||||
| public class ToolDef { | ||||
| 
 | ||||
| public class ToolFunctionCallSpec { | ||||
|     private String name; | ||||
|     private Map<String, Object> arguments; | ||||
| 
 | ||||
| } | ||||
| 
 | ||||
							
								
								
									
										16
									
								
								src/main/java/io/github/ollama4j/tools/ToolRegistry.java
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										16
									
								
								src/main/java/io/github/ollama4j/tools/ToolRegistry.java
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,16 @@ | ||||
| package io.github.ollama4j.tools; | ||||
|  | ||||
| import java.util.HashMap; | ||||
| import java.util.Map; | ||||
|  | ||||
| public class ToolRegistry { | ||||
|     private final Map<String, ToolFunction> functionMap = new HashMap<>(); | ||||
|  | ||||
|     public ToolFunction getFunction(String name) { | ||||
|         return functionMap.get(name); | ||||
|     } | ||||
|  | ||||
|     public void addFunction(String name, ToolFunction function) { | ||||
|         functionMap.put(name, function); | ||||
|     } | ||||
| } | ||||
| @@ -1,11 +1,11 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.tools; | ||||
| package io.github.ollama4j.tools; | ||||
| 
 | ||||
| import com.fasterxml.jackson.annotation.JsonIgnore; | ||||
| import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | ||||
| import com.fasterxml.jackson.annotation.JsonInclude; | ||||
| import com.fasterxml.jackson.annotation.JsonProperty; | ||||
| import com.fasterxml.jackson.core.JsonProcessingException; | ||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||
| import io.github.ollama4j.utils.Utils; | ||||
| import lombok.Builder; | ||||
| import lombok.Data; | ||||
| 
 | ||||
| @@ -14,14 +14,14 @@ import java.util.HashMap; | ||||
| import java.util.List; | ||||
| import java.util.Map; | ||||
| 
 | ||||
| public class MistralTools { | ||||
| public class Tools { | ||||
|     @Data | ||||
|     @Builder | ||||
|     public static class ToolSpecification { | ||||
|         private String functionName; | ||||
|         private String functionDesc; | ||||
|         private Map<String, PromptFuncDefinition.Property> props; | ||||
|         private DynamicFunction toolDefinition; | ||||
|         private String functionDescription; | ||||
|         private Map<String, PromptFuncDefinition.Property> properties; | ||||
|         private ToolFunction toolDefinition; | ||||
|     } | ||||
| 
 | ||||
|     @Data | ||||
| @@ -90,14 +90,14 @@ public class MistralTools { | ||||
| 
 | ||||
|             PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec(); | ||||
|             functionDetail.setName(spec.getFunctionName()); | ||||
|             functionDetail.setDescription(spec.getFunctionDesc()); | ||||
|             functionDetail.setDescription(spec.getFunctionDescription()); | ||||
| 
 | ||||
|             PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters(); | ||||
|             parameters.setType("object"); | ||||
|             parameters.setProperties(spec.getProps()); | ||||
|             parameters.setProperties(spec.getProperties()); | ||||
| 
 | ||||
|             List<String> requiredValues = new ArrayList<>(); | ||||
|             for (Map.Entry<String, PromptFuncDefinition.Property> p : spec.getProps().entrySet()) { | ||||
|             for (Map.Entry<String, PromptFuncDefinition.Property> p : spec.getProperties().entrySet()) { | ||||
|                 if (p.getValue().isRequired()) { | ||||
|                     requiredValues.add(p.getKey()); | ||||
|                 } | ||||
| @@ -109,31 +109,5 @@ public class MistralTools { | ||||
|             tools.add(def); | ||||
|             return this; | ||||
|         } | ||||
| // | ||||
| //        public PromptBuilder withToolSpecification(String functionName, String functionDesc, Map<String, PromptFuncDefinition.Property> props) { | ||||
| //            PromptFuncDefinition def = new PromptFuncDefinition(); | ||||
| //            def.setType("function"); | ||||
| // | ||||
| //            PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec(); | ||||
| //            functionDetail.setName(functionName); | ||||
| //            functionDetail.setDescription(functionDesc); | ||||
| // | ||||
| //            PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters(); | ||||
| //            parameters.setType("object"); | ||||
| //            parameters.setProperties(props); | ||||
| // | ||||
| //            List<String> requiredValues = new ArrayList<>(); | ||||
| //            for (Map.Entry<String, PromptFuncDefinition.Property> p : props.entrySet()) { | ||||
| //                if (p.getValue().isRequired()) { | ||||
| //                    requiredValues.add(p.getKey()); | ||||
| //                } | ||||
| //            } | ||||
| //            parameters.setRequired(requiredValues); | ||||
| //            functionDetail.setParameters(parameters); | ||||
| //            def.setFunction(functionDetail); | ||||
| // | ||||
| //            tools.add(def); | ||||
| //            return this; | ||||
| //        } | ||||
|     } | ||||
| } | ||||
| @@ -1,4 +1,4 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.types; | ||||
| package io.github.ollama4j.types; | ||||
| 
 | ||||
| /** | ||||
|  * A class to provide constants for all the supported models by Ollama. | ||||
| @@ -10,10 +10,9 @@ package io.github.amithkoujalgi.ollama4j.core.types; | ||||
| public class OllamaModelType { | ||||
|     public static final String GEMMA = "gemma"; | ||||
|     public static final String GEMMA2 = "gemma2"; | ||||
| 
 | ||||
| 
 | ||||
|     public static final String LLAMA2 = "llama2"; | ||||
|     public static final String LLAMA3 = "llama3"; | ||||
|     public static final String LLAMA3_1 = "llama3.1"; | ||||
|     public static final String MISTRAL = "mistral"; | ||||
|     public static final String MIXTRAL = "mixtral"; | ||||
|     public static final String LLAVA = "llava"; | ||||
| @@ -33,7 +32,6 @@ public class OllamaModelType { | ||||
|     public static final String ZEPHYR = "zephyr"; | ||||
|     public static final String OPENHERMES = "openhermes"; | ||||
|     public static final String QWEN = "qwen"; | ||||
| 
 | ||||
|     public static final String QWEN2 = "qwen2"; | ||||
|     public static final String WIZARDCODER = "wizardcoder"; | ||||
|     public static final String LLAMA2_CHINESE = "llama2-chinese"; | ||||
| @@ -1,4 +1,4 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.utils; | ||||
| package io.github.ollama4j.utils; | ||||
| 
 | ||||
| import java.io.IOException; | ||||
| 
 | ||||
| @@ -1,4 +1,4 @@ | ||||
| package io.github.amithkoujalgi.ollama4j.core.utils; | ||||
| package io.github.ollama4j.utils; | ||||
| 
 | ||||
| import java.io.IOException; | ||||
| import java.util.Base64; | ||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user