forked from Mirror/ollama4j

Compare commits: 124 commits
	| Author | SHA1 | Date | |
|---|---|---|---|
|   | 3ab9e4c283 | ||
|   | 2db6a22cc7 | ||
|   | cc69341620 | ||
|   | 4589a9032c | ||
|   | da273402b5 | ||
|   | cfa8aa14d7 | ||
|   | bc4e8303aa | ||
|   | f2f740a2a0 | ||
|   | 4cbb783a61 | ||
|   | 5c9e0b7d8a | ||
|   | 2f8577a24d | ||
|   | 02116b7025 | ||
|   | f3778f8786 | ||
|   | c6141634db | ||
|   | d9f98ad901 | ||
|   | 79d97445b8 | ||
|   | 1c40697c96 | ||
|   | f03026abb3 | ||
|   | 63a6e81ac2 | ||
|   | 76cad0f584 | ||
|   | bee2908d1e | ||
|   | 8a4c9fd969 | ||
|   | d470f940b0 | ||
|   | df402efaba | ||
|   | 677362abbf | ||
|   | 81689be194 | ||
|   | fd93036d08 | ||
|   | c9b05a725b | ||
|   | a4e1b4afe9 | ||
|   | 3d21813abb | ||
|   | 383d0f56ca | ||
|   | af1b213a76 | ||
|   | fed89a9643 | ||
|   | fd32aa33ff | ||
|   | b8a13e89b1 | ||
|   | c8f27edd6e | ||
|   | 5a936d8174 | ||
|   | 9b5ddbf4c4 | ||
|   | 7c233d5734 | ||
|   | e85aeae6e0 | ||
|   | a05052e095 | ||
|   | 10eb803e26 | ||
|   | bd2da8fdda | ||
|   | b0bb082bec | ||
|   | 81f564ef7f | ||
|   | 006b52f3db | ||
|   | 16634e60e4 | ||
|   | db8b73075b | ||
|   | dc9f79959a | ||
|   | 88f6d00763 | ||
|   | fd3a989a49 | ||
|   | 7580c6a549 | ||
|   | 9e6503d84b | ||
|   | ee21f7fdd8 | ||
|   | ecc295f484 | ||
|   | c528fef5fc | ||
|   | 38f1bda105 | ||
|   | d8a703503a | ||
|   | dd9ba7c937 | ||
|   | cf52c9610c | ||
|   | e8d709e99a | ||
|   | 51fbedad69 | ||
|   | 953605fa73 | ||
|   | 30bfdd9c6d | ||
|   | 91ee6cb4c1 | ||
|   | 8ef6fac28e | ||
|   | d9e3860123 | ||
|   | 515d1f0399 | ||
|   | be549430c5 | ||
|   | 4744315d45 | ||
|   | 8eea19a539 | ||
|   | b5801d84e0 | ||
|   | 165d04b1bb | ||
|   | 16d2160b52 | ||
|   | e39c47b8e1 | ||
|   | bb0785140b | ||
|   | e33ad1a1e3 | ||
|   | cd60c506cb | ||
|   | b55925df28 | ||
|   | 3a9b8c309d | ||
|   | bf07159522 | ||
|   | f8ca4d041d | ||
|   | 9c6a55f7b0 | ||
|   | 2866d83a2f | ||
|   | 45e5d07581 | ||
|   | 3a264cb6bb | ||
|   | e1b9d42771 | ||
|   | 1a086c37c0 | ||
|   | 54edba144c | ||
|   | 3ed3187ba9 | ||
|   | b7cd81a7f5 | ||
|   | e750c2d7f9 | ||
|   | 62f16131f3 | ||
|   | 2cbaf12d7c | ||
|   | e2d555d404 | ||
|   | c296b34174 | ||
|   | e8f99f28ec | ||
|   | 250b1abc79 | ||
|   | 42b15ad93f | ||
|   | 6f7a714bae | ||
|   | 92618e5084 | ||
|   | 391a9242c3 | ||
|   | e1b6dc3b54 | ||
|   | 04124cf978 | ||
|   | e4e717b747 | ||
|   | 10d2a8f5ff | ||
|   | 899fa38805 | ||
|   | 2df878c953 | ||
|   | 78a5eedc8f | ||
|   | 364f961ee2 | ||
|   | b21aa6add2 | ||
|   | ec4abd1c2d | ||
|   | 9900ae92fb | ||
|   | fa20daf6e5 | ||
|   | 44949c0559 | ||
|   | e88711a017 | ||
|   | 32169ded18 | ||
|   | 4b2d566fd9 | ||
|   | fb4b7a7ce5 | ||
|   | 18f27775b0 | ||
|   | cb462ad05a | ||
|   | 1eec22ca1a | ||
|   | c1f3c51f88 | ||
|   | 9224d2da06 | ||

`.github/workflows/gh-mvn-publish.yml` (new file, vendored, 58 lines)
							| @@ -0,0 +1,58 @@ | |||||||
|  | name: Release Artifacts to GitHub Maven Packages | ||||||
|  |  | ||||||
|  | on: | ||||||
|  |   release: | ||||||
|  |     types: [ created ] | ||||||
|  |  | ||||||
|  | jobs: | ||||||
|  |   build: | ||||||
|  |  | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     permissions: | ||||||
|  |       contents: read | ||||||
|  |       packages: write | ||||||
|  |  | ||||||
|  |     steps: | ||||||
|  |       - uses: actions/checkout@v3 | ||||||
|  |       - name: Set up JDK 17 | ||||||
|  |         uses: actions/setup-java@v3 | ||||||
|  |         with: | ||||||
|  |           java-version: '17' | ||||||
|  |           distribution: 'temurin' | ||||||
|  |           server-id: github | ||||||
|  |           settings-path: ${{ github.workspace }} | ||||||
|  |  | ||||||
|  |       - name: maven-settings-xml-action | ||||||
|  |         uses: whelk-io/maven-settings-xml-action@v22 | ||||||
|  |         with: | ||||||
|  |           servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]' | ||||||
|  |  | ||||||
|  |       - name: Find and Replace | ||||||
|  |         uses: jacobtomlinson/gha-find-replace@v3 | ||||||
|  |         with: | ||||||
|  |           find: "ollama4j-revision" | ||||||
|  |           replace: ${{ github.ref_name }} | ||||||
|  |           regex: false | ||||||
|  |  | ||||||
|  |       - name: Find and Replace | ||||||
|  |         uses: jacobtomlinson/gha-find-replace@v3 | ||||||
|  |         with: | ||||||
|  |           find: "mvn-repo-id" | ||||||
|  |           replace: github | ||||||
|  |           regex: false | ||||||
|  |  | ||||||
|  |       - name: Import GPG key | ||||||
|  |         uses: crazy-max/ghaction-import-gpg@v6 | ||||||
|  |         with: | ||||||
|  |           gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} | ||||||
|  |           passphrase: ${{ secrets.GPG_PASSPHRASE }} | ||||||
|  |       - name: List keys | ||||||
|  |         run: gpg -K | ||||||
|  |  | ||||||
|  |       - name: Build with Maven | ||||||
|  |         run: mvn --file pom.xml -U clean package -Punit-tests | ||||||
|  |  | ||||||
|  |       - name: Publish to GitHub Packages Apache Maven | ||||||
|  |         run: mvn deploy -Punit-tests -s $GITHUB_WORKSPACE/settings.xml -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=github -Drepo.user=${{ secrets.GH_MVN_USER }} -Drepo.pass=${{ secrets.GH_MVN_PASS }} -DaltDeploymentRepository=github::default::https://maven.pkg.github.com/ollama4j/ollama4j | ||||||
|  |         env: | ||||||
|  |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||||
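
The deploy step above leans on placeholder tokens that live in the project's `pom.xml` rather than in the workflow itself. The fragment below is a minimal sketch of what the POM presumably contains, inferred from the "Find and Replace" steps and the `-Drepo.*` properties; the actual `pom.xml` is not part of this diff, so treat the layout as an assumption.

```xml
<!-- Hypothetical pom.xml fragment (the real file is not shown in this diff).
     The literal tokens "ollama4j-revision" and "mvn-repo-id" are rewritten by the
     Find and Replace steps above before Maven runs. -->
<project>
    <groupId>io.github.ollama4j</groupId>
    <artifactId>ollama4j</artifactId>
    <!-- rewritten to the release tag name, e.g. 1.0.78 -->
    <version>ollama4j-revision</version>

    <distributionManagement>
        <repository>
            <!-- rewritten to "github" here (or "central" in maven-publish.yml) -->
            <id>mvn-repo-id</id>
            <url>https://maven.pkg.github.com/ollama4j/ollama4j</url>
        </repository>
    </distributionManagement>
</project>
```

Since the tag name becomes the artifact version, the release-asset steps in `maven-publish.yml` below can expect jars named `ollama4j-<tag>.jar` in `target/`.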
							
								
								
									
`.github/workflows/maven-publish.yml` (vendored, 121 lines changed)
							| @@ -1,68 +1,95 @@ | |||||||
| # This workflow will build a package using Maven and then publish it to GitHub packages when a release is created | # This workflow will build a package using Maven and then publish it to GitHub packages when a release is created | ||||||
| # For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path | # For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path | ||||||
|  |  | ||||||
| name: Test and Publish Package | name: Release Artifacts to Maven Central | ||||||
|  |  | ||||||
| #on: |  | ||||||
| #  release: |  | ||||||
| #    types: [ "created" ] |  | ||||||
|  |  | ||||||
| on: | on: | ||||||
|   push: |   release: | ||||||
|     branches: [ "main" ] |     types: [ created ] | ||||||
|   workflow_dispatch: |  | ||||||
|  |  | ||||||
|  | #on: | ||||||
|  | #  pull_request: | ||||||
|  | #    types: [ opened, reopened ] | ||||||
|  | #    branches: [ "main" ] | ||||||
|  |  | ||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   build: |   build: | ||||||
|  |  | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|  |  | ||||||
|     permissions: |     permissions: | ||||||
|       contents: write |       contents: write | ||||||
|       packages: write |       packages: write | ||||||
|  |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v3 |       - uses: actions/checkout@v3 | ||||||
|       - name: Set up JDK 11 |  | ||||||
|  |       - name: Set up JDK 17 | ||||||
|         uses: actions/setup-java@v3 |         uses: actions/setup-java@v3 | ||||||
|         with: |         with: | ||||||
|           java-version: '11' |           java-version: '17' | ||||||
|           distribution: 'adopt-hotspot' |           distribution: 'temurin' | ||||||
|           server-id: github # Value of the distributionManagement/repository/id field of the pom.xml |           server-id: github # Value of the distributionManagement/repository/id field of the pom.xml | ||||||
|           settings-path: ${{ github.workspace }} # location for the settings.xml file |           settings-path: ${{ github.workspace }} # location for the settings.xml file | ||||||
|       - name: Build with Maven |  | ||||||
|         run: mvn --file pom.xml -U clean package -Punit-tests |       - name: maven-settings-xml-action | ||||||
|       - name: Set up Apache Maven Central (Overwrite settings.xml) |         uses: whelk-io/maven-settings-xml-action@v22 | ||||||
|         uses: actions/setup-java@v3 |  | ||||||
|         with: # running setup-java again overwrites the settings.xml |  | ||||||
|           java-version: '11' |  | ||||||
|           distribution: 'adopt-hotspot' |  | ||||||
|           cache: 'maven' |  | ||||||
|           server-id: ossrh |  | ||||||
|           server-username: MAVEN_USERNAME |  | ||||||
|           server-password: MAVEN_PASSWORD |  | ||||||
|           gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }} |  | ||||||
|           gpg-passphrase: MAVEN_GPG_PASSPHRASE |  | ||||||
|       - name: Set up Maven cache |  | ||||||
|         uses: actions/cache@v3 |  | ||||||
|         with: |         with: | ||||||
|           path: ~/.m2/repository |           servers: '[{ "id": "${repo.id}", "username": "${repo.user}", "password": "${repo.pass}" }]' | ||||||
|           key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} |  | ||||||
|           restore-keys: | |       - name: Import GPG key | ||||||
|             ${{ runner.os }}-maven- |         uses: crazy-max/ghaction-import-gpg@v6 | ||||||
|       - name: Build |         with: | ||||||
|         run: mvn -B -ntp clean install |           gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} | ||||||
|       - name: Upload coverage reports to Codecov |           passphrase: ${{ secrets.GPG_PASSPHRASE }} | ||||||
|         uses: codecov/codecov-action@v3 |       - name: List keys | ||||||
|  |         run: gpg -K | ||||||
|  |  | ||||||
|  |       - name: Find and Replace | ||||||
|  |         uses: jacobtomlinson/gha-find-replace@v3 | ||||||
|  |         with: | ||||||
|  |           find: "ollama4j-revision" | ||||||
|  |           replace: ${{ github.ref_name }} | ||||||
|  |           regex: false | ||||||
|  |  | ||||||
|  |       - name: Find and Replace | ||||||
|  |         uses: jacobtomlinson/gha-find-replace@v3 | ||||||
|  |         with: | ||||||
|  |           find: "mvn-repo-id" | ||||||
|  |           replace: central | ||||||
|  |           regex: false | ||||||
|  |  | ||||||
|  |       - name: Publish to Maven Central | ||||||
|  |         run: mvn deploy -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Drepo.id=central -Drepo.user=${{ secrets.MVN_USER }} -Drepo.pass=${{ secrets.MVN_PASS }} | ||||||
|  |  | ||||||
|  |       - name: Upload Release Asset - JAR | ||||||
|  |         uses: actions/upload-release-asset@v1 | ||||||
|         env: |         env: | ||||||
|           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||||
|       - name: Publish to GitHub Packages Apache Maven |         with: | ||||||
|         #        if: > |           upload_url: ${{ github.event.release.upload_url }} | ||||||
|         #          github.event_name != 'pull_request' && |           asset_path: target/ollama4j-${{ github.ref_name }}.jar | ||||||
|         #          github.ref_name == 'main' && |           asset_name: ollama4j-${{ github.ref_name }}.jar | ||||||
|         #          contains(github.event.head_commit.message, 'release') |           asset_content_type: application/x-jar | ||||||
|         run: | |  | ||||||
|           git config --global user.email "koujalgi.amith@gmail.com" |       - name: Upload Release Asset - Javadoc JAR | ||||||
|           git config --global user.name "amithkoujalgi" |         uses: actions/upload-release-asset@v1 | ||||||
|           mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform |  | ||||||
|         env: |         env: | ||||||
|           MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }} |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||||
|           MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }} |         with: | ||||||
|           MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} |           upload_url: ${{ github.event.release.upload_url }} | ||||||
|  |           asset_path: target/ollama4j-${{ github.ref_name }}-javadoc.jar | ||||||
|  |           asset_name: ollama4j-${{ github.ref_name }}-javadoc.jar | ||||||
|  |           asset_content_type: application/x-jar | ||||||
|  |  | ||||||
|  |       - name: Upload Release Asset - Sources JAR | ||||||
|  |         uses: actions/upload-release-asset@v1 | ||||||
|  |         env: | ||||||
|  |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||||
|  |         with: | ||||||
|  |           upload_url: ${{ github.event.release.upload_url }} | ||||||
|  |           asset_path: target/ollama4j-${{ github.ref_name }}-sources.jar | ||||||
|  |           asset_name: ollama4j-${{ github.ref_name }}-sources.jar | ||||||
|  |           asset_content_type: application/x-jar | ||||||
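
For context, the `maven-settings-xml-action` step used by both publishing workflows writes a `settings.xml` whose server credentials are left as `${repo.*}` expressions; Maven then appears to resolve them from the `-Drepo.id`, `-Drepo.user` and `-Drepo.pass` properties passed on the `mvn deploy` command line. A rough sketch of the generated file, assuming the action's default output location of `~/.m2/settings.xml`:

```xml
<!-- Approximate settings.xml produced by whelk-io/maven-settings-xml-action
     (assumption: default output location ~/.m2/settings.xml).
     The ${repo.*} expressions are interpolated at deploy time from the
     -Drepo.id / -Drepo.user / -Drepo.pass system properties. -->
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0">
    <servers>
        <server>
            <id>${repo.id}</id>
            <username>${repo.user}</username>
            <password>${repo.pass}</password>
        </server>
    </servers>
</settings>
```

This keeps the credentials out of the repository: the workflows supply them from `secrets.MVN_USER`/`secrets.MVN_PASS` (or `GH_MVN_USER`/`GH_MVN_PASS` for GitHub Packages) only at deploy time.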
							
								
								
									
`.github/workflows/publish-docs.yml` (vendored, 12 lines changed)
							| @@ -2,9 +2,8 @@ | |||||||
| name: Deploy Docs to GH Pages | name: Deploy Docs to GH Pages | ||||||
|  |  | ||||||
| on: | on: | ||||||
|   # Runs on pushes targeting the default branch |   release: | ||||||
|   push: |     types: [ created ] | ||||||
|     branches: [ "main" ] |  | ||||||
|  |  | ||||||
|   # Allows you to run this workflow manually from the Actions tab |   # Allows you to run this workflow manually from the Actions tab | ||||||
|   workflow_dispatch: |   workflow_dispatch: | ||||||
| @@ -47,6 +46,13 @@ jobs: | |||||||
|       - run: cd docs && npm ci |       - run: cd docs && npm ci | ||||||
|       - run: cd docs && npm run build |       - run: cd docs && npm run build | ||||||
|  |  | ||||||
|  |       - name: Find and Replace | ||||||
|  |         uses: jacobtomlinson/gha-find-replace@v3 | ||||||
|  |         with: | ||||||
|  |           find: "ollama4j-revision" | ||||||
|  |           replace: ${{ github.ref_name }} | ||||||
|  |           regex: false | ||||||
|  |  | ||||||
|       - name: Build with Maven |       - name: Build with Maven | ||||||
|         run: mvn --file pom.xml -U clean package && cp -r ./target/apidocs/. ./docs/build/apidocs |         run: mvn --file pom.xml -U clean package && cp -r ./target/apidocs/. ./docs/build/apidocs | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										52
									
								
								.github/workflows/publish-javadoc.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										52
									
								
								.github/workflows/publish-javadoc.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,52 +0,0 @@ | |||||||
| # Simple workflow for deploying static content to GitHub Pages |  | ||||||
| name: Deploy Javadoc content to Pages |  | ||||||
|  |  | ||||||
| on: |  | ||||||
|   # Runs on pushes targeting the default branch |  | ||||||
|   push: |  | ||||||
|     branches: [ "none" ] |  | ||||||
|  |  | ||||||
|   # Allows you to run this workflow manually from the Actions tab |  | ||||||
|   workflow_dispatch: |  | ||||||
|  |  | ||||||
| # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages |  | ||||||
| permissions: |  | ||||||
|   contents: read |  | ||||||
|   pages: write |  | ||||||
|   id-token: write |  | ||||||
|   packages: write |  | ||||||
| # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. |  | ||||||
| # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. |  | ||||||
| concurrency: |  | ||||||
|   group: "pages" |  | ||||||
|   cancel-in-progress: false |  | ||||||
|  |  | ||||||
| jobs: |  | ||||||
|   # Single deploy job since we're just deploying |  | ||||||
|   deploy: |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|  |  | ||||||
|     environment: |  | ||||||
|       name: github-pages |  | ||||||
|       url: ${{ steps.deployment.outputs.page_url }} |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v3 |  | ||||||
|       - name: Set up JDK 11 |  | ||||||
|         uses: actions/setup-java@v3 |  | ||||||
|         with: |  | ||||||
|           java-version: '11' |  | ||||||
|           distribution: 'adopt-hotspot' |  | ||||||
|           server-id: github # Value of the distributionManagement/repository/id field of the pom.xml |  | ||||||
|           settings-path: ${{ github.workspace }} # location for the settings.xml file |  | ||||||
|       - name: Build with Maven |  | ||||||
|         run: mvn --file pom.xml -U clean package |  | ||||||
|       - name: Setup Pages |  | ||||||
|         uses: actions/configure-pages@v3 |  | ||||||
|       - name: Upload artifact |  | ||||||
|         uses: actions/upload-pages-artifact@v2 |  | ||||||
|         with: |  | ||||||
|           # Upload entire repository |  | ||||||
|           path: './target/apidocs/.' |  | ||||||
|       - name: Deploy to GitHub Pages |  | ||||||
|         id: deployment |  | ||||||
|         uses: actions/deploy-pages@v2 |  | ||||||
							
								
								
									
`.github/workflows/release-jar.yml` (vendored, file deleted, 41 lines)
							| @@ -1,41 +0,0 @@ | |||||||
| name: Release JAR |  | ||||||
|  |  | ||||||
| on: |  | ||||||
|   push: |  | ||||||
|     tags: |  | ||||||
|       - '**' |  | ||||||
|  |  | ||||||
| permissions: |  | ||||||
|   contents: read |  | ||||||
|   id-token: write |  | ||||||
|   packages: write |  | ||||||
|  |  | ||||||
| jobs: |  | ||||||
|   build: |  | ||||||
|  |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|  |  | ||||||
|     permissions: |  | ||||||
|       contents: write |  | ||||||
|       pull-requests: write |  | ||||||
|       repository-projects: write |  | ||||||
|  |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v3 |  | ||||||
|       - name: Set up JDK 11 |  | ||||||
|         uses: actions/setup-java@v3 |  | ||||||
|         with: |  | ||||||
|           java-version: '11' |  | ||||||
|           distribution: 'temurin' |  | ||||||
|           server-id: github # Value of the distributionManagement/repository/id field of the pom.xml |  | ||||||
|           settings-path: ${{ github.workspace }} # location for the settings.xml file |  | ||||||
|  |  | ||||||
|       - name: Build with Maven |  | ||||||
|         run: mvn -B clean install package --file pom.xml |  | ||||||
|  |  | ||||||
|       - name: Release Assets |  | ||||||
|         uses: softprops/action-gh-release@v1 |  | ||||||
|         env: |  | ||||||
|           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} |  | ||||||
|         with: |  | ||||||
|           files: target/*.jar |  | ||||||
							
								
								
									
`CODE_OF_CONDUCT.md` (new file, 128 lines)
							| @@ -0,0 +1,128 @@ | |||||||
|  | # Contributor Covenant Code of Conduct | ||||||
|  |  | ||||||
|  | ## Our Pledge | ||||||
|  |  | ||||||
|  | We as members, contributors, and leaders pledge to make participation in our | ||||||
|  | community a harassment-free experience for everyone, regardless of age, body | ||||||
|  | size, visible or invisible disability, ethnicity, sex characteristics, gender | ||||||
|  | identity and expression, level of experience, education, socio-economic status, | ||||||
|  | nationality, personal appearance, race, religion, or sexual identity | ||||||
|  | and orientation. | ||||||
|  |  | ||||||
|  | We pledge to act and interact in ways that contribute to an open, welcoming, | ||||||
|  | diverse, inclusive, and healthy community. | ||||||
|  |  | ||||||
|  | ## Our Standards | ||||||
|  |  | ||||||
|  | Examples of behavior that contributes to a positive environment for our | ||||||
|  | community include: | ||||||
|  |  | ||||||
|  | * Demonstrating empathy and kindness toward other people | ||||||
|  | * Being respectful of differing opinions, viewpoints, and experiences | ||||||
|  | * Giving and gracefully accepting constructive feedback | ||||||
|  | * Accepting responsibility and apologizing to those affected by our mistakes, | ||||||
|  |   and learning from the experience | ||||||
|  | * Focusing on what is best not just for us as individuals, but for the | ||||||
|  |   overall community | ||||||
|  |  | ||||||
|  | Examples of unacceptable behavior include: | ||||||
|  |  | ||||||
|  | * The use of sexualized language or imagery, and sexual attention or | ||||||
|  |   advances of any kind | ||||||
|  | * Trolling, insulting or derogatory comments, and personal or political attacks | ||||||
|  | * Public or private harassment | ||||||
|  | * Publishing others' private information, such as a physical or email | ||||||
|  |   address, without their explicit permission | ||||||
|  | * Other conduct which could reasonably be considered inappropriate in a | ||||||
|  |   professional setting | ||||||
|  |  | ||||||
|  | ## Enforcement Responsibilities | ||||||
|  |  | ||||||
|  | Community leaders are responsible for clarifying and enforcing our standards of | ||||||
|  | acceptable behavior and will take appropriate and fair corrective action in | ||||||
|  | response to any behavior that they deem inappropriate, threatening, offensive, | ||||||
|  | or harmful. | ||||||
|  |  | ||||||
|  | Community leaders have the right and responsibility to remove, edit, or reject | ||||||
|  | comments, commits, code, wiki edits, issues, and other contributions that are | ||||||
|  | not aligned to this Code of Conduct, and will communicate reasons for moderation | ||||||
|  | decisions when appropriate. | ||||||
|  |  | ||||||
|  | ## Scope | ||||||
|  |  | ||||||
|  | This Code of Conduct applies within all community spaces, and also applies when | ||||||
|  | an individual is officially representing the community in public spaces. | ||||||
|  | Examples of representing our community include using an official e-mail address, | ||||||
|  | posting via an official social media account, or acting as an appointed | ||||||
|  | representative at an online or offline event. | ||||||
|  |  | ||||||
|  | ## Enforcement | ||||||
|  |  | ||||||
|  | Instances of abusive, harassing, or otherwise unacceptable behavior may be | ||||||
|  | reported to the community leaders responsible for enforcement at | ||||||
|  | koujalgi.amith@gmail.com. | ||||||
|  | All complaints will be reviewed and investigated promptly and fairly. | ||||||
|  |  | ||||||
|  | All community leaders are obligated to respect the privacy and security of the | ||||||
|  | reporter of any incident. | ||||||
|  |  | ||||||
|  | ## Enforcement Guidelines | ||||||
|  |  | ||||||
|  | Community leaders will follow these Community Impact Guidelines in determining | ||||||
|  | the consequences for any action they deem in violation of this Code of Conduct: | ||||||
|  |  | ||||||
|  | ### 1. Correction | ||||||
|  |  | ||||||
|  | **Community Impact**: Use of inappropriate language or other behavior deemed | ||||||
|  | unprofessional or unwelcome in the community. | ||||||
|  |  | ||||||
|  | **Consequence**: A private, written warning from community leaders, providing | ||||||
|  | clarity around the nature of the violation and an explanation of why the | ||||||
|  | behavior was inappropriate. A public apology may be requested. | ||||||
|  |  | ||||||
|  | ### 2. Warning | ||||||
|  |  | ||||||
|  | **Community Impact**: A violation through a single incident or series | ||||||
|  | of actions. | ||||||
|  |  | ||||||
|  | **Consequence**: A warning with consequences for continued behavior. No | ||||||
|  | interaction with the people involved, including unsolicited interaction with | ||||||
|  | those enforcing the Code of Conduct, for a specified period of time. This | ||||||
|  | includes avoiding interactions in community spaces as well as external channels | ||||||
|  | like social media. Violating these terms may lead to a temporary or | ||||||
|  | permanent ban. | ||||||
|  |  | ||||||
|  | ### 3. Temporary Ban | ||||||
|  |  | ||||||
|  | **Community Impact**: A serious violation of community standards, including | ||||||
|  | sustained inappropriate behavior. | ||||||
|  |  | ||||||
|  | **Consequence**: A temporary ban from any sort of interaction or public | ||||||
|  | communication with the community for a specified period of time. No public or | ||||||
|  | private interaction with the people involved, including unsolicited interaction | ||||||
|  | with those enforcing the Code of Conduct, is allowed during this period. | ||||||
|  | Violating these terms may lead to a permanent ban. | ||||||
|  |  | ||||||
|  | ### 4. Permanent Ban | ||||||
|  |  | ||||||
|  | **Community Impact**: Demonstrating a pattern of violation of community | ||||||
|  | standards, including sustained inappropriate behavior,  harassment of an | ||||||
|  | individual, or aggression toward or disparagement of classes of individuals. | ||||||
|  |  | ||||||
|  | **Consequence**: A permanent ban from any sort of public interaction within | ||||||
|  | the community. | ||||||
|  |  | ||||||
|  | ## Attribution | ||||||
|  |  | ||||||
|  | This Code of Conduct is adapted from the [Contributor Covenant][homepage], | ||||||
|  | version 2.0, available at | ||||||
|  | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. | ||||||
|  |  | ||||||
|  | Community Impact Guidelines were inspired by [Mozilla's code of conduct | ||||||
|  | enforcement ladder](https://github.com/mozilla/diversity). | ||||||
|  |  | ||||||
|  | [homepage]: https://www.contributor-covenant.org | ||||||
|  |  | ||||||
|  | For answers to common questions about this code of conduct, see the FAQ at | ||||||
|  | https://www.contributor-covenant.org/faq. Translations are available at | ||||||
|  | https://www.contributor-covenant.org/translations. | ||||||
							
								
								
									
`Makefile` (4 lines changed)
							| @@ -1,10 +1,10 @@ | |||||||
| build: | build: | ||||||
| 	mvn -B clean install | 	mvn -B clean install | ||||||
|  |  | ||||||
| ut: | unit-tests: | ||||||
| 	mvn clean test -Punit-tests | 	mvn clean test -Punit-tests | ||||||
|  |  | ||||||
| it: | integration-tests: | ||||||
| 	mvn clean verify -Pintegration-tests | 	mvn clean verify -Pintegration-tests | ||||||
|  |  | ||||||
| doxygen: | doxygen: | ||||||
|   | |||||||
							
								
								
									
										221
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										221
									
								
								README.md
									
									
									
									
									
								
							| @@ -1,31 +1,46 @@ | |||||||
| ### Ollama4j | ### Ollama4j | ||||||
|  |  | ||||||
| <img src='https://raw.githubusercontent.com/amithkoujalgi/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon"> | <p align="center"> | ||||||
|  |   <img src='https://raw.githubusercontent.com/ollama4j/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon"> | ||||||
|  | </p> | ||||||
|  |  | ||||||
|  |  | ||||||
| A Java library (wrapper/binding) for [Ollama](https://ollama.ai/) server. | A Java library (wrapper/binding) for [Ollama](https://ollama.ai/) server. | ||||||
|  |  | ||||||
| Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/). | Find more details on the [website](https://ollama4j.github.io/ollama4j/). | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
| [](https://codecov.io/gh/amithkoujalgi/ollama4j) | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  | [//]: # () | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  | [](https://codecov.io/gh/ollama4j/ollama4j) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
| ## Table of Contents | ## Table of Contents | ||||||
|  |  | ||||||
| - [How does it work?](#how-does-it-work) | - [How does it work?](#how-does-it-work) | ||||||
| - [Requirements](#requirements) | - [Requirements](#requirements) | ||||||
| - [Installation](#installation) | - [Installation](#installation) | ||||||
| - [API Spec](#api-spec) | - [API Spec](https://ollama4j.github.io/ollama4j/docs/category/apis---model-management) | ||||||
| - [Demo APIs](#try-out-the-apis-with-ollama-server) | - [Javadoc](https://ollama4j.github.io/ollama4j/apidocs/) | ||||||
| - [Development](#development) | - [Development](#development) | ||||||
| - [Contributions](#get-involved) | - [Contributions](#get-involved) | ||||||
| - [References](#references) | - [References](#references) | ||||||
| @@ -46,44 +61,132 @@ Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/). | |||||||
|  |  | ||||||
| #### Requirements | #### Requirements | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
| [![][ollama-shield]][ollama] **Or** [![][ollama-docker-shield]][ollama-docker] | [![][ollama-shield]][ollama-link] **Or** [![][ollama-docker-shield]][ollama-docker] | ||||||
|  |  | ||||||
| [ollama]: https://ollama.ai/ | [ollama-link]: https://ollama.ai/ | ||||||
|  |  | ||||||
| [ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=just-the-message&labelColor=gray | [ollama-shield]: https://img.shields.io/badge/Ollama-Local_Installation-blue.svg?style=for-the-badge&labelColor=gray | ||||||
|  |  | ||||||
| [ollama-docker]: https://hub.docker.com/r/ollama/ollama | [ollama-docker]: https://hub.docker.com/r/ollama/ollama | ||||||
|  |  | ||||||
| [ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=just-the-message&labelColor=gray | [ollama-docker-shield]: https://img.shields.io/badge/Ollama-Docker-blue.svg?style=for-the-badge&labelColor=gray | ||||||
|  |  | ||||||
| #### Installation | ## Installation | ||||||
|  |  | ||||||
| In your Maven project, add this dependency: | > [!NOTE] | ||||||
|  | > We have migrated the package repository from Maven Central to the GitHub package repository due to technical issues with | ||||||
|  | > publishing. Please update your repository settings to get the latest version of Ollama4j. | ||||||
|  | > | ||||||
|  | > Track the releases [here](https://github.com/ollama4j/ollama4j/releases) and update the dependency version | ||||||
|  | > according to your requirements. | ||||||
|  |  | ||||||
|  | ### For Maven | ||||||
|  |  | ||||||
|  | #### Using [Maven Central](https://central.sonatype.com/) | ||||||
|  |  | ||||||
|  | [![][ollama4j-mvn-releases-shield]][ollama4j-mvn-releases-link] | ||||||
|  |  | ||||||
|  | [ollama4j-mvn-releases-link]: https://github.com/ollama4j/ollama4j/releases | ||||||
|  |  | ||||||
|  | [ollama4j-mvn-releases-shield]: https://img.shields.io/maven-central/v/io.github.ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20Maven%20Central%20 | ||||||
|  |  | ||||||
|  | In your Maven project, add this dependency: | ||||||
|  |  | ||||||
| ```xml | ```xml | ||||||
|  |  | ||||||
| <dependency> | <dependency> | ||||||
|     <groupId>io.github.amithkoujalgi</groupId> |     <groupId>io.github.ollama4j</groupId> | ||||||
|     <artifactId>ollama4j</artifactId> |     <artifactId>ollama4j</artifactId> | ||||||
|     <version>1.0.57</version> |     <version>1.0.78</version> | ||||||
| </dependency> | </dependency> | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| Latest release: | #### Using GitHub's Maven Package Repository | ||||||
|  |  | ||||||
|  | [![][ollama4j-releases-shield]][ollama4j-releases-link] | ||||||
|  |  | ||||||
| [![][lib-shield]][lib] | [ollama4j-releases-link]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j/overview | ||||||
|  |  | ||||||
| [lib]: https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j | [ollama4j-releases-shield]: https://img.shields.io/github/v/release/ollama4j/ollama4j?display_name=release&style=for-the-badge&label=From%20GitHub%20Packages%20 | ||||||
|  |  | ||||||
|  | 1. Add `GitHub Maven Packages` repository to your project's `pom.xml` or your `settings.xml`: | ||||||
|  |  | ||||||
|  | ```xml | ||||||
|  |  | ||||||
|  | <repositories> | ||||||
|  |     <repository> | ||||||
|  |         <id>github</id> | ||||||
|  |         <name>GitHub Apache Maven Packages</name> | ||||||
|  |         <url>https://maven.pkg.github.com/ollama4j/ollama4j</url> | ||||||
|  |         <releases> | ||||||
|  |             <enabled>true</enabled> | ||||||
|  |         </releases> | ||||||
|  |         <snapshots> | ||||||
|  |             <enabled>true</enabled> | ||||||
|  |         </snapshots> | ||||||
|  |     </repository> | ||||||
|  | </repositories> | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | 2. Add `GitHub` server to settings.xml. (Usually available at ~/.m2/settings.xml) | ||||||
|  |  | ||||||
|  | ```xml | ||||||
|  |  | ||||||
|  | <settings xmlns="http://maven.apache.org/SETTINGS/1.0.0" | ||||||
|  |           xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" | ||||||
|  |           xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 | ||||||
|  |                       http://maven.apache.org/xsd/settings-1.0.0.xsd"> | ||||||
|  |     <servers> | ||||||
|  |         <server> | ||||||
|  |             <id>github</id> | ||||||
|  |             <username>YOUR-USERNAME</username> | ||||||
|  |             <password>YOUR-TOKEN</password> | ||||||
|  |         </server> | ||||||
|  |     </servers> | ||||||
|  | </settings> | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | 3. In your Maven project, add this dependency: | ||||||
|  |  | ||||||
|  | ```xml | ||||||
|  |  | ||||||
|  | <dependency> | ||||||
|  |     <groupId>io.github.ollama4j</groupId> | ||||||
|  |     <artifactId>ollama4j</artifactId> | ||||||
|  |     <version>1.0.78</version> | ||||||
|  | </dependency> | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ### For Gradle | ||||||
|  |  | ||||||
|  | 1. Add the dependency | ||||||
|  |  | ||||||
|  | ```groovy | ||||||
|  | dependencies { | ||||||
|  |   implementation 'com.github.ollama4j:ollama4j:1.0.78' | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | [//]: # (Latest release:) | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # ([![][lib-shield]][lib]) | ||||||
|  |  | ||||||
|  | [lib]: https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j | ||||||
|  |  | ||||||
| [lib-shield]: https://img.shields.io/badge/ollama4j-get_latest_version-blue.svg?style=just-the-message&labelColor=gray | [lib-shield]: https://img.shields.io/badge/ollama4j-get_latest_version-blue.svg?style=just-the-message&labelColor=gray | ||||||
|  |  | ||||||
| #### API Spec | #### API Spec | ||||||
|  |  | ||||||
| Find the full API specifications on the [website](https://amithkoujalgi.github.io/ollama4j/). | > [!TIP] | ||||||
|  | > Find the full API specifications on the [website](https://ollama4j.github.io/ollama4j/). | ||||||
|  |  | ||||||
| #### Development | #### Development | ||||||
|  |  | ||||||
| @@ -96,23 +199,32 @@ make build | |||||||
| Run unit tests: | Run unit tests: | ||||||
|  |  | ||||||
| ```shell | ```shell | ||||||
| make ut | make unit-tests | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| Run integration tests: | Run integration tests: | ||||||
|  |  | ||||||
| ```shell | ```shell | ||||||
| make it | make integration-tests | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| #### Releases | #### Releases | ||||||
|  |  | ||||||
| Releases (newer artifact versions) are done automatically on pushing the code to the `main` branch through GitHub | Newer artifacts are published via GitHub Actions CI workflow when a new release is created from `main` branch. | ||||||
| Actions CI workflow. |  | ||||||
|  | #### Who's using Ollama4j? | ||||||
|  |  | ||||||
|  | - `Datafaker`: a library to generate fake data | ||||||
|  |     - https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api | ||||||
|  | - `Vaadin Web UI`: UI-Tester for Interactions with Ollama via ollama4j | ||||||
|  |     - https://github.com/TEAMPB/ollama4j-vaadin-ui | ||||||
|  | - `ollama-translator`: a Minecraft 1.20.6 Spigot plugin that lets you easily break language barriers by using Ollama on the | ||||||
|  |   server to translate all messages into a specific target language. | ||||||
|  |     - https://github.com/liebki/ollama-translator | ||||||
|  |  | ||||||
| #### Traction | #### Traction | ||||||
|  |  | ||||||
| [](https://star-history.com/#amithkoujalgi/ollama4j&Date) | [](https://star-history.com/#ollama4j/ollama4j&Date) | ||||||
|  |  | ||||||
| ### Areas of improvement | ### Areas of improvement | ||||||
|  |  | ||||||
| @@ -124,6 +236,9 @@ Actions CI workflow. | |||||||
| - [x] Use lombok | - [x] Use lombok | ||||||
| - [x] Update request body creation with Java objects | - [x] Update request body creation with Java objects | ||||||
| - [ ] Async APIs for images | - [ ] Async APIs for images | ||||||
|  | - [ ] Support for function calling with models like Mistral | ||||||
|  |     - [x] generate in sync mode | ||||||
|  |     - [ ] generate in async mode | ||||||
| - [ ] Add custom headers to requests | - [ ] Add custom headers to requests | ||||||
| - [x] Add additional params for `ask` APIs such as: | - [x] Add additional params for `ask` APIs such as: | ||||||
|     - [x] `options`: additional model parameters for the Modelfile such as `temperature` - |     - [x] `options`: additional model parameters for the Modelfile such as `temperature` - | ||||||
| @@ -139,15 +254,51 @@ Actions CI workflow. | |||||||
|  |  | ||||||
| ### Get Involved | ### Get Involved | ||||||
|  |  | ||||||
|  | <div align="center"> | ||||||
|  |  | ||||||
|  | <a href=""></a> | ||||||
|  | <a href=""></a> | ||||||
|  | <a href=""></a> | ||||||
|  | <a href=""></a> | ||||||
|  | <a href=""></a> | ||||||
|  |  | ||||||
|  | </div> | ||||||
|  |  | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  | [//]: # () | ||||||
|  |  | ||||||
|  |  | ||||||
| Contributions are most welcome! Whether it's reporting a bug, proposing an enhancement, or helping | Contributions are most welcome! Whether it's reporting a bug, proposing an enhancement, or helping | ||||||
| with code - any sort | with code - any sort | ||||||
| of contribution is much appreciated. | of contribution is much appreciated. | ||||||
|  |  | ||||||
|  | ### References | ||||||
|  |  | ||||||
|  | - [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md) | ||||||
|  |  | ||||||
| ### Credits | ### Credits | ||||||
|  |  | ||||||
| The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/) | The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/) | ||||||
| project. | project. | ||||||
|  |  | ||||||
| ### References | **Thanks to the amazing contributors** | ||||||
|  |  | ||||||
| - [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md) | <p align="center"> | ||||||
|  |   <a href="https://github.com/ollama4j/ollama4j/graphs/contributors"> | ||||||
|  |     <img src="https://contrib.rocks/image?repo=ollama4j/ollama4j" /> | ||||||
|  |   </a> | ||||||
|  | </p> | ||||||
|  |  | ||||||
|  | ### Appreciate my work? | ||||||
|  |  | ||||||
|  | <p align="center"> | ||||||
|  |   <a href="https://www.buymeacoffee.com/amithkoujalgi" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" style="height: 60px !important;width: 217px !important;" ></a> | ||||||
|  | </p> | ||||||
|   | |||||||
| @@ -11,7 +11,7 @@ Hey there, my fellow Java Developers! 🚀 | |||||||
| I am glad to announce the release of Ollama4j, a library that unites Ollama (an LLM manager and runner) and your Java | I am glad to announce the release of Ollama4j, a library that unites Ollama (an LLM manager and runner) and your Java | ||||||
| applications! 🌐🚀 | applications! 🌐🚀 | ||||||
|  |  | ||||||
| 👉 GitHub Repository: Ollama4j on GitHub (https://github.com/amithkoujalgi/ollama4j) | 👉 GitHub Repository: Ollama4j on GitHub (https://github.com/ollama4j/ollama4j) | ||||||
|  |  | ||||||
| 🌟 Key Features: | 🌟 Key Features: | ||||||
|  |  | ||||||
|   | |||||||
| @@ -31,7 +31,7 @@ Link to [source](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md | |||||||
|  |  | ||||||
| Also, see how to set those Ollama parameters using | Also, see how to set those Ollama parameters using | ||||||
| the `OptionsBuilder` | the `OptionsBuilder` | ||||||
| from [javadoc](https://amithkoujalgi.github.io/ollama4j/apidocs/io/github/amithkoujalgi/ollama4j/core/utils/OptionsBuilder.html). | from [javadoc](https://ollama4j.github.io/ollama4j/apidocs/io/github/ollama4j/ollama4j/core/utils/OptionsBuilder.html). | ||||||
|  |  | ||||||
| ## Build an empty `Options` object | ## Build an empty `Options` object | ||||||
|  |  | ||||||
|   | |||||||
| @@ -20,7 +20,7 @@ public class Main { | |||||||
|         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); |         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); | ||||||
|  |  | ||||||
|         // create first user question |         // create first user question | ||||||
|         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,"What is the capital of France?") |         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") | ||||||
|                 .build(); |                 .build(); | ||||||
|  |  | ||||||
|         // start conversation with model |         // start conversation with model | ||||||
| @@ -29,7 +29,7 @@ public class Main { | |||||||
|         System.out.println("First answer: " + chatResult.getResponse()); |         System.out.println("First answer: " + chatResult.getResponse()); | ||||||
|  |  | ||||||
|         // create next userQuestion |         // create next userQuestion | ||||||
|         requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER,"And what is the second largest city?").build(); |         requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?").build(); | ||||||
|  |  | ||||||
|         // "continue" conversation with model |         // "continue" conversation with model | ||||||
|         chatResult = ollamaAPI.chat(requestModel); |         chatResult = ollamaAPI.chat(requestModel); | ||||||
| @@ -41,6 +41,7 @@ public class Main { | |||||||
| } | } | ||||||
|  |  | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| You will get a response similar to: | You will get a response similar to: | ||||||
|  |  | ||||||
| > First answer: Should be Paris! | > First answer: Should be Paris! | ||||||
| @@ -50,23 +51,28 @@ You will get a response similar to: | |||||||
| > Chat History: | > Chat History: | ||||||
|  |  | ||||||
| ```json | ```json | ||||||
| [ { | [ | ||||||
|     "role" : "user", |   { | ||||||
|     "content" : "What is the capital of France?", |     "role": "user", | ||||||
|     "images" : [ ] |     "content": "What is the capital of France?", | ||||||
|   }, { |     "images": [] | ||||||
|     "role" : "assistant", |   }, | ||||||
|     "content" : "Should be Paris!", |   { | ||||||
|     "images" : [ ] |     "role": "assistant", | ||||||
|   }, { |     "content": "Should be Paris!", | ||||||
|     "role" : "user", |     "images": [] | ||||||
|     "content" : "And what is the second largest city?", |   }, | ||||||
|     "images" : [ ] |   { | ||||||
|   }, { |     "role": "user", | ||||||
|     "role" : "assistant", |     "content": "And what is the second largest city?", | ||||||
|     "content" : "Marseille.", |     "images": [] | ||||||
|     "images" : [ ] |   }, | ||||||
|   } ] |   { | ||||||
|  |     "role": "assistant", | ||||||
|  |     "content": "Marseille.", | ||||||
|  |     "images": [] | ||||||
|  |   } | ||||||
|  | ] | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| ## Create a conversation where the answer is streamed | ## Create a conversation where the answer is streamed | ||||||
| @@ -89,10 +95,11 @@ public class Main { | |||||||
|             System.out.println(s); |             System.out.println(s); | ||||||
|         }; |         }; | ||||||
|  |  | ||||||
|         OllamaChatResult chatResult = ollamaAPI.chat(requestModel,streamHandler); |         OllamaChatResult chatResult = ollamaAPI.chat(requestModel, streamHandler); | ||||||
|     } |     } | ||||||
| } | } | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| You will get a response similar to: | You will get a response similar to: | ||||||
|  |  | ||||||
| > The | > The | ||||||
| @@ -103,8 +110,27 @@ You will get a response similar to: | |||||||
| > The capital of France is Paris | > The capital of France is Paris | ||||||
| > The capital of France is Paris. | > The capital of France is Paris. | ||||||
|  |  | ||||||
|  | ## Use a simple Console Output Stream Handler | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler; | ||||||
|  |  | ||||||
|  | public class Main { | ||||||
|  |     public static void main(String[] args) throws Exception { | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |  | ||||||
|  |         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); | ||||||
|  |         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!") | ||||||
|  |                 .build(); | ||||||
|  |         OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler(); | ||||||
|  |         ollamaAPI.chat(requestModel, streamHandler); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
| ## Create a new conversation with individual system prompt | ## Create a new conversation with individual system prompt | ||||||
|  |  | ||||||
| ```java | ```java | ||||||
| public class Main { | public class Main { | ||||||
|  |  | ||||||
| @@ -117,7 +143,7 @@ public class Main { | |||||||
|  |  | ||||||
|         // create request with system-prompt (overriding the model defaults) and user question |         // create request with system-prompt (overriding the model defaults) and user question | ||||||
|         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") |         OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") | ||||||
|              .withMessage(OllamaChatMessageRole.USER,"What is the capital of France? And what's France's connection with Mona Lisa?") |                 .withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?") | ||||||
|                 .build(); |                 .build(); | ||||||
|  |  | ||||||
|         // start conversation with model |         // start conversation with model | ||||||
| @@ -128,6 +154,7 @@ public class Main { | |||||||
| } | } | ||||||
|  |  | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| You will get a response similar to: | You will get a response similar to: | ||||||
|  |  | ||||||
| > NI. | > NI. | ||||||
| @@ -167,6 +194,12 @@ public class Main { | |||||||
|  |  | ||||||
| You will get a response similar to: | You will get a response similar to: | ||||||
|  |  | ||||||
| > First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early evening, given the warm lighting and the low position of the sun in the sky. | > First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two | ||||||
|  | > levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and | ||||||
|  | > comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early | ||||||
|  | > evening, given the warm lighting and the low position of the sun in the sky. | ||||||
| > | > | ||||||
| > Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. However, the dog appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed confidently. | > Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. However, the dog | ||||||
|  | > appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever | ||||||
|  | > or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed | ||||||
|  | > confidently. | ||||||
| @@ -5,38 +5,42 @@ sidebar_position: 2 | |||||||
| # Generate - Async | # Generate - Async | ||||||
|  |  | ||||||
| This API lets you ask questions to the LLMs in a asynchronous way. | This API lets you ask questions to the LLMs in a asynchronous way. | ||||||
| These APIs correlate to | This is particularly helpful when you want to issue a generate request to the LLM and collect the response in the | ||||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs. | background (such as threads) without blocking your code until the response arrives from the model. | ||||||
|  |  | ||||||
|  | This API corresponds to | ||||||
|  | the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. | ||||||
|  |  | ||||||
| ```java | ```java | ||||||
| public class Main { | public class Main { | ||||||
|  |  | ||||||
|     public static void main(String[] args) { |     public static void main(String[] args) throws Exception { | ||||||
|  |  | ||||||
|         String host = "http://localhost:11434/"; |         String host = "http://localhost:11434/"; | ||||||
|  |  | ||||||
|         OllamaAPI ollamaAPI = new OllamaAPI(host); |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |         ollamaAPI.setRequestTimeoutSeconds(60); | ||||||
|  |         String prompt = "List all cricket world cup teams of 2019."; | ||||||
|  |         OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync(OllamaModelType.LLAMA3, prompt, false); | ||||||
|  |  | ||||||
|         String prompt = "Who are you?"; |         // Set the poll interval according to your needs.  | ||||||
|  |         // Smaller the poll interval, more frequently you receive the tokens. | ||||||
|  |         int pollIntervalMilliseconds = 1000; | ||||||
|  |  | ||||||
|         OllamaAsyncResultCallback callback = ollamaAPI.generateAsync(OllamaModelType.LLAMA2, prompt); |         while (true) { | ||||||
|  |             String tokens = streamer.getStream().poll(); | ||||||
|         while (!callback.isComplete() || !callback.getStream().isEmpty()) { |             System.out.print(tokens); | ||||||
|             // poll for data from the response stream |             if (!streamer.isAlive()) { | ||||||
|             String result = callback.getStream().poll(); |                 break; | ||||||
|             if (result != null) { |  | ||||||
|                 System.out.print(result); |  | ||||||
|             } |             } | ||||||
|             Thread.sleep(100); |             Thread.sleep(pollIntervalMilliseconds); | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |         System.out.println("\n------------------------"); | ||||||
|  |         System.out.println("Complete Response:"); | ||||||
|  |         System.out.println("------------------------"); | ||||||
|  |  | ||||||
|  |         System.out.println(streamer.getResult()); | ||||||
|     } |     } | ||||||
| } | } | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| You will get a response similar to: | You will get a streaming response. | ||||||
|  |  | ||||||
| > I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational |  | ||||||
| > manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide |  | ||||||
| > range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that |  | ||||||
| > require |  | ||||||
| > natural language understanding and generation capabilities. |  | ||||||
| @@ -1,12 +1,12 @@ | |||||||
| --- | --- | ||||||
| sidebar_position: 3 | sidebar_position: 4 | ||||||
| --- | --- | ||||||
|  |  | ||||||
| # Generate - With Image Files | # Generate - With Image Files | ||||||
|  |  | ||||||
| This API lets you ask questions along with the image files to the LLMs. | This API lets you ask questions along with the image files to the LLMs. | ||||||
| These APIs correlate to | This API corresponds to | ||||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs. | the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. | ||||||
|  |  | ||||||
| :::note | :::note | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,12 +1,12 @@ | |||||||
| --- | --- | ||||||
| sidebar_position: 4 | sidebar_position: 5 | ||||||
| --- | --- | ||||||
|  |  | ||||||
| # Generate - With Image URLs | # Generate - With Image URLs | ||||||
|  |  | ||||||
| This API lets you ask questions along with the image files to the LLMs. | This API lets you ask questions along with the image files to the LLMs. | ||||||
| These APIs correlate to | This API corresponds to | ||||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs. | the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. | ||||||
|  |  | ||||||
| :::note | :::note | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
368 docs/docs/apis-generate/generate-with-tools.md Normal file
							| @@ -0,0 +1,368 @@ | |||||||
|  | --- | ||||||
|  | sidebar_position: 3 | ||||||
|  | --- | ||||||
|  |  | ||||||
|  | # Generate - With Tools | ||||||
|  |  | ||||||
|  | This API lets you perform [function calling](https://docs.mistral.ai/capabilities/function_calling/) using LLMs in a | ||||||
|  | synchronous way. | ||||||
|  | This API corresponds to | ||||||
|  | the [generate](https://github.com/ollama/ollama/blob/main/docs/api.md#request-raw-mode) API with `raw` mode. | ||||||
|  |  | ||||||
|  | :::note | ||||||
|  |  | ||||||
|  | This is only an experimental implementation and has a very basic design. | ||||||
|  |  | ||||||
|  | Currently, this is built and tested for [Mistral's latest model](https://ollama.com/library/mistral) only. We could | ||||||
|  | redesign this in the future if Ollama adds tooling support for more models with a generic interaction standard. | ||||||
|  |  | ||||||
|  | ::: | ||||||
|  |  | ||||||
|  | ### Function Calling/Tools | ||||||
|  |  | ||||||
|  | Assume you want to call a method in your code based on the response generated by the model. | ||||||
|  | For instance, say that based on a user's question, you want to identify a transaction, fetch its details from your | ||||||
|  | database, and respond to the user with those details. | ||||||
|  |  | ||||||
|  | You can do that with ease using the `function calling` capabilities of the models, by registering your `tools`. | ||||||
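|  |  | ||||||
|  | As an illustration of that transaction example, here is a minimal sketch of such a tool. The `TransactionQueryFunction` | ||||||
|  | name and the `transaction-id` argument are assumptions made up for this sketch and are not part of the library; only | ||||||
|  | the `ToolFunction` interface it implements is the one covered in the sections below. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | class TransactionQueryFunction implements ToolFunction { | ||||||
|  |     @Override | ||||||
|  |     public Object apply(Map<String, Object> arguments) { | ||||||
|  |         // In a real tool you would query your database here; this sketch returns a canned record. | ||||||
|  |         String transactionId = arguments.get("transaction-id").toString(); | ||||||
|  |         return "Transaction " + transactionId + ": Rs.2500 paid to ACME Stores on 2024-01-01"; | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | ``` | ||||||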
|  |  | ||||||
|  | ### Create Functions | ||||||
|  |  | ||||||
|  | This function takes the arguments `location` and `fuelType`, performs an operation with them, and returns the current | ||||||
|  | fuel price. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public static String getCurrentFuelPrice(Map<String, Object> arguments) { | ||||||
|  |     String location = arguments.get("location").toString(); | ||||||
|  |     String fuelType = arguments.get("fuelType").toString(); | ||||||
|  |     return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | This function takes the argument `city`, performs an operation with it, and returns the current weather for that | ||||||
|  | location. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public static String getCurrentWeather(Map<String, Object> arguments) { | ||||||
|  |     String location = arguments.get("city").toString(); | ||||||
|  |     return "Currently " + location + "'s weather is nice."; | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | This tool function takes the arguments `employee-name`, `employee-address`, and `employee-phone`, performs an | ||||||
|  | operation with them, and returns the employee details. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | class DBQueryFunction implements ToolFunction { | ||||||
|  |     @Override | ||||||
|  |     public Object apply(Map<String, Object> arguments) { | ||||||
|  |         // perform DB operations here | ||||||
|  |         return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString()); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ### Define Tool Specifications | ||||||
|  |  | ||||||
|  | Let's define a sample tool specification called **Fuel Price Tool** for getting the current fuel price. | ||||||
|  |  | ||||||
|  | - Specify the function `name`, `description`, and `required` properties (`location` and `fuelType`). | ||||||
|  | - Associate the `getCurrentFuelPrice` function you defined earlier with `SampleTools::getCurrentFuelPrice`. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder() | ||||||
|  |         .functionName("current-fuel-price") | ||||||
|  |         .functionDescription("Get current fuel price") | ||||||
|  |         .properties( | ||||||
|  |                 new Tools.PropsBuilder() | ||||||
|  |                         .withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||||
|  |                         .withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build()) | ||||||
|  |                         .build() | ||||||
|  |         ) | ||||||
|  |         .toolDefinition(SampleTools::getCurrentFuelPrice) | ||||||
|  |         .build(); | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Let's also define a sample tool specification called **Weather Tool** for getting the current weather. | ||||||
|  |  | ||||||
|  | - Specify the function `name`, `description`, and `required` property (`city`). | ||||||
|  | - Associate the `getCurrentWeather` function you defined earlier with `SampleTools::getCurrentWeather`. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder() | ||||||
|  |         .functionName("current-weather") | ||||||
|  |         .functionDescription("Get current weather") | ||||||
|  |         .properties( | ||||||
|  |                 new Tools.PropsBuilder() | ||||||
|  |                         .withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||||
|  |                         .build() | ||||||
|  |         ) | ||||||
|  |         .toolDefinition(SampleTools::getCurrentWeather) | ||||||
|  |         .build(); | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Let's also define a sample tool specification called **DBQueryFunction** for getting employee details from the database. | ||||||
|  |  | ||||||
|  | - Specify the function `name`, `description`, and `required` properties (`employee-name`, `employee-address`, and `employee-phone`). | ||||||
|  | - Associate the `DBQueryFunction` tool function you defined earlier by passing a `new DBQueryFunction()` instance. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder() | ||||||
|  |         .functionName("get-employee-details") | ||||||
|  |         .functionDescription("Get employee details from the database") | ||||||
|  |         .properties( | ||||||
|  |                 new Tools.PropsBuilder() | ||||||
|  |                         .withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build()) | ||||||
|  |                         .withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build()) | ||||||
|  |                         .withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build()) | ||||||
|  |                         .build() | ||||||
|  |         ) | ||||||
|  |         .toolDefinition(new DBQueryFunction()) | ||||||
|  |         .build(); | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ### Register the Tools | ||||||
|  |  | ||||||
|  | Register the defined tools (`fuel price` and `weather`) with the OllamaAPI. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | ollamaAPI.registerTool(fuelPriceToolSpecification); | ||||||
|  | ollamaAPI.registerTool(weatherToolSpecification); | ||||||
|  | ollamaAPI.registerTool(databaseQueryToolSpecification); | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ### Create Prompts with Tools | ||||||
|  |  | ||||||
|  | `Prompt 1`: Create a prompt asking for the petrol price in Bengaluru using the defined fuel price and weather tools. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | String prompt1 = new Tools.PromptBuilder() | ||||||
|  |                 .withToolSpecification(fuelPriceToolSpecification) | ||||||
|  |                 .withToolSpecification(weatherToolSpecification) | ||||||
|  |                 .withPrompt("What is the petrol price in Bengaluru?") | ||||||
|  |                 .build(); | ||||||
|  | OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, new OptionsBuilder().build()); | ||||||
|  | for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) { | ||||||
|  |     System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString()); | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Now, fire away your question to the model. | ||||||
|  |  | ||||||
|  | You will get a response similar to: | ||||||
|  |  | ||||||
|  | ::::tip[LLM Response] | ||||||
|  |  | ||||||
|  | [Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L | ||||||
|  |  | ||||||
|  | :::: | ||||||
|  |  | ||||||
|  | `Prompt 2`: Create a prompt asking for the current weather in Bengaluru using the same tools. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | String prompt2 = new Tools.PromptBuilder() | ||||||
|  |                 .withToolSpecification(fuelPriceToolSpecification) | ||||||
|  |                 .withToolSpecification(weatherToolSpecification) | ||||||
|  |                 .withPrompt("What is the current weather in Bengaluru?") | ||||||
|  |                 .build(); | ||||||
|  | OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, new OptionsBuilder().build()); | ||||||
|  | for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) { | ||||||
|  |     System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString()); | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Again, fire away your question to the model. | ||||||
|  |  | ||||||
|  | You will get a response similar to: | ||||||
|  |  | ||||||
|  | ::::tip[LLM Response] | ||||||
|  |  | ||||||
|  | [Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice. | ||||||
|  |  | ||||||
|  | :::: | ||||||
|  |  | ||||||
|  | `Prompt 3`: Create a prompt asking for the employee details using the defined database query tool. | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | String prompt3 = new Tools.PromptBuilder() | ||||||
|  |                 .withToolSpecification(fuelPriceToolSpecification) | ||||||
|  |                 .withToolSpecification(weatherToolSpecification) | ||||||
|  |                 .withToolSpecification(databaseQueryToolSpecification) | ||||||
|  |                 .withPrompt("Give me the details of the employee named 'Rahul Kumar'?") | ||||||
|  |                 .build(); | ||||||
|  | OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt3, new OptionsBuilder().build()); | ||||||
|  | for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) { | ||||||
|  |     System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString()); | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Again, fire away your question to the model. | ||||||
|  |  | ||||||
|  | You will get a response similar to: | ||||||
|  |  | ||||||
|  | ::::tip[LLM Response] | ||||||
|  |  | ||||||
|  | [Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name: | ||||||
|  | Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}` | ||||||
|  |  | ||||||
|  | :::: | ||||||
|  |  | ||||||
|  | ### Full Example | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolInvocationException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.tools.OllamaToolsResult; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.tools.ToolFunction; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.tools.Tools; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
|  |  | ||||||
|  | import java.io.IOException; | ||||||
|  | import java.util.Arrays; | ||||||
|  | import java.util.Map; | ||||||
|  | import java.util.UUID; | ||||||
|  |  | ||||||
|  | public class FunctionCallingWithMistralExample { | ||||||
|  |     public static void main(String[] args) throws Exception { | ||||||
|  |         String host = "http://localhost:11434/"; | ||||||
|  |         OllamaAPI ollamaAPI = new OllamaAPI(host); | ||||||
|  |         ollamaAPI.setRequestTimeoutSeconds(60); | ||||||
|  |  | ||||||
|  |         String model = "mistral"; | ||||||
|  |  | ||||||
|  |         Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder() | ||||||
|  |                 .functionName("current-fuel-price") | ||||||
|  |                 .functionDescription("Get current fuel price") | ||||||
|  |                 .properties( | ||||||
|  |                         new Tools.PropsBuilder() | ||||||
|  |                                 .withProperty("location", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||||
|  |                                 .withProperty("fuelType", Tools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build()) | ||||||
|  |                                 .build() | ||||||
|  |                 ) | ||||||
|  |                 .toolDefinition(SampleTools::getCurrentFuelPrice) | ||||||
|  |                 .build(); | ||||||
|  |  | ||||||
|  |         Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder() | ||||||
|  |                 .functionName("current-weather") | ||||||
|  |                 .functionDescription("Get current weather") | ||||||
|  |                 .properties( | ||||||
|  |                         new Tools.PropsBuilder() | ||||||
|  |                                 .withProperty("city", Tools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) | ||||||
|  |                                 .build() | ||||||
|  |                 ) | ||||||
|  |                 .toolDefinition(SampleTools::getCurrentWeather) | ||||||
|  |                 .build(); | ||||||
|  |  | ||||||
|  |         Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder() | ||||||
|  |                 .functionName("get-employee-details") | ||||||
|  |                 .functionDescription("Get employee details from the database") | ||||||
|  |                 .properties( | ||||||
|  |                         new Tools.PropsBuilder() | ||||||
|  |                                 .withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build()) | ||||||
|  |                                 .withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build()) | ||||||
|  |                                 .withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build()) | ||||||
|  |                                 .build() | ||||||
|  |                 ) | ||||||
|  |                 .toolDefinition(new DBQueryFunction()) | ||||||
|  |                 .build(); | ||||||
|  |  | ||||||
|  |         ollamaAPI.registerTool(fuelPriceToolSpecification); | ||||||
|  |         ollamaAPI.registerTool(weatherToolSpecification); | ||||||
|  |         ollamaAPI.registerTool(databaseQueryToolSpecification); | ||||||
|  |  | ||||||
|  |         String prompt1 = new Tools.PromptBuilder() | ||||||
|  |                 .withToolSpecification(fuelPriceToolSpecification) | ||||||
|  |                 .withToolSpecification(weatherToolSpecification) | ||||||
|  |                 .withPrompt("What is the petrol price in Bengaluru?") | ||||||
|  |                 .build(); | ||||||
|  |         ask(ollamaAPI, model, prompt1); | ||||||
|  |  | ||||||
|  |         String prompt2 = new Tools.PromptBuilder() | ||||||
|  |                 .withToolSpecification(fuelPriceToolSpecification) | ||||||
|  |                 .withToolSpecification(weatherToolSpecification) | ||||||
|  |                 .withPrompt("What is the current weather in Bengaluru?") | ||||||
|  |                 .build(); | ||||||
|  |         ask(ollamaAPI, model, prompt2); | ||||||
|  |  | ||||||
|  |         String prompt3 = new Tools.PromptBuilder() | ||||||
|  |                 .withToolSpecification(fuelPriceToolSpecification) | ||||||
|  |                 .withToolSpecification(weatherToolSpecification) | ||||||
|  |                 .withToolSpecification(databaseQueryToolSpecification) | ||||||
|  |                 .withPrompt("Give me the details of the employee named 'Rahul Kumar'?") | ||||||
|  |                 .build(); | ||||||
|  |         ask(ollamaAPI, model, prompt3); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException { | ||||||
|  |         OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, new OptionsBuilder().build()); | ||||||
|  |         for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) { | ||||||
|  |             System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString()); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class SampleTools { | ||||||
|  |     public static String getCurrentFuelPrice(Map<String, Object> arguments) { | ||||||
|  |         // Get details from fuel price API | ||||||
|  |         String location = arguments.get("location").toString(); | ||||||
|  |         String fuelType = arguments.get("fuelType").toString(); | ||||||
|  |         return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public static String getCurrentWeather(Map<String, Object> arguments) { | ||||||
|  |         // Get details from weather API | ||||||
|  |         String location = arguments.get("city").toString(); | ||||||
|  |         return "Currently " + location + "'s weather is nice."; | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | class DBQueryFunction implements ToolFunction { | ||||||
|  |     @Override | ||||||
|  |     public Object apply(Map<String, Object> arguments) { | ||||||
|  |         // perform DB operations here | ||||||
|  |         return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString()); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Run this full example and you will get a response similar to: | ||||||
|  |  | ||||||
|  | ::::tip[LLM Response] | ||||||
|  |  | ||||||
|  | [Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L | ||||||
|  |  | ||||||
|  | [Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice. | ||||||
|  |  | ||||||
|  | [Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name: | ||||||
|  | Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}` | ||||||
|  |  | ||||||
|  | :::: | ||||||
|  |  | ||||||
|  | ### Room for Improvement | ||||||
|  |  | ||||||
|  | Instead of explicitly registering tools with `ollamaAPI.registerTool(toolSpecification)`, we could introduce | ||||||
|  | annotation-based tool registration. For example: | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  |  | ||||||
|  | @ToolSpec(name = "current-fuel-price", desc = "Get current fuel price") | ||||||
|  | public String getCurrentFuelPrice(Map<String, Object> arguments) { | ||||||
|  |     String location = arguments.get("location").toString(); | ||||||
|  |     String fuelType = arguments.get("fuelType").toString(); | ||||||
|  |     return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; | ||||||
|  | } | ||||||
|  | ``` | ||||||
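|  |  | ||||||
|  | Such an annotation does not exist in the library yet; for illustration, and purely as an assumption, its declaration | ||||||
|  | might look roughly like this sketch: | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | import java.lang.annotation.ElementType; | ||||||
|  | import java.lang.annotation.Retention; | ||||||
|  | import java.lang.annotation.RetentionPolicy; | ||||||
|  | import java.lang.annotation.Target; | ||||||
|  |  | ||||||
|  | // Hypothetical annotation; the name and attributes simply mirror the usage shown above. | ||||||
|  | @Retention(RetentionPolicy.RUNTIME) | ||||||
|  | @Target(ElementType.METHOD) | ||||||
|  | @interface ToolSpec { | ||||||
|  |     String name(); | ||||||
|  |  | ||||||
|  |     String desc(); | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | A registration step could then scan for annotated methods at startup and build the corresponding | ||||||
|  | `Tools.ToolSpecification` entries automatically. | ||||||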
|  |  | ||||||
|  | Instead of passing a map of arguments (`Map<String, Object> arguments`) to the tool functions, we could support passing | ||||||
|  | specific arguments separately, each with its own data type. For example: | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | public String getCurrentFuelPrice(String location, String fuelType) { | ||||||
|  |     return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; | ||||||
|  | } | ||||||
|  | ``` | ||||||
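|  |  | ||||||
|  | Until such support exists, a small adapter can bridge a typed method to the map-based interface. This is only a sketch | ||||||
|  | and assumes `ToolFunction` (shown earlier) can be implemented with a lambda; the adapter simply unpacks the two | ||||||
|  | arguments before delegating: | ||||||
|  |  | ||||||
|  | ```java | ||||||
|  | // Adapter sketch: expose the typed method through the existing map-based ToolFunction interface. | ||||||
|  | ToolFunction fuelPriceAdapter = arguments -> getCurrentFuelPrice( | ||||||
|  |         arguments.get("location").toString(), | ||||||
|  |         arguments.get("fuelType").toString()); | ||||||
|  | ``` | ||||||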
|  |  | ||||||
|  | We could also update the async/chat APIs with support for tool-based generation. | ||||||
| @@ -5,13 +5,13 @@ sidebar_position: 1 | |||||||
| # Generate - Sync | # Generate - Sync | ||||||
|  |  | ||||||
| This API lets you ask questions to the LLMs in a synchronous way. | This API lets you ask questions to the LLMs in a synchronous way. | ||||||
| These APIs correlate to | This API corresponds to | ||||||
| the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) APIs. | the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API. | ||||||
|  |  | ||||||
| Use the `OptionBuilder` to build the `Options` object | Use the `OptionBuilder` to build the `Options` object | ||||||
| with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values). | with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values). | ||||||
| Refer | Refer | ||||||
| to [this](/docs/apis-extras/options-builder). | to [this](/apis-extras/options-builder). | ||||||
|  |  | ||||||
| ## Try asking a question about the model. | ## Try asking a question about the model. | ||||||
|  |  | ||||||
| @@ -61,10 +61,11 @@ public class Main { | |||||||
|                 "What is the capital of France? And what's France's connection with Mona Lisa?", |                 "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||||
|                 new OptionsBuilder().build(), streamHandler); |                 new OptionsBuilder().build(), streamHandler); | ||||||
|  |  | ||||||
|         System.out.println("Full response: " +result.getResponse()); |         System.out.println("Full response: " + result.getResponse()); | ||||||
|     } |     } | ||||||
| } | } | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| You will get a response similar to: | You will get a response similar to: | ||||||
|  |  | ||||||
| > The | > The | ||||||
| @@ -140,7 +141,7 @@ public class Main { | |||||||
| ``` | ``` | ||||||
|  |  | ||||||
| _Note: Here I've used | _Note: Here I've used | ||||||
| a [sample prompt](https://github.com/amithkoujalgi/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt) | a [sample prompt](https://github.com/ollama4j/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt) | ||||||
| containing a database schema from within this library for demonstration purposes._ | containing a database schema from within this library for demonstration purposes._ | ||||||
|  |  | ||||||
| You'd then get a response from the model: | You'd then get a response from the model: | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
| --- | --- | ||||||
| sidebar_position: 5 | sidebar_position: 6 | ||||||
| --- | --- | ||||||
|  |  | ||||||
| # Prompt Builder | # Prompt Builder | ||||||
| @@ -42,7 +42,7 @@ public class AskPhi { | |||||||
|                         .addSeparator() |                         .addSeparator() | ||||||
|                         .add("How do I read a file in Go and print its contents to stdout?"); |                         .add("How do I read a file in Go and print its contents to stdout?"); | ||||||
|  |  | ||||||
|         OllamaResult response = ollamaAPI.generate(model, promptBuilder.build()); |         OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build()); | ||||||
|         System.out.println(response.getResponse()); |         System.out.println(response.getResponse()); | ||||||
|     } |     } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -84,7 +84,7 @@ Add the dependency to your project's `pom.xml`. | |||||||
| </dependency> | </dependency> | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.amithkoujalgi/ollama4j). | Find the latest version of the library [here](https://central.sonatype.com/artifact/io.github.ollama4j/ollama4j). | ||||||
|  |  | ||||||
| You might want to include an implementation of [SL4J](https://www.slf4j.org/) logger in your `pom.xml` file. For | You might want to include an implementation of [SL4J](https://www.slf4j.org/) logger in your `pom.xml` file. For | ||||||
| example, | example, | ||||||
|   | |||||||
| @@ -20,7 +20,7 @@ const config = { | |||||||
|  |  | ||||||
|     // GitHub pages deployment config. |     // GitHub pages deployment config. | ||||||
|     // If you aren't using GitHub pages, you don't need these. |     // If you aren't using GitHub pages, you don't need these. | ||||||
|     organizationName: 'amithkoujalgi', // Usually your GitHub org/user name. |     organizationName: 'ollama4j', // Usually your GitHub org/user name. | ||||||
|     projectName: 'ollama4j', // Usually your repo name. |     projectName: 'ollama4j', // Usually your repo name. | ||||||
|  |  | ||||||
|     onBrokenLinks: 'throw', |     onBrokenLinks: 'throw', | ||||||
| @@ -40,18 +40,20 @@ const config = { | |||||||
|             /** @type {import('@docusaurus/preset-classic').Options} */ |             /** @type {import('@docusaurus/preset-classic').Options} */ | ||||||
|             ({ |             ({ | ||||||
|                 docs: { |                 docs: { | ||||||
|  |                     path: 'docs', | ||||||
|  |                     routeBasePath: '', // change this to any URL route you'd want. For example: `home` - if you want /home/intro. | ||||||
|                     sidebarPath: './sidebars.js', |                     sidebarPath: './sidebars.js', | ||||||
|                     // Please change this to your repo. |                     // Please change this to your repo. | ||||||
|                     // Remove this to remove the "edit this page" links. |                     // Remove this to remove the "edit this page" links. | ||||||
|                     editUrl: |                     editUrl: | ||||||
|                         'https://github.com/amithkoujalgi/ollama4j/blob/main/docs', |                         'https://github.com/ollama4j/ollama4j/blob/main/docs', | ||||||
|                 }, |                 }, | ||||||
|                 blog: { |                 blog: { | ||||||
|                     showReadingTime: true, |                     showReadingTime: true, | ||||||
|                     // Please change this to your repo. |                     // Please change this to your repo. | ||||||
|                     // Remove this to remove the "edit this page" links. |                     // Remove this to remove the "edit this page" links. | ||||||
|                     editUrl: |                     editUrl: | ||||||
|                         'https://github.com/amithkoujalgi/ollama4j/blob/main/docs', |                         'https://github.com/ollama4j/ollama4j/blob/main/docs', | ||||||
|                 }, |                 }, | ||||||
|                 theme: { |                 theme: { | ||||||
|                     customCss: './src/css/custom.css', |                     customCss: './src/css/custom.css', | ||||||
| @@ -78,11 +80,11 @@ const config = { | |||||||
|                         position: 'left', |                         position: 'left', | ||||||
|                         label: 'Docs', |                         label: 'Docs', | ||||||
|                     }, |                     }, | ||||||
|                     {to: 'https://amithkoujalgi.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'}, |                     {to: 'https://ollama4j.github.io/ollama4j/apidocs/', label: 'Javadoc', position: 'left'}, | ||||||
|                     {to: 'https://amithkoujalgi.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'}, |                     {to: 'https://ollama4j.github.io/ollama4j/doxygen/html/', label: 'Doxygen', position: 'left'}, | ||||||
|                     {to: '/blog', label: 'Blog', position: 'left'}, |                     {to: '/blog', label: 'Blog', position: 'left'}, | ||||||
|                     { |                     { | ||||||
|                         href: 'https://github.com/amithkoujalgi/ollama4j', |                         href: 'https://github.com/ollama4j/ollama4j', | ||||||
|                         label: 'GitHub', |                         label: 'GitHub', | ||||||
|                         position: 'right', |                         position: 'right', | ||||||
|                     }, |                     }, | ||||||
| @@ -96,7 +98,7 @@ const config = { | |||||||
|                         items: [ |                         items: [ | ||||||
|                             { |                             { | ||||||
|                                 label: 'Tutorial', |                                 label: 'Tutorial', | ||||||
|                                 to: '/docs/intro', |                                 to: '/intro', | ||||||
|                             }, |                             }, | ||||||
|                         ], |                         ], | ||||||
|                     }, |                     }, | ||||||
| @@ -122,7 +124,7 @@ const config = { | |||||||
|                             }, |                             }, | ||||||
|                             { |                             { | ||||||
|                                 label: 'GitHub', |                                 label: 'GitHub', | ||||||
|                                 href: 'https://github.com/amithkoujalgi/ollama4j', |                                 href: 'https://github.com/ollama4j/ollama4j', | ||||||
|                             }, |                             }, | ||||||
|                         ], |                         ], | ||||||
|                     }, |                     }, | ||||||
|   | |||||||
							
								
								
									
1947 docs/package-lock.json generated (file diff suppressed because it is too large)
							| @@ -14,9 +14,9 @@ | |||||||
|     "write-heading-ids": "docusaurus write-heading-ids" |     "write-heading-ids": "docusaurus write-heading-ids" | ||||||
|   }, |   }, | ||||||
|   "dependencies": { |   "dependencies": { | ||||||
|     "@docusaurus/core": "3.0.1", |     "@docusaurus/core": "^3.4.0", | ||||||
|     "@docusaurus/preset-classic": "3.0.1", |     "@docusaurus/preset-classic": "^3.4.0", | ||||||
|     "@docusaurus/theme-mermaid": "^3.0.1", |     "@docusaurus/theme-mermaid": "^3.4.0", | ||||||
|     "@mdx-js/react": "^3.0.0", |     "@mdx-js/react": "^3.0.0", | ||||||
|     "clsx": "^2.0.0", |     "clsx": "^2.0.0", | ||||||
|     "prism-react-renderer": "^2.3.0", |     "prism-react-renderer": "^2.3.0", | ||||||
| @@ -24,8 +24,8 @@ | |||||||
|     "react-dom": "^18.0.0" |     "react-dom": "^18.0.0" | ||||||
|   }, |   }, | ||||||
|   "devDependencies": { |   "devDependencies": { | ||||||
|     "@docusaurus/module-type-aliases": "3.0.1", |     "@docusaurus/module-type-aliases": "^3.4.0", | ||||||
|     "@docusaurus/types": "3.0.1" |     "@docusaurus/types": "^3.4.0" | ||||||
|   }, |   }, | ||||||
|   "browserslist": { |   "browserslist": { | ||||||
|     "production": [ |     "production": [ | ||||||
|   | |||||||
| @@ -19,7 +19,7 @@ function HomepageHeader() { | |||||||
|             <div className={styles.buttons}> |             <div className={styles.buttons}> | ||||||
|                 <Link |                 <Link | ||||||
|                     className="button button--secondary button--lg" |                     className="button button--secondary button--lg" | ||||||
|                     to="/docs/intro"> |                     to="/intro"> | ||||||
|                     Getting Started |                     Getting Started | ||||||
|                 </Link> |                 </Link> | ||||||
|             </div> |             </div> | ||||||
|   | |||||||
							
								
								
									
106 pom.xml
| <?xml version="1.0" encoding="UTF-8"?> | <?xml version="1.0" encoding="UTF-8"?> | ||||||
| <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" | ||||||
|  |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> | ||||||
|     <modelVersion>4.0.0</modelVersion> |     <modelVersion>4.0.0</modelVersion> | ||||||
|  |  | ||||||
|     <groupId>io.github.amithkoujalgi</groupId> |     <groupId>io.github.ollama4j</groupId> | ||||||
|     <artifactId>ollama4j</artifactId> |     <artifactId>ollama4j</artifactId> | ||||||
|     <version>1.0.61</version> |     <version>ollama4j-revision</version> | ||||||
|  |  | ||||||
|     <name>Ollama4j</name> |     <name>Ollama4j</name> | ||||||
|     <description>Java library for interacting with Ollama API.</description> |     <description>Java library for interacting with Ollama API.</description> | ||||||
|     <url>https://github.com/amithkoujalgi/ollama4j</url> |     <url>https://github.com/ollama4j/ollama4j</url> | ||||||
|  |     <packaging>jar</packaging> | ||||||
|  |  | ||||||
|     <properties> |     <properties> | ||||||
|         <maven.compiler.source>11</maven.compiler.source> |         <maven.compiler.source>11</maven.compiler.source> | ||||||
| @@ -31,15 +33,15 @@ | |||||||
|     <licenses> |     <licenses> | ||||||
|         <license> |         <license> | ||||||
|             <name>MIT License</name> |             <name>MIT License</name> | ||||||
|             <url>https://raw.githubusercontent.com/amithkoujalgi/ollama4j/main/LICENSE</url> |             <url>https://raw.githubusercontent.com/ollama4j/ollama4j/main/LICENSE</url> | ||||||
|         </license> |         </license> | ||||||
|     </licenses> |     </licenses> | ||||||
|  |  | ||||||
|     <scm> |     <scm> | ||||||
|         <connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection> |         <connection>scm:git:git@github.com:ollama4j/ollama4j.git</connection> | ||||||
|         <developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection> |         <developerConnection>scm:git:https://github.com/ollama4j/ollama4j.git</developerConnection> | ||||||
|         <url>https://github.com/amithkoujalgi/ollama4j</url> |         <url>https://github.com/ollama4j/ollama4j</url> | ||||||
|         <tag>v1.0.61</tag> |         <tag>ollama4j-revision</tag> | ||||||
|     </scm> |     </scm> | ||||||
|  |  | ||||||
|     <build> |     <build> | ||||||
| @@ -70,27 +72,7 @@ | |||||||
|                     </execution> |                     </execution> | ||||||
|                 </executions> |                 </executions> | ||||||
|             </plugin> |             </plugin> | ||||||
|             <!--            <plugin>--> |  | ||||||
|             <!--                <groupId>org.apache.maven.plugins</groupId>--> |  | ||||||
|             <!--                <artifactId>maven-gpg-plugin</artifactId>--> |  | ||||||
|             <!--                <version>1.5</version>--> |  | ||||||
|             <!--                <executions>--> |  | ||||||
|             <!--                    <execution>--> |  | ||||||
|             <!--                        <id>sign-artifacts</id>--> |  | ||||||
|             <!--                        <phase>verify</phase>--> |  | ||||||
|             <!--                        <goals>--> |  | ||||||
|             <!--                            <goal>sign</goal>--> |  | ||||||
|             <!--                        </goals>--> |  | ||||||
|             <!--                        <configuration>--> |  | ||||||
|             <!--                            <!– This is necessary for gpg to not try to use the pinentry programs –>--> |  | ||||||
|             <!--                            <gpgArguments>--> |  | ||||||
|             <!--                                <arg>--pinentry-mode</arg>--> |  | ||||||
|             <!--                                <arg>loopback</arg>--> |  | ||||||
|             <!--                            </gpgArguments>--> |  | ||||||
|             <!--                        </configuration>--> |  | ||||||
|             <!--                    </execution>--> |  | ||||||
|             <!--                </executions>--> |  | ||||||
|             <!--            </plugin>--> |  | ||||||
|             <!-- Surefire Plugin for Unit Tests --> |             <!-- Surefire Plugin for Unit Tests --> | ||||||
|             <plugin> |             <plugin> | ||||||
|                 <groupId>org.apache.maven.plugins</groupId> |                 <groupId>org.apache.maven.plugins</groupId> | ||||||
| @@ -127,15 +109,23 @@ | |||||||
|                     </execution> |                     </execution> | ||||||
|                 </executions> |                 </executions> | ||||||
|             </plugin> |             </plugin> | ||||||
|  |  | ||||||
|  |  | ||||||
|             <plugin> |             <plugin> | ||||||
|                 <groupId>org.apache.maven.plugins</groupId> |                 <groupId>org.apache.maven.plugins</groupId> | ||||||
|                 <artifactId>maven-release-plugin</artifactId> |                 <artifactId>maven-gpg-plugin</artifactId> | ||||||
|                 <version>3.0.1</version> |                 <version>1.5</version> | ||||||
|                 <configuration> |                 <executions> | ||||||
|                     <!--                    <goals>install</goals>--> |                     <execution> | ||||||
|                     <tagNameFormat>v@{project.version}</tagNameFormat> |                         <id>sign-artifacts</id> | ||||||
|                 </configuration> |                         <phase>verify</phase> | ||||||
|  |                         <goals> | ||||||
|  |                             <goal>sign</goal> | ||||||
|  |                         </goals> | ||||||
|  |                     </execution> | ||||||
|  |                 </executions> | ||||||
|             </plugin> |             </plugin> | ||||||
|  |  | ||||||
|         </plugins> |         </plugins> | ||||||
|     </build> |     </build> | ||||||
|  |  | ||||||
| @@ -149,12 +139,17 @@ | |||||||
|         <dependency> |         <dependency> | ||||||
|             <groupId>com.fasterxml.jackson.core</groupId> |             <groupId>com.fasterxml.jackson.core</groupId> | ||||||
|             <artifactId>jackson-databind</artifactId> |             <artifactId>jackson-databind</artifactId> | ||||||
|             <version>2.15.3</version> |             <version>2.17.1</version> | ||||||
|  |         </dependency> | ||||||
|  |         <dependency> | ||||||
|  |             <groupId>com.fasterxml.jackson.datatype</groupId> | ||||||
|  |             <artifactId>jackson-datatype-jsr310</artifactId> | ||||||
|  |             <version>2.17.1</version> | ||||||
|         </dependency> |         </dependency> | ||||||
|         <dependency> |         <dependency> | ||||||
|             <groupId>ch.qos.logback</groupId> |             <groupId>ch.qos.logback</groupId> | ||||||
|             <artifactId>logback-classic</artifactId> |             <artifactId>logback-classic</artifactId> | ||||||
|             <version>1.4.12</version> |             <version>1.5.6</version> | ||||||
|             <scope>test</scope> |             <scope>test</scope> | ||||||
|         </dependency> |         </dependency> | ||||||
|         <dependency> |         <dependency> | ||||||
| @@ -183,17 +178,38 @@ | |||||||
|     </dependencies> |     </dependencies> | ||||||
|  |  | ||||||
|     <distributionManagement> |     <distributionManagement> | ||||||
|         <snapshotRepository> |  | ||||||
|             <id>ossrh</id> |  | ||||||
|             <url>https://s01.oss.sonatype.org/content/repositories/snapshots</url> |  | ||||||
|         </snapshotRepository> |  | ||||||
|         <repository> |         <repository> | ||||||
|             <id>ossrh</id> |             <id>mvn-repo-id</id> | ||||||
|             <url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2</url> |  | ||||||
|         </repository> |         </repository> | ||||||
|     </distributionManagement> |     </distributionManagement> | ||||||
|  |  | ||||||
|     <profiles> |     <profiles> | ||||||
|  |         <profile> | ||||||
|  |             <id>ossrh</id> | ||||||
|  |             <activation> | ||||||
|  |                 <activeByDefault>true</activeByDefault> | ||||||
|  |             </activation> | ||||||
|  |             <properties> | ||||||
|  |                 <gpg.executable>gpg2</gpg.executable> | ||||||
|  |                 <test.env>unit</test.env> | ||||||
|  |                 <skipUnitTests>false</skipUnitTests> | ||||||
|  |                 <skipIntegrationTests>true</skipIntegrationTests> | ||||||
|  |             </properties> | ||||||
|  |             <build> | ||||||
|  |                 <plugins> | ||||||
|  |                     <plugin> | ||||||
|  |                         <groupId>org.sonatype.central</groupId> | ||||||
|  |                         <artifactId>central-publishing-maven-plugin</artifactId> | ||||||
|  |                         <version>0.5.0</version> | ||||||
|  |                         <extensions>true</extensions> | ||||||
|  |                         <configuration> | ||||||
|  |                             <publishingServerId>mvn-repo-id</publishingServerId> | ||||||
|  |                             <autoPublish>true</autoPublish> | ||||||
|  |                         </configuration> | ||||||
|  |                     </plugin> | ||||||
|  |                 </plugins> | ||||||
|  |             </build> | ||||||
|  |         </profile> | ||||||
|         <profile> |         <profile> | ||||||
|             <id>unit-tests</id> |             <id>unit-tests</id> | ||||||
|             <properties> |             <properties> | ||||||
| @@ -202,7 +218,7 @@ | |||||||
|                 <skipIntegrationTests>true</skipIntegrationTests> |                 <skipIntegrationTests>true</skipIntegrationTests> | ||||||
|             </properties> |             </properties> | ||||||
|             <activation> |             <activation> | ||||||
|                 <activeByDefault>true</activeByDefault> |                 <activeByDefault>false</activeByDefault> | ||||||
|             </activation> |             </activation> | ||||||
|             <build> |             <build> | ||||||
|                 <plugins> |                 <plugins> | ||||||
|   | |||||||
| @@ -1,6 +1,8 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core; | package io.github.amithkoujalgi.ollama4j.core; | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolInvocationException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.ToolNotFoundException; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.*; | import io.github.amithkoujalgi.ollama4j.core.models.*; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; | ||||||
| @@ -9,18 +11,16 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; | |||||||
| import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel; | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest; | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest; | import io.github.amithkoujalgi.ollama4j.core.models.request.*; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.ModelRequest; | import io.github.amithkoujalgi.ollama4j.core.tools.*; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaChatEndpointCaller; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaGenerateEndpointCaller; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Options; | import io.github.amithkoujalgi.ollama4j.core.utils.Options; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
| import java.io.BufferedReader; | import lombok.Setter; | ||||||
| import java.io.File; | import org.slf4j.Logger; | ||||||
| import java.io.IOException; | import org.slf4j.LoggerFactory; | ||||||
| import java.io.InputStream; |  | ||||||
| import java.io.InputStreamReader; | import java.io.*; | ||||||
| import java.net.URI; | import java.net.URI; | ||||||
| import java.net.URISyntaxException; | import java.net.URISyntaxException; | ||||||
| import java.net.http.HttpClient; | import java.net.http.HttpClient; | ||||||
| @@ -30,22 +30,32 @@ import java.net.http.HttpResponse; | |||||||
| import java.nio.charset.StandardCharsets; | import java.nio.charset.StandardCharsets; | ||||||
| import java.nio.file.Files; | import java.nio.file.Files; | ||||||
| import java.time.Duration; | import java.time.Duration; | ||||||
| import java.util.ArrayList; | import java.util.*; | ||||||
| import java.util.Base64; |  | ||||||
| import java.util.List; |  | ||||||
| import org.slf4j.Logger; |  | ||||||
| import org.slf4j.LoggerFactory; |  | ||||||
|  |  | ||||||
| /** The base Ollama API class. */ | /** | ||||||
|  |  * The base Ollama API class. | ||||||
|  |  */ | ||||||
| @SuppressWarnings("DuplicatedCode") | @SuppressWarnings("DuplicatedCode") | ||||||
| public class OllamaAPI { | public class OllamaAPI { | ||||||
|  |  | ||||||
|     private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class); |     private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class); | ||||||
|     private final String host; |     private final String host; | ||||||
|   private long requestTimeoutSeconds = 3; |     /** | ||||||
|  |      * -- SETTER -- | ||||||
|  |      * Set request timeout in seconds. Default is 3 seconds. | ||||||
|  |      */ | ||||||
|  |     @Setter | ||||||
|  |     private long requestTimeoutSeconds = 10; | ||||||
|  |     /** | ||||||
|  |      * -- SETTER -- | ||||||
|  |      * Set/unset logging of responses | ||||||
|  |      */ | ||||||
|  |     @Setter | ||||||
|     private boolean verbose = true; |     private boolean verbose = true; | ||||||
|     private BasicAuth basicAuth; |     private BasicAuth basicAuth; | ||||||
|  |  | ||||||
|  |     private final ToolRegistry toolRegistry = new ToolRegistry(); | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
|      * Instantiates the Ollama API. |      * Instantiates the Ollama API. | ||||||
|      * |      * | ||||||
| @@ -59,24 +69,6 @@ public class OllamaAPI { | |||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    * Set request timeout in seconds. Default is 3 seconds. |  | ||||||
|    * |  | ||||||
|    * @param requestTimeoutSeconds the request timeout in seconds |  | ||||||
|    */ |  | ||||||
|   public void setRequestTimeoutSeconds(long requestTimeoutSeconds) { |  | ||||||
|     this.requestTimeoutSeconds = requestTimeoutSeconds; |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    * Set/unset logging of responses |  | ||||||
|    * |  | ||||||
|    * @param verbose true/false |  | ||||||
|    */ |  | ||||||
|   public void setVerbose(boolean verbose) { |  | ||||||
|     this.verbose = verbose; |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
|      * Set basic authentication for accessing Ollama server that's behind a reverse-proxy/gateway. |      * Set basic authentication for accessing Ollama server that's behind a reverse-proxy/gateway. | ||||||
|      * |      * | ||||||
| @@ -323,7 +315,7 @@ public class OllamaAPI { | |||||||
|      * @param modelRequest request for '/api/embeddings' endpoint |      * @param modelRequest request for '/api/embeddings' endpoint | ||||||
|      * @return embeddings |      * @return embeddings | ||||||
|      */ |      */ | ||||||
|   public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException{ |     public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException { | ||||||
|         URI uri = URI.create(this.host + "/api/embeddings"); |         URI uri = URI.create(this.host + "/api/embeddings"); | ||||||
|         String jsonData = modelRequest.toString(); |         String jsonData = modelRequest.toString(); | ||||||
|         HttpClient httpClient = HttpClient.newHttpClient(); |         HttpClient httpClient = HttpClient.newHttpClient(); | ||||||
| @@ -344,6 +336,7 @@ public class OllamaAPI { | |||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
|      * Generate response for a question to a model running on Ollama server. This is a sync/blocking |      * Generate response for a question to a model running on Ollama server. This is a sync/blocking | ||||||
|      * call. |      * call. | ||||||
| @@ -356,23 +349,67 @@ public class OllamaAPI { | |||||||
|      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. |      * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. | ||||||
|      * @return OllamaResult that includes response text and time taken for response |      * @return OllamaResult that includes response text and time taken for response | ||||||
|      */ |      */ | ||||||
|   public OllamaResult generate(String model, String prompt, Options options, OllamaStreamHandler streamHandler) |     public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler) | ||||||
|             throws OllamaBaseException, IOException, InterruptedException { |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); |         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); | ||||||
|  |         ollamaRequestModel.setRaw(raw); | ||||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); |         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||||
|     return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler); |         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
|    * Convenience method to call Ollama API without streaming responses. |      * Generates response using the specified AI model and prompt (in blocking mode). | ||||||
|  |      * <p> | ||||||
|  |      * Uses {@link #generate(String, String, boolean, Options, OllamaStreamHandler)} | ||||||
|      * |      * | ||||||
|    * Uses {@link #generate(String, String, Options, OllamaStreamHandler)} |      * @param model   The name or identifier of the AI model to use for generating the response. | ||||||
|  |      * @param prompt  The input text or prompt to provide to the AI model. | ||||||
|  |      * @param raw     In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the raw parameter to disable templating. Also note that raw mode will not return a context. | ||||||
|  |      * @param options Additional options or configurations to use when generating the response. | ||||||
|  |      * @return {@link OllamaResult} | ||||||
|      */ |      */ | ||||||
|   public OllamaResult generate(String model, String prompt, Options options) |     public OllamaResult generate(String model, String prompt, boolean raw, Options options) | ||||||
|             throws OllamaBaseException, IOException, InterruptedException { |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|     return generate(model, prompt, options,null); |         return generate(model, prompt, raw, options, null); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Generates response using the specified AI model and prompt (in blocking mode), and then invokes a set of tools | ||||||
|  |      * on the generated response. | ||||||
|  |      * | ||||||
|  |      * @param model   The name or identifier of the AI model to use for generating the response. | ||||||
|  |      * @param prompt  The input text or prompt to provide to the AI model. | ||||||
|  |      * @param options Additional options or configurations to use when generating the response. | ||||||
|  |      * @return {@link OllamaToolsResult} An OllamaToolsResult object containing the response from the AI model and the results of invoking the tools on that output. | ||||||
|  |      * @throws OllamaBaseException  If there is an error related to the Ollama API or service. | ||||||
|  |      * @throws IOException          If there is an error related to input/output operations. | ||||||
|  |      * @throws InterruptedException If the method is interrupted while waiting for the AI model | ||||||
|  |      *                              to generate the response or for the tools to be invoked. | ||||||
|  |      */ | ||||||
|  |     public OllamaToolsResult generateWithTools(String model, String prompt, Options options) | ||||||
|  |             throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException { | ||||||
|  |         boolean raw = true; | ||||||
|  |         OllamaToolsResult toolResult = new OllamaToolsResult(); | ||||||
|  |         Map<ToolFunctionCallSpec, Object> toolResults = new HashMap<>(); | ||||||
|  |  | ||||||
|  |         OllamaResult result = generate(model, prompt, raw, options, null); | ||||||
|  |         toolResult.setModelResult(result); | ||||||
|  |  | ||||||
|  |         String toolsResponse = result.getResponse(); | ||||||
|  |         if (toolsResponse.contains("[TOOL_CALLS]")) { | ||||||
|  |             toolsResponse = toolsResponse.replace("[TOOL_CALLS]", ""); | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         List<ToolFunctionCallSpec> toolFunctionCallSpecs = Utils.getObjectMapper().readValue(toolsResponse, Utils.getObjectMapper().getTypeFactory().constructCollectionType(List.class, ToolFunctionCallSpec.class)); | ||||||
|  |         for (ToolFunctionCallSpec toolFunctionCallSpec : toolFunctionCallSpecs) { | ||||||
|  |             toolResults.put(toolFunctionCallSpec, invokeTool(toolFunctionCallSpec)); | ||||||
|  |         } | ||||||
|  |         toolResult.setToolResults(toolResults); | ||||||
|  |         return toolResult; | ||||||
|  |     } | ||||||
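generateWithTools forces raw mode, strips an optional [TOOL_CALLS] marker, deserializes the remainder as a JSON array of ToolFunctionCallSpec entries, and invokes each matching registered tool. A hedged consumption sketch, assuming tools were registered beforehand and that toolsPrompt was produced with the Tools.PromptBuilder introduced later in this diff:

OllamaToolsResult toolsResult =
        ollamaAPI.generateWithTools("mistral", toolsPrompt, new OptionsBuilder().build());
System.out.println(toolsResult.getModelResult().getResponse());       // raw tool-call JSON from the model
for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
    System.out.println(r.getFunctionName() + " -> " + r.getResult()); // result of invoking each tool
}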
|  |  | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
|      * Generate response for a question to a model running on Ollama server and get a callback handle |      * Generate response for a question to a model running on Ollama server and get a callback handle | ||||||
|      * that can be used to check for status and get the response from the model later. This would be |      * that can be used to check for status and get the response from the model later. This would be | ||||||
| @@ -382,15 +419,15 @@ public class OllamaAPI { | |||||||
|      * @param prompt the prompt/question text |      * @param prompt the prompt/question text | ||||||
|      * @return the ollama async result callback handle |      * @return the ollama async result callback handle | ||||||
|      */ |      */ | ||||||
|   public OllamaAsyncResultCallback generateAsync(String model, String prompt) { |     public OllamaAsyncResultStreamer generateAsync(String model, String prompt, boolean raw) { | ||||||
|         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); |         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); | ||||||
|  |         ollamaRequestModel.setRaw(raw); | ||||||
|         URI uri = URI.create(this.host + "/api/generate"); |         URI uri = URI.create(this.host + "/api/generate"); | ||||||
|     OllamaAsyncResultCallback ollamaAsyncResultCallback = |         OllamaAsyncResultStreamer ollamaAsyncResultStreamer = | ||||||
|         new OllamaAsyncResultCallback( |                 new OllamaAsyncResultStreamer( | ||||||
|                         getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds); |                         getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds); | ||||||
|     ollamaAsyncResultCallback.start(); |         ollamaAsyncResultStreamer.start(); | ||||||
|     return ollamaAsyncResultCallback; |         return ollamaAsyncResultStreamer; | ||||||
|     } |     } | ||||||
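Because OllamaAsyncResultStreamer (added below in this changeset) is a Thread whose stream field buffers tokens, a caller can poll it while the request is in flight. A minimal sketch, assuming the streamer's Lombok-generated getters and an enclosing method that declares InterruptedException:

OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync("mistral", "List the oceans.", false);
while (streamer.isAlive()) {
    System.out.print(streamer.getStream().poll());   // poll() drains everything buffered so far
    Thread.sleep(250);                               // simple back-off between polls
}
System.out.println();
System.out.println("HTTP " + streamer.getHttpStatusCode() + ", took " + streamer.getResponseTime() + " ms");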
|  |  | ||||||
|     /** |     /** | ||||||
| @@ -415,19 +452,19 @@ public class OllamaAPI { | |||||||
|         } |         } | ||||||
|         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); |         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); | ||||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); |         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||||
|     return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler); |         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
|      * Convenience method to call Ollama API without streaming responses. |      * Convenience method to call Ollama API without streaming responses. | ||||||
|    *  |      * <p> | ||||||
|      * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)} |      * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)} | ||||||
|      */ |      */ | ||||||
|     public OllamaResult generateWithImageFiles( |     public OllamaResult generateWithImageFiles( | ||||||
|             String model, String prompt, List<File> imageFiles, Options options) |             String model, String prompt, List<File> imageFiles, Options options) | ||||||
|     throws OllamaBaseException, IOException, InterruptedException{ |             throws OllamaBaseException, IOException, InterruptedException { | ||||||
|         return generateWithImageFiles(model, prompt, imageFiles, options, null); |         return generateWithImageFiles(model, prompt, imageFiles, options, null); | ||||||
| } |     } | ||||||
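A short sketch for the image-file overload; the llava model name and the file path are placeholders, and the fragment assumes java.io.File and java.util.List are imported:

OllamaResult visionResult = ollamaAPI.generateWithImageFiles(
        "llava", "What is in this picture?",
        List.of(new File("/path/to/image.jpg")),      // placeholder path
        new OptionsBuilder().build());
System.out.println(visionResult.getResponse());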
|  |  | ||||||
|     /** |     /** | ||||||
|      * With one or more image URLs, ask a question to a model running on Ollama server. This is a |      * With one or more image URLs, ask a question to a model running on Ollama server. This is a | ||||||
| @@ -451,12 +488,12 @@ public class OllamaAPI { | |||||||
|         } |         } | ||||||
|         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); |         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); | ||||||
|         ollamaRequestModel.setOptions(options.getOptionsMap()); |         ollamaRequestModel.setOptions(options.getOptionsMap()); | ||||||
|     return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler); |         return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
|      * Convenience method to call Ollama API without streaming responses. |      * Convenience method to call Ollama API without streaming responses. | ||||||
|    *  |      * <p> | ||||||
|      * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)} |      * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)} | ||||||
|      */ |      */ | ||||||
|     public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs, |     public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs, | ||||||
| @@ -466,7 +503,6 @@ public class OllamaAPI { | |||||||
|     } |     } | ||||||
|  |  | ||||||
|  |  | ||||||
|    |  | ||||||
|     /** |     /** | ||||||
|      * Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the |      * Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the | ||||||
|      * 'api/chat' endpoint. |      * 'api/chat' endpoint. | ||||||
| @@ -478,51 +514,54 @@ public class OllamaAPI { | |||||||
|      * @throws IOException          in case the responseStream can not be read |      * @throws IOException          in case the responseStream can not be read | ||||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen |      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||||
|      */ |      */ | ||||||
|   public OllamaChatResult chat(String model, List<OllamaChatMessage> messages)  throws OllamaBaseException, IOException, InterruptedException{ |     public OllamaChatResult chat(String model, List<OllamaChatMessage> messages) throws OllamaBaseException, IOException, InterruptedException { | ||||||
|         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model); |         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model); | ||||||
|         return chat(builder.withMessages(messages).build()); |         return chat(builder.withMessages(messages).build()); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
|      * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. |      * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. | ||||||
|    *  |      * <p> | ||||||
|      * Hint: the OllamaChatRequestModel#getStream() property is not implemented. |      * Hint: the OllamaChatRequestModel#getStream() property is not implemented. | ||||||
|      * |      * | ||||||
|      * @param request request object to be sent to the server |      * @param request request object to be sent to the server | ||||||
|    * @return  |      * @return {@link OllamaChatResult} | ||||||
|      * @throws OllamaBaseException  if any response code other than 200 is returned |      * @throws OllamaBaseException  if any response code other than 200 is returned | ||||||
|      * @throws IOException          in case the responseStream can not be read |      * @throws IOException          in case the responseStream can not be read | ||||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen |      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||||
|      */ |      */ | ||||||
|   public OllamaChatResult chat(OllamaChatRequestModel request)  throws OllamaBaseException, IOException, InterruptedException{ |     public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException { | ||||||
|     return chat(request,null); |         return chat(request, null); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     /** |     /** | ||||||
|      * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. |      * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. | ||||||
|    *  |      * <p> | ||||||
|      * Hint: the OllamaChatRequestModel#getStream() property is not implemented. |      * Hint: the OllamaChatRequestModel#getStream() property is not implemented. | ||||||
|      * |      * | ||||||
|      * @param request       request object to be sent to the server |      * @param request       request object to be sent to the server | ||||||
|      * @param streamHandler callback handler invoked on each streamed response part (caution: the handler receives the concatenation of all message parts streamed so far) |      * @param streamHandler callback handler invoked on each streamed response part (caution: the handler receives the concatenation of all message parts streamed so far) | ||||||
|    * @return  |      * @return {@link OllamaChatResult} | ||||||
|      * @throws OllamaBaseException  if any response code other than 200 is returned |      * @throws OllamaBaseException  if any response code other than 200 is returned | ||||||
|      * @throws IOException          in case the responseStream can not be read |      * @throws IOException          in case the responseStream can not be read | ||||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen |      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||||
|      */ |      */ | ||||||
|   public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler)  throws OllamaBaseException, IOException, InterruptedException{ |     public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { | ||||||
|         OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); |         OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); | ||||||
|         OllamaResult result; |         OllamaResult result; | ||||||
|     if(streamHandler != null){ |         if (streamHandler != null) { | ||||||
|             request.setStream(true); |             request.setStream(true); | ||||||
|             result = requestCaller.call(request, streamHandler); |             result = requestCaller.call(request, streamHandler); | ||||||
|     } |         } else { | ||||||
|     else { |  | ||||||
|             result = requestCaller.callSync(request); |             result = requestCaller.callSync(request); | ||||||
|         } |         } | ||||||
|         return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages()); |         return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages()); | ||||||
|     } |     } | ||||||
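A chat sketch using the request builder already referenced above. The withMessage(role, text) call is assumed from the library's builder API, and the stream handler again receives the concatenated message so far:

OllamaChatRequestModel chatRequest = OllamaChatRequestBuilder.getInstance("mistral")
        .withMessage(OllamaChatMessageRole.USER, "Why is the sky blue?")   // assumed builder helper
        .build();
OllamaChatResult chatResult = ollamaAPI.chat(chatRequest, cumulative -> System.out.println(cumulative));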
|  |  | ||||||
|  |     public void registerTool(Tools.ToolSpecification toolSpecification) { | ||||||
|  |         toolRegistry.addFunction(toolSpecification.getFunctionName(), toolSpecification.getToolDefinition()); | ||||||
|  |     } | ||||||
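registerTool only stores the function in the ToolRegistry under its name. Paired with the Lombok builder on Tools.ToolSpecification (added later in this diff), a minimal registration sketch looks like this; the getCurrentTime tool is hypothetical:

ollamaAPI.registerTool(Tools.ToolSpecification.builder()
        .functionName("getCurrentTime")                                   // hypothetical tool name
        .functionDescription("Returns the current time as an ISO-8601 string")
        .properties(new Tools.PropsBuilder().build())                     // tool takes no parameters
        .toolDefinition(arguments -> java.time.OffsetDateTime.now().toString())
        .build());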
|  |  | ||||||
|     // technical private methods // |     // technical private methods // | ||||||
|  |  | ||||||
|     private static String encodeFileToBase64(File file) throws IOException { |     private static String encodeFileToBase64(File file) throws IOException { | ||||||
| @@ -583,4 +622,22 @@ public class OllamaAPI { | |||||||
|     private boolean isBasicAuthCredentialsSet() { |     private boolean isBasicAuthCredentialsSet() { | ||||||
|         return basicAuth != null; |         return basicAuth != null; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     private Object invokeTool(ToolFunctionCallSpec toolFunctionCallSpec) throws ToolInvocationException { | ||||||
|  |         try { | ||||||
|  |             String methodName = toolFunctionCallSpec.getName(); | ||||||
|  |             Map<String, Object> arguments = toolFunctionCallSpec.getArguments(); | ||||||
|  |             ToolFunction function = toolRegistry.getFunction(methodName); | ||||||
|  |             if (verbose) { | ||||||
|  |                 logger.debug("Invoking function {} with arguments {}", methodName, arguments); | ||||||
|  |             } | ||||||
|  |             if (function == null) { | ||||||
|  |                 throw new ToolNotFoundException("No such tool: " + methodName); | ||||||
|  |             } | ||||||
|  |             return function.apply(arguments); | ||||||
|  |         } catch (Exception e) { | ||||||
|  |             throw new ToolInvocationException("Failed to invoke tool: " + toolFunctionCallSpec.getName(), e); | ||||||
|  |         } | ||||||
|  |     } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -0,0 +1,18 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core; | ||||||
|  |  | ||||||
|  | import java.util.Iterator; | ||||||
|  | import java.util.LinkedList; | ||||||
|  | import java.util.Queue; | ||||||
|  |  | ||||||
|  | public class OllamaResultStream extends LinkedList<String> implements Queue<String> { | ||||||
|  |     @Override | ||||||
|  |     public String poll() { | ||||||
|  |         StringBuilder tokens = new StringBuilder(); | ||||||
|  |         Iterator<String> iterator = this.listIterator(); | ||||||
|  |         while (iterator.hasNext()) { | ||||||
|  |             tokens.append(iterator.next()); | ||||||
|  |             iterator.remove(); | ||||||
|  |         } | ||||||
|  |         return tokens.toString(); | ||||||
|  |     } | ||||||
|  | } | ||||||
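Note that poll() here does not behave like Queue.poll(): it concatenates and removes every queued element, so each call drains whatever has been buffered since the previous call. For example:

OllamaResultStream stream = new OllamaResultStream();
stream.add("Hello");
stream.add(", world");
String drained = stream.poll();   // "Hello, world" — the queue is now empty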
| @@ -1,7 +0,0 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core; |  | ||||||
|  |  | ||||||
| import java.util.function.Consumer; |  | ||||||
|  |  | ||||||
| public interface OllamaStreamHandler extends Consumer<String>{ |  | ||||||
|     void accept(String message); |  | ||||||
| } |  | ||||||
| @@ -0,0 +1,8 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.exceptions; | ||||||
|  |  | ||||||
|  | public class ToolInvocationException extends Exception { | ||||||
|  |  | ||||||
|  |     public ToolInvocationException(String s, Exception e) { | ||||||
|  |         super(s, e); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,8 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.exceptions; | ||||||
|  |  | ||||||
|  | public class ToolNotFoundException extends Exception { | ||||||
|  |  | ||||||
|  |     public ToolNotFoundException(String s) { | ||||||
|  |         super(s); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,14 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.impl; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; | ||||||
|  |  | ||||||
|  | public class ConsoleOutputStreamHandler implements OllamaStreamHandler { | ||||||
|  |     private final StringBuffer response = new StringBuffer(); | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void accept(String message) { | ||||||
|  |         String substr = message.substring(response.length()); | ||||||
|  |         response.append(substr); | ||||||
|  |         System.out.print(substr); | ||||||
|  |     } | ||||||
|  | } | ||||||
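This handler assumes each callback carries the cumulative response (which matches the stream observers in this changeset) and prints only the newly appended suffix. A usage sketch, reusing the ollamaAPI client from the earlier fragments:

OllamaStreamHandler consoleHandler = new ConsoleOutputStreamHandler();
ollamaAPI.generate("mistral", "Tell me a short joke.", false,
        new OptionsBuilder().build(), consoleHandler);   // tokens appear on stdout as they arrive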
| @@ -1,5 +1,8 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models; | package io.github.amithkoujalgi.ollama4j.core.models; | ||||||
|  |  | ||||||
|  | import java.time.LocalDateTime; | ||||||
|  | import java.time.OffsetDateTime; | ||||||
|  |  | ||||||
| import com.fasterxml.jackson.annotation.JsonProperty; | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
| import com.fasterxml.jackson.core.JsonProcessingException; | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
| @@ -11,7 +14,9 @@ public class Model { | |||||||
|   private String name; |   private String name; | ||||||
|   private String model; |   private String model; | ||||||
|   @JsonProperty("modified_at") |   @JsonProperty("modified_at") | ||||||
|   private String modifiedAt; |   private OffsetDateTime modifiedAt; | ||||||
|  |   @JsonProperty("expires_at") | ||||||
|  |   private OffsetDateTime expiresAt; | ||||||
|   private String digest; |   private String digest; | ||||||
|   private long size; |   private long size; | ||||||
|   @JsonProperty("details") |   @JsonProperty("details") | ||||||
|   | |||||||
| @@ -1,143 +0,0 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models; |  | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; |  | ||||||
| import java.io.BufferedReader; |  | ||||||
| import java.io.IOException; |  | ||||||
| import java.io.InputStream; |  | ||||||
| import java.io.InputStreamReader; |  | ||||||
| import java.net.http.HttpClient; |  | ||||||
| import java.net.http.HttpRequest; |  | ||||||
| import java.net.http.HttpResponse; |  | ||||||
| import java.nio.charset.StandardCharsets; |  | ||||||
| import java.time.Duration; |  | ||||||
| import java.util.LinkedList; |  | ||||||
| import java.util.Queue; |  | ||||||
| import lombok.Data; |  | ||||||
| import lombok.EqualsAndHashCode; |  | ||||||
| import lombok.Getter; |  | ||||||
|  |  | ||||||
| @Data |  | ||||||
| @EqualsAndHashCode(callSuper = true) |  | ||||||
| @SuppressWarnings("unused") |  | ||||||
| public class OllamaAsyncResultCallback extends Thread { |  | ||||||
|   private final HttpRequest.Builder requestBuilder; |  | ||||||
|   private final OllamaGenerateRequestModel ollamaRequestModel; |  | ||||||
|   private final Queue<String> queue = new LinkedList<>(); |  | ||||||
|   private String result; |  | ||||||
|   private boolean isDone; |  | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    * -- GETTER -- Returns the status of the request. Indicates if the request was successful or a |  | ||||||
|    * failure. If the request was a failure, the `getResponse()` method will return the error |  | ||||||
|    * message. |  | ||||||
|    */ |  | ||||||
|   @Getter private boolean succeeded; |  | ||||||
|  |  | ||||||
|   private long requestTimeoutSeconds; |  | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    * -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama |  | ||||||
|    * server. |  | ||||||
|    */ |  | ||||||
|   @Getter private int httpStatusCode; |  | ||||||
|  |  | ||||||
|   /** -- GETTER -- Returns the response time in milliseconds. */ |  | ||||||
|   @Getter private long responseTime = 0; |  | ||||||
|  |  | ||||||
|   public OllamaAsyncResultCallback( |  | ||||||
|       HttpRequest.Builder requestBuilder, |  | ||||||
|       OllamaGenerateRequestModel ollamaRequestModel, |  | ||||||
|       long requestTimeoutSeconds) { |  | ||||||
|     this.requestBuilder = requestBuilder; |  | ||||||
|     this.ollamaRequestModel = ollamaRequestModel; |  | ||||||
|     this.isDone = false; |  | ||||||
|     this.result = ""; |  | ||||||
|     this.queue.add(""); |  | ||||||
|     this.requestTimeoutSeconds = requestTimeoutSeconds; |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   @Override |  | ||||||
|   public void run() { |  | ||||||
|     HttpClient httpClient = HttpClient.newHttpClient(); |  | ||||||
|     try { |  | ||||||
|       long startTime = System.currentTimeMillis(); |  | ||||||
|       HttpRequest request = |  | ||||||
|           requestBuilder |  | ||||||
|               .POST( |  | ||||||
|                   HttpRequest.BodyPublishers.ofString( |  | ||||||
|                       Utils.getObjectMapper().writeValueAsString(ollamaRequestModel))) |  | ||||||
|               .header("Content-Type", "application/json") |  | ||||||
|               .timeout(Duration.ofSeconds(requestTimeoutSeconds)) |  | ||||||
|               .build(); |  | ||||||
|       HttpResponse<InputStream> response = |  | ||||||
|           httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); |  | ||||||
|       int statusCode = response.statusCode(); |  | ||||||
|       this.httpStatusCode = statusCode; |  | ||||||
|  |  | ||||||
|       InputStream responseBodyStream = response.body(); |  | ||||||
|       try (BufferedReader reader = |  | ||||||
|           new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { |  | ||||||
|         String line; |  | ||||||
|         StringBuilder responseBuffer = new StringBuilder(); |  | ||||||
|         while ((line = reader.readLine()) != null) { |  | ||||||
|           if (statusCode == 404) { |  | ||||||
|             OllamaErrorResponseModel ollamaResponseModel = |  | ||||||
|                 Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class); |  | ||||||
|             queue.add(ollamaResponseModel.getError()); |  | ||||||
|             responseBuffer.append(ollamaResponseModel.getError()); |  | ||||||
|           } else { |  | ||||||
|             OllamaGenerateResponseModel ollamaResponseModel = |  | ||||||
|                 Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); |  | ||||||
|             queue.add(ollamaResponseModel.getResponse()); |  | ||||||
|             if (!ollamaResponseModel.isDone()) { |  | ||||||
|               responseBuffer.append(ollamaResponseModel.getResponse()); |  | ||||||
|             } |  | ||||||
|           } |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         this.isDone = true; |  | ||||||
|         this.succeeded = true; |  | ||||||
|         this.result = responseBuffer.toString(); |  | ||||||
|         long endTime = System.currentTimeMillis(); |  | ||||||
|         responseTime = endTime - startTime; |  | ||||||
|       } |  | ||||||
|       if (statusCode != 200) { |  | ||||||
|         throw new OllamaBaseException(this.result); |  | ||||||
|       } |  | ||||||
|     } catch (IOException | InterruptedException | OllamaBaseException e) { |  | ||||||
|       this.isDone = true; |  | ||||||
|       this.succeeded = false; |  | ||||||
|       this.result = "[FAILED] " + e.getMessage(); |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    * Returns the status of the thread. This does not indicate that the request was successful or a |  | ||||||
|    * failure, rather it is just a status flag to indicate if the thread is active or ended. |  | ||||||
|    * |  | ||||||
|    * @return boolean - status |  | ||||||
|    */ |  | ||||||
|   public boolean isComplete() { |  | ||||||
|     return isDone; |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    * Returns the final completion/response when the execution completes. Does not return intermediate results. |  | ||||||
|    * |  | ||||||
|    * @return String completion/response text |  | ||||||
|    */ |  | ||||||
|   public String getResponse() { |  | ||||||
|     return result; |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   public Queue<String> getStream() { |  | ||||||
|     return queue; |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   public void setRequestTimeoutSeconds(long requestTimeoutSeconds) { |  | ||||||
|     this.requestTimeoutSeconds = requestTimeoutSeconds; |  | ||||||
|   } |  | ||||||
| } |  | ||||||
| @@ -0,0 +1,124 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.OllamaResultStream; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  | import lombok.Data; | ||||||
|  | import lombok.EqualsAndHashCode; | ||||||
|  | import lombok.Getter; | ||||||
|  | import lombok.Setter; | ||||||
|  |  | ||||||
|  | import java.io.BufferedReader; | ||||||
|  | import java.io.IOException; | ||||||
|  | import java.io.InputStream; | ||||||
|  | import java.io.InputStreamReader; | ||||||
|  | import java.net.http.HttpClient; | ||||||
|  | import java.net.http.HttpRequest; | ||||||
|  | import java.net.http.HttpResponse; | ||||||
|  | import java.nio.charset.StandardCharsets; | ||||||
|  | import java.time.Duration; | ||||||
|  |  | ||||||
|  | @Data | ||||||
|  | @EqualsAndHashCode(callSuper = true) | ||||||
|  | @SuppressWarnings("unused") | ||||||
|  | public class OllamaAsyncResultStreamer extends Thread { | ||||||
|  |     private final HttpRequest.Builder requestBuilder; | ||||||
|  |     private final OllamaGenerateRequestModel ollamaRequestModel; | ||||||
|  |     private final OllamaResultStream stream = new OllamaResultStream(); | ||||||
|  |     private String completeResponse; | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * -- GETTER -- Returns the status of the request. Indicates if the request was successful or a | ||||||
|  |      * failure. If the request was a failure, the `getCompleteResponse()` method will return the error | ||||||
|  |      * message. | ||||||
|  |      */ | ||||||
|  |     @Getter | ||||||
|  |     private boolean succeeded; | ||||||
|  |  | ||||||
|  |     @Setter | ||||||
|  |     private long requestTimeoutSeconds; | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * -- GETTER -- Returns the HTTP response status code for the request that was made to Ollama | ||||||
|  |      * server. | ||||||
|  |      */ | ||||||
|  |     @Getter | ||||||
|  |     private int httpStatusCode; | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * -- GETTER -- Returns the response time in milliseconds. | ||||||
|  |      */ | ||||||
|  |     @Getter | ||||||
|  |     private long responseTime = 0; | ||||||
|  |  | ||||||
|  |     public OllamaAsyncResultStreamer( | ||||||
|  |             HttpRequest.Builder requestBuilder, | ||||||
|  |             OllamaGenerateRequestModel ollamaRequestModel, | ||||||
|  |             long requestTimeoutSeconds) { | ||||||
|  |         this.requestBuilder = requestBuilder; | ||||||
|  |         this.ollamaRequestModel = ollamaRequestModel; | ||||||
|  |         this.completeResponse = ""; | ||||||
|  |         this.stream.add(""); | ||||||
|  |         this.requestTimeoutSeconds = requestTimeoutSeconds; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void run() { | ||||||
|  |         ollamaRequestModel.setStream(true); | ||||||
|  |         HttpClient httpClient = HttpClient.newHttpClient(); | ||||||
|  |         try { | ||||||
|  |             long startTime = System.currentTimeMillis(); | ||||||
|  |             HttpRequest request = | ||||||
|  |                     requestBuilder | ||||||
|  |                             .POST( | ||||||
|  |                                     HttpRequest.BodyPublishers.ofString( | ||||||
|  |                                             Utils.getObjectMapper().writeValueAsString(ollamaRequestModel))) | ||||||
|  |                             .header("Content-Type", "application/json") | ||||||
|  |                             .timeout(Duration.ofSeconds(requestTimeoutSeconds)) | ||||||
|  |                             .build(); | ||||||
|  |             HttpResponse<InputStream> response = | ||||||
|  |                     httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); | ||||||
|  |             int statusCode = response.statusCode(); | ||||||
|  |             this.httpStatusCode = statusCode; | ||||||
|  |  | ||||||
|  |             InputStream responseBodyStream = response.body(); | ||||||
|  |             try (BufferedReader reader = | ||||||
|  |                          new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { | ||||||
|  |                 String line; | ||||||
|  |                 StringBuilder responseBuffer = new StringBuilder(); | ||||||
|  |                 while ((line = reader.readLine()) != null) { | ||||||
|  |                     if (statusCode == 404) { | ||||||
|  |                         OllamaErrorResponseModel ollamaResponseModel = | ||||||
|  |                                 Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class); | ||||||
|  |                         stream.add(ollamaResponseModel.getError()); | ||||||
|  |                         responseBuffer.append(ollamaResponseModel.getError()); | ||||||
|  |                     } else { | ||||||
|  |                         OllamaGenerateResponseModel ollamaResponseModel = | ||||||
|  |                                 Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); | ||||||
|  |                         String res = ollamaResponseModel.getResponse(); | ||||||
|  |                         stream.add(res); | ||||||
|  |                         if (!ollamaResponseModel.isDone()) { | ||||||
|  |                             responseBuffer.append(res); | ||||||
|  |                         } | ||||||
|  |                     } | ||||||
|  |                 } | ||||||
|  |  | ||||||
|  |                 this.succeeded = true; | ||||||
|  |                 this.completeResponse = responseBuffer.toString(); | ||||||
|  |                 long endTime = System.currentTimeMillis(); | ||||||
|  |                 responseTime = endTime - startTime; | ||||||
|  |             } | ||||||
|  |             if (statusCode != 200) { | ||||||
|  |                 throw new OllamaBaseException(this.completeResponse); | ||||||
|  |             } | ||||||
|  |         } catch (IOException | InterruptedException | OllamaBaseException e) { | ||||||
|  |             this.succeeded = false; | ||||||
|  |             this.completeResponse = "[FAILED] " + e.getMessage(); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | } | ||||||
|  |  | ||||||
| @@ -1,14 +1,15 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models.chat; | package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||||
|  |  | ||||||
| import com.fasterxml.jackson.annotation.JsonProperty; | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
|  | import lombok.Data; | ||||||
|  |  | ||||||
| import java.util.List; | import java.util.List; | ||||||
| import lombok.Data; |  | ||||||
|  |  | ||||||
| @Data | @Data | ||||||
| public class OllamaChatResponseModel { | public class OllamaChatResponseModel { | ||||||
|     private String model; |     private String model; | ||||||
|     private @JsonProperty("created_at") String createdAt; |     private @JsonProperty("created_at") String createdAt; | ||||||
|  |     private @JsonProperty("done_reason") String doneReason; | ||||||
|     private OllamaChatMessage message; |     private OllamaChatMessage message; | ||||||
|     private boolean done; |     private boolean done; | ||||||
|     private String error; |     private String error; | ||||||
|   | |||||||
| @@ -1,10 +1,10 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models.chat; | package io.github.amithkoujalgi.ollama4j.core.models.chat; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; | ||||||
|  |  | ||||||
| import java.util.ArrayList; | import java.util.ArrayList; | ||||||
| import java.util.List; | import java.util.List; | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; |  | ||||||
|  |  | ||||||
| public class OllamaChatStreamObserver { | public class OllamaChatStreamObserver { | ||||||
|  |  | ||||||
|     private OllamaStreamHandler streamHandler; |     private OllamaStreamHandler streamHandler; | ||||||
| @@ -17,12 +17,12 @@ public class OllamaChatStreamObserver { | |||||||
|         this.streamHandler = streamHandler; |         this.streamHandler = streamHandler; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     public void notify(OllamaChatResponseModel currentResponsePart){ |     public void notify(OllamaChatResponseModel currentResponsePart) { | ||||||
|         responseParts.add(currentResponsePart); |         responseParts.add(currentResponsePart); | ||||||
|         handleCurrentResponsePart(currentResponsePart); |         handleCurrentResponsePart(currentResponsePart); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart){ |     protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart) { | ||||||
|         message = message + currentResponsePart.getMessage().getContent(); |         message = message + currentResponsePart.getMessage().getContent(); | ||||||
|         streamHandler.accept(message); |         streamHandler.accept(message); | ||||||
|     } |     } | ||||||
|   | |||||||
| @@ -3,8 +3,6 @@ package io.github.amithkoujalgi.ollama4j.core.models.generate; | |||||||
| import java.util.ArrayList; | import java.util.ArrayList; | ||||||
| import java.util.List; | import java.util.List; | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; |  | ||||||
|  |  | ||||||
| public class OllamaGenerateStreamObserver { | public class OllamaGenerateStreamObserver { | ||||||
|  |  | ||||||
|     private OllamaStreamHandler streamHandler; |     private OllamaStreamHandler streamHandler; | ||||||
| @@ -17,12 +15,12 @@ public class OllamaGenerateStreamObserver { | |||||||
|         this.streamHandler = streamHandler; |         this.streamHandler = streamHandler; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     public void notify(OllamaGenerateResponseModel currentResponsePart){ |     public void notify(OllamaGenerateResponseModel currentResponsePart) { | ||||||
|         responseParts.add(currentResponsePart); |         responseParts.add(currentResponsePart); | ||||||
|         handleCurrentResponsePart(currentResponsePart); |         handleCurrentResponsePart(currentResponsePart); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart){ |     protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart) { | ||||||
|         message = message + currentResponsePart.getResponse(); |         message = message + currentResponsePart.getResponse(); | ||||||
|         streamHandler.accept(message); |         streamHandler.accept(message); | ||||||
|     } |     } | ||||||
|   | |||||||
| @@ -0,0 +1,7 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.models.generate; | ||||||
|  |  | ||||||
|  | import java.util.function.Consumer; | ||||||
|  |  | ||||||
|  | public interface OllamaStreamHandler extends Consumer<String> { | ||||||
|  |     void accept(String message); | ||||||
|  | } | ||||||
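Since the interface merely narrows Consumer<String>, any lambda or method reference can serve as a handler, e.g.:

OllamaStreamHandler logHandler = partial -> System.out.println("so far: " + partial);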
| @@ -1,25 +1,23 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models.request; | package io.github.amithkoujalgi.ollama4j.core.models.request; | ||||||
|  |  | ||||||
| import java.io.IOException; |  | ||||||
|  |  | ||||||
| import org.slf4j.Logger; |  | ||||||
| import org.slf4j.LoggerFactory; |  | ||||||
|  |  | ||||||
| import com.fasterxml.jackson.core.JsonProcessingException; | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; | import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  | import org.slf4j.Logger; | ||||||
|  | import org.slf4j.LoggerFactory; | ||||||
|  |  | ||||||
|  | import java.io.IOException; | ||||||
|  |  | ||||||
| /** | /** | ||||||
|  * Specialization class for requests |  * Specialization class for requests | ||||||
|  */ |  */ | ||||||
| public class OllamaChatEndpointCaller extends OllamaEndpointCaller{ | public class OllamaChatEndpointCaller extends OllamaEndpointCaller { | ||||||
|  |  | ||||||
|     private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class); |     private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class); | ||||||
|  |  | ||||||
| @@ -39,12 +37,12 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{ | |||||||
|         try { |         try { | ||||||
|             OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class); |             OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class); | ||||||
|             responseBuffer.append(ollamaResponseModel.getMessage().getContent()); |             responseBuffer.append(ollamaResponseModel.getMessage().getContent()); | ||||||
|             if(streamObserver != null) { |             if (streamObserver != null) { | ||||||
|                 streamObserver.notify(ollamaResponseModel); |                 streamObserver.notify(ollamaResponseModel); | ||||||
|             } |             } | ||||||
|             return ollamaResponseModel.isDone(); |             return ollamaResponseModel.isDone(); | ||||||
|         } catch (JsonProcessingException e) { |         } catch (JsonProcessingException e) { | ||||||
|             LOG.error("Error parsing the Ollama chat response!",e); |             LOG.error("Error parsing the Ollama chat response!", e); | ||||||
|             return true; |             return true; | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
| @@ -54,7 +52,4 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{ | |||||||
|         streamObserver = new OllamaChatStreamObserver(streamHandler); |         streamObserver = new OllamaChatStreamObserver(streamHandler); | ||||||
|         return super.callSync(body); |         return super.callSync(body); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|      |  | ||||||
|   |  | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,5 +1,15 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models.request; | package io.github.amithkoujalgi.ollama4j.core.models.request; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  | import org.slf4j.Logger; | ||||||
|  | import org.slf4j.LoggerFactory; | ||||||
|  |  | ||||||
| import java.io.BufferedReader; | import java.io.BufferedReader; | ||||||
| import java.io.IOException; | import java.io.IOException; | ||||||
| import java.io.InputStream; | import java.io.InputStream; | ||||||
| @@ -12,17 +22,6 @@ import java.nio.charset.StandardCharsets; | |||||||
| import java.time.Duration; | import java.time.Duration; | ||||||
| import java.util.Base64; | import java.util.Base64; | ||||||
|  |  | ||||||
| import org.slf4j.Logger; |  | ||||||
| import org.slf4j.LoggerFactory; |  | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; |  | ||||||
|  |  | ||||||
| /** | /** | ||||||
|  * Abstract helper class to call the Ollama API server. |  * Abstract helper class to call the Ollama API server. | ||||||
|  */ |  */ | ||||||
| @@ -56,8 +55,7 @@ public abstract class OllamaEndpointCaller { | |||||||
|      * @throws IOException          in case the responseStream can not be read |      * @throws IOException          in case the responseStream can not be read | ||||||
|      * @throws InterruptedException in case the server is not reachable or network issues happen |      * @throws InterruptedException in case the server is not reachable or network issues happen | ||||||
|      */ |      */ | ||||||
|     public OllamaResult callSync(OllamaRequestBody body)  throws OllamaBaseException, IOException, InterruptedException{ |     public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException { | ||||||
|  |  | ||||||
|         // Create Request |         // Create Request | ||||||
|         long startTime = System.currentTimeMillis(); |         long startTime = System.currentTimeMillis(); | ||||||
|         HttpClient httpClient = HttpClient.newHttpClient(); |         HttpClient httpClient = HttpClient.newHttpClient(); | ||||||
| @@ -71,7 +69,6 @@ public abstract class OllamaEndpointCaller { | |||||||
|         HttpResponse<InputStream> response = |         HttpResponse<InputStream> response = | ||||||
|                 httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); |                 httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); | ||||||
|  |  | ||||||
|          |  | ||||||
|         int statusCode = response.statusCode(); |         int statusCode = response.statusCode(); | ||||||
|         InputStream responseBodyStream = response.body(); |         InputStream responseBodyStream = response.body(); | ||||||
|         StringBuilder responseBuffer = new StringBuilder(); |         StringBuilder responseBuffer = new StringBuilder(); | ||||||
| @@ -96,7 +93,7 @@ public abstract class OllamaEndpointCaller { | |||||||
|                             OllamaErrorResponseModel.class); |                             OllamaErrorResponseModel.class); | ||||||
|                     responseBuffer.append(ollamaResponseModel.getError()); |                     responseBuffer.append(ollamaResponseModel.getError()); | ||||||
|                 } else { |                 } else { | ||||||
|           boolean finished = parseResponseAndAddToBuffer(line,responseBuffer); |                     boolean finished = parseResponseAndAddToBuffer(line, responseBuffer); | ||||||
|                     if (finished) { |                     if (finished) { | ||||||
|                         break; |                         break; | ||||||
|                     } |                     } | ||||||
|   | |||||||
| @@ -1,20 +1,20 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.core.models.request; | package io.github.amithkoujalgi.ollama4j.core.models.request; | ||||||
|  |  | ||||||
| import java.io.IOException; |  | ||||||
| import org.slf4j.Logger; |  | ||||||
| import org.slf4j.LoggerFactory; |  | ||||||
|  |  | ||||||
| import com.fasterxml.jackson.core.JsonProcessingException; | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; | import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel; | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver; | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaStreamHandler; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  | import org.slf4j.Logger; | ||||||
|  | import org.slf4j.LoggerFactory; | ||||||
|  |  | ||||||
| public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{ | import java.io.IOException; | ||||||
|  |  | ||||||
|  | public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller { | ||||||
|  |  | ||||||
|     private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class); |     private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class); | ||||||
|  |  | ||||||
| @@ -34,12 +34,12 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{ | |||||||
|         try { |         try { | ||||||
|             OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); |             OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); | ||||||
|             responseBuffer.append(ollamaResponseModel.getResponse()); |             responseBuffer.append(ollamaResponseModel.getResponse()); | ||||||
|                     if(streamObserver != null) { |             if (streamObserver != null) { | ||||||
|                 streamObserver.notify(ollamaResponseModel); |                 streamObserver.notify(ollamaResponseModel); | ||||||
|             } |             } | ||||||
|             return ollamaResponseModel.isDone(); |             return ollamaResponseModel.isDone(); | ||||||
|         } catch (JsonProcessingException e) { |         } catch (JsonProcessingException e) { | ||||||
|                     LOG.error("Error parsing the Ollama chat response!",e); |             LOG.error("Error parsing the Ollama generate response!", e); | ||||||
|             return true; |             return true; | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
| @@ -49,6 +49,4 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{ | |||||||
|         streamObserver = new OllamaGenerateStreamObserver(streamHandler); |         streamObserver = new OllamaGenerateStreamObserver(streamHandler); | ||||||
|         return super.callSync(body); |         return super.callSync(body); | ||||||
|     } |     } | ||||||
|      |  | ||||||
|      |  | ||||||
| } | } | ||||||
|   | |||||||
| @@ -0,0 +1,35 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.tools; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
|  | import lombok.AllArgsConstructor; | ||||||
|  | import lombok.Data; | ||||||
|  | import lombok.NoArgsConstructor; | ||||||
|  |  | ||||||
|  | import java.util.ArrayList; | ||||||
|  | import java.util.List; | ||||||
|  | import java.util.Map; | ||||||
|  |  | ||||||
|  | @Data | ||||||
|  | @NoArgsConstructor | ||||||
|  | @AllArgsConstructor | ||||||
|  | public class OllamaToolsResult { | ||||||
|  |     private OllamaResult modelResult; | ||||||
|  |     private Map<ToolFunctionCallSpec, Object> toolResults; | ||||||
|  |  | ||||||
|  |     public List<ToolResult> getToolResults() { | ||||||
|  |         List<ToolResult> results = new ArrayList<>(); | ||||||
|  |         for (Map.Entry<ToolFunctionCallSpec, Object> r : this.toolResults.entrySet()) { | ||||||
|  |             results.add(new ToolResult(r.getKey().getName(), r.getKey().getArguments(), r.getValue())); | ||||||
|  |         } | ||||||
|  |         return results; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Data | ||||||
|  |     @NoArgsConstructor | ||||||
|  |     @AllArgsConstructor | ||||||
|  |     public static class ToolResult { | ||||||
|  |         private String functionName; | ||||||
|  |         private Map<String, Object> functionArguments; | ||||||
|  |         private Object result; | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,8 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.tools; | ||||||
|  |  | ||||||
|  | import java.util.Map; | ||||||
|  |  | ||||||
|  | @FunctionalInterface | ||||||
|  | public interface ToolFunction { | ||||||
|  |     Object apply(Map<String, Object> arguments); | ||||||
|  | } | ||||||
| @@ -0,0 +1,16 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.tools; | ||||||
|  |  | ||||||
|  | import lombok.AllArgsConstructor; | ||||||
|  | import lombok.Data; | ||||||
|  | import lombok.NoArgsConstructor; | ||||||
|  |  | ||||||
|  | import java.util.Map; | ||||||
|  |  | ||||||
|  | @Data | ||||||
|  | @AllArgsConstructor | ||||||
|  | @NoArgsConstructor | ||||||
|  | public class ToolFunctionCallSpec { | ||||||
|  |     private String name; | ||||||
|  |     private Map<String, Object> arguments; | ||||||
|  | } | ||||||
|  |  | ||||||
| @@ -0,0 +1,16 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.tools; | ||||||
|  |  | ||||||
|  | import java.util.HashMap; | ||||||
|  | import java.util.Map; | ||||||
|  |  | ||||||
|  | public class ToolRegistry { | ||||||
|  |     private final Map<String, ToolFunction> functionMap = new HashMap<>(); | ||||||
|  |  | ||||||
|  |     public ToolFunction getFunction(String name) { | ||||||
|  |         return functionMap.get(name); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public void addFunction(String name, ToolFunction function) { | ||||||
|  |         functionMap.put(name, function); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,113 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.core.tools; | ||||||
|  |  | ||||||
|  | import com.fasterxml.jackson.annotation.JsonIgnore; | ||||||
|  | import com.fasterxml.jackson.annotation.JsonIgnoreProperties; | ||||||
|  | import com.fasterxml.jackson.annotation.JsonInclude; | ||||||
|  | import com.fasterxml.jackson.annotation.JsonProperty; | ||||||
|  | import com.fasterxml.jackson.core.JsonProcessingException; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  | import lombok.Builder; | ||||||
|  | import lombok.Data; | ||||||
|  |  | ||||||
|  | import java.util.ArrayList; | ||||||
|  | import java.util.HashMap; | ||||||
|  | import java.util.List; | ||||||
|  | import java.util.Map; | ||||||
|  |  | ||||||
|  | public class Tools { | ||||||
|  |     @Data | ||||||
|  |     @Builder | ||||||
|  |     public static class ToolSpecification { | ||||||
|  |         private String functionName; | ||||||
|  |         private String functionDescription; | ||||||
|  |         private Map<String, PromptFuncDefinition.Property> properties; | ||||||
|  |         private ToolFunction toolDefinition; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Data | ||||||
|  |     @JsonIgnoreProperties(ignoreUnknown = true) | ||||||
|  |     public static class PromptFuncDefinition { | ||||||
|  |         private String type; | ||||||
|  |         private PromptFuncSpec function; | ||||||
|  |  | ||||||
|  |         @Data | ||||||
|  |         public static class PromptFuncSpec { | ||||||
|  |             private String name; | ||||||
|  |             private String description; | ||||||
|  |             private Parameters parameters; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         @Data | ||||||
|  |         public static class Parameters { | ||||||
|  |             private String type; | ||||||
|  |             private Map<String, Property> properties; | ||||||
|  |             private List<String> required; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         @Data | ||||||
|  |         @Builder | ||||||
|  |         public static class Property { | ||||||
|  |             private String type; | ||||||
|  |             private String description; | ||||||
|  |             @JsonProperty("enum") | ||||||
|  |             @JsonInclude(JsonInclude.Include.NON_NULL) | ||||||
|  |             private List<String> enumValues; | ||||||
|  |             @JsonIgnore | ||||||
|  |             private boolean required; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public static class PropsBuilder { | ||||||
|  |         private final Map<String, PromptFuncDefinition.Property> props = new HashMap<>(); | ||||||
|  |  | ||||||
|  |         public PropsBuilder withProperty(String key, PromptFuncDefinition.Property property) { | ||||||
|  |             props.put(key, property); | ||||||
|  |             return this; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         public Map<String, PromptFuncDefinition.Property> build() { | ||||||
|  |             return props; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public static class PromptBuilder { | ||||||
|  |         private final List<PromptFuncDefinition> tools = new ArrayList<>(); | ||||||
|  |  | ||||||
|  |         private String promptText; | ||||||
|  |  | ||||||
|  |         public String build() throws JsonProcessingException { | ||||||
|  |             return "[AVAILABLE_TOOLS] " + Utils.getObjectMapper().writeValueAsString(tools) + "[/AVAILABLE_TOOLS][INST] " + promptText + " [/INST]"; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         public PromptBuilder withPrompt(String prompt) throws JsonProcessingException { | ||||||
|  |             promptText = prompt; | ||||||
|  |             return this; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         public PromptBuilder withToolSpecification(ToolSpecification spec) { | ||||||
|  |             PromptFuncDefinition def = new PromptFuncDefinition(); | ||||||
|  |             def.setType("function"); | ||||||
|  |  | ||||||
|  |             PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec(); | ||||||
|  |             functionDetail.setName(spec.getFunctionName()); | ||||||
|  |             functionDetail.setDescription(spec.getFunctionDescription()); | ||||||
|  |  | ||||||
|  |             PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters(); | ||||||
|  |             parameters.setType("object"); | ||||||
|  |             parameters.setProperties(spec.getProperties()); | ||||||
|  |  | ||||||
|  |             List<String> requiredValues = new ArrayList<>(); | ||||||
|  |             for (Map.Entry<String, PromptFuncDefinition.Property> p : spec.getProperties().entrySet()) { | ||||||
|  |                 if (p.getValue().isRequired()) { | ||||||
|  |                     requiredValues.add(p.getKey()); | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |             parameters.setRequired(requiredValues); | ||||||
|  |             functionDetail.setParameters(parameters); | ||||||
|  |             def.setFunction(functionDetail); | ||||||
|  |  | ||||||
|  |             tools.add(def); | ||||||
|  |             return this; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  | } | ||||||
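To make the builder flow above concrete, here is a hedged sketch of assembling a tool-aware prompt: a ToolSpecification with one property is built via PropsBuilder, then wrapped by PromptBuilder into the [AVAILABLE_TOOLS]...[/AVAILABLE_TOOLS][INST]...[/INST] string. The function name, description, and property values are illustrative.

package io.github.amithkoujalgi.ollama4j.core.tools;

import com.fasterxml.jackson.core.JsonProcessingException;

public class ToolsPromptExample {
    public static void main(String[] args) throws JsonProcessingException {
        // Describe one tool with a single required string property.
        Tools.ToolSpecification spec = Tools.ToolSpecification.builder()
                .functionName("get-weather")
                .functionDescription("Get the current weather for a city")
                .properties(new Tools.PropsBuilder()
                        .withProperty("city", Tools.PromptFuncDefinition.Property.builder()
                                .type("string")
                                .description("Name of the city")
                                .required(true)
                                .build())
                        .build())
                .build();

        // Serialize the tool definitions and wrap them around the user prompt.
        String prompt = new Tools.PromptBuilder()
                .withToolSpecification(spec)
                .withPrompt("What is the weather in Paris today?")
                .build();
        System.out.println(prompt);
    }
}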
| @@ -9,16 +9,22 @@ package io.github.amithkoujalgi.ollama4j.core.types; | |||||||
| @SuppressWarnings("ALL") | @SuppressWarnings("ALL") | ||||||
| public class OllamaModelType { | public class OllamaModelType { | ||||||
|     public static final String GEMMA = "gemma"; |     public static final String GEMMA = "gemma"; | ||||||
|  |     public static final String GEMMA2 = "gemma2"; | ||||||
|  |  | ||||||
|  |  | ||||||
|     public static final String LLAMA2 = "llama2"; |     public static final String LLAMA2 = "llama2"; | ||||||
|  |     public static final String LLAMA3 = "llama3"; | ||||||
|     public static final String MISTRAL = "mistral"; |     public static final String MISTRAL = "mistral"; | ||||||
|     public static final String MIXTRAL = "mixtral"; |     public static final String MIXTRAL = "mixtral"; | ||||||
|     public static final String LLAVA = "llava"; |     public static final String LLAVA = "llava"; | ||||||
|  |     public static final String LLAVA_PHI3 = "llava-phi3"; | ||||||
|     public static final String NEURAL_CHAT = "neural-chat"; |     public static final String NEURAL_CHAT = "neural-chat"; | ||||||
|     public static final String CODELLAMA = "codellama"; |     public static final String CODELLAMA = "codellama"; | ||||||
|     public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral"; |     public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral"; | ||||||
|     public static final String MISTRAL_OPENORCA = "mistral-openorca"; |     public static final String MISTRAL_OPENORCA = "mistral-openorca"; | ||||||
|     public static final String LLAMA2_UNCENSORED = "llama2-uncensored"; |     public static final String LLAMA2_UNCENSORED = "llama2-uncensored"; | ||||||
|     public static final String PHI = "phi"; |     public static final String PHI = "phi"; | ||||||
|  |     public static final String PHI3 = "phi3"; | ||||||
|     public static final String ORCA_MINI = "orca-mini"; |     public static final String ORCA_MINI = "orca-mini"; | ||||||
|     public static final String DEEPSEEK_CODER = "deepseek-coder"; |     public static final String DEEPSEEK_CODER = "deepseek-coder"; | ||||||
|     public static final String DOLPHIN_MISTRAL = "dolphin-mistral"; |     public static final String DOLPHIN_MISTRAL = "dolphin-mistral"; | ||||||
| @@ -27,6 +33,8 @@ public class OllamaModelType { | |||||||
|     public static final String ZEPHYR = "zephyr"; |     public static final String ZEPHYR = "zephyr"; | ||||||
|     public static final String OPENHERMES = "openhermes"; |     public static final String OPENHERMES = "openhermes"; | ||||||
|     public static final String QWEN = "qwen"; |     public static final String QWEN = "qwen"; | ||||||
|  |  | ||||||
|  |     public static final String QWEN2 = "qwen2"; | ||||||
|     public static final String WIZARDCODER = "wizardcoder"; |     public static final String WIZARDCODER = "wizardcoder"; | ||||||
|     public static final String LLAMA2_CHINESE = "llama2-chinese"; |     public static final String LLAMA2_CHINESE = "llama2-chinese"; | ||||||
|     public static final String TINYLLAMA = "tinyllama"; |     public static final String TINYLLAMA = "tinyllama"; | ||||||
| @@ -76,4 +84,5 @@ public class OllamaModelType { | |||||||
|     public static final String NOTUS = "notus"; |     public static final String NOTUS = "notus"; | ||||||
|     public static final String DUCKDB_NSQL = "duckdb-nsql"; |     public static final String DUCKDB_NSQL = "duckdb-nsql"; | ||||||
|     public static final String ALL_MINILM = "all-minilm"; |     public static final String ALL_MINILM = "all-minilm"; | ||||||
|  |     public static final String CODESTRAL = "codestral"; | ||||||
| } | } | ||||||
|   | |||||||
| @@ -8,10 +8,18 @@ import java.net.URISyntaxException; | |||||||
| import java.net.URL; | import java.net.URL; | ||||||
|  |  | ||||||
| import com.fasterxml.jackson.databind.ObjectMapper; | import com.fasterxml.jackson.databind.ObjectMapper; | ||||||
|  | import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; | ||||||
|  |  | ||||||
| public class Utils { | public class Utils { | ||||||
|  |  | ||||||
|  |   private static ObjectMapper objectMapper; | ||||||
|  |  | ||||||
|   public static ObjectMapper getObjectMapper() { |   public static ObjectMapper getObjectMapper() { | ||||||
|     return new ObjectMapper(); |     if(objectMapper == null) { | ||||||
|  |       objectMapper = new ObjectMapper(); | ||||||
|  |       objectMapper.registerModule(new JavaTimeModule()); | ||||||
|  |     } | ||||||
|  |     return objectMapper; | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   public static byte[] loadImageBytesFromUrl(String imageUrl) |   public static byte[] loadImageBytesFromUrl(String imageUrl) | ||||||
|   | |||||||
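The switch to a cached ObjectMapper with the JavaTimeModule registered is what lets Jackson bind java.time types, such as the modified_at timestamps exercised by the new TestModelRequestSerialization tests further down. A small sketch of the behavior, assuming the timestamp strings follow ISO-8601 as in those tests:

import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

import java.time.OffsetDateTime;

public class ObjectMapperExample {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = Utils.getObjectMapper();
        // Binding an ISO-8601 timestamp only works because JavaTimeModule is registered.
        OffsetDateTime modifiedAt = mapper.readValue(
                "\"2023-11-04T14:56:49.277302595-07:00\"", OffsetDateTime.class);
        System.out.println(modifiedAt);
        // The mapper is lazily created once and reused on subsequent calls.
        System.out.println(mapper == Utils.getObjectMapper()); // true
    }
}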
| @@ -1,7 +1,5 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.integrationtests; | package io.github.amithkoujalgi.ollama4j.integrationtests; | ||||||
|  |  | ||||||
| import static org.junit.jupiter.api.Assertions.*; |  | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; | import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; | ||||||
| @@ -10,9 +8,16 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole; | |||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
|  | import lombok.Data; | ||||||
|  | import org.junit.jupiter.api.BeforeEach; | ||||||
|  | import org.junit.jupiter.api.Order; | ||||||
|  | import org.junit.jupiter.api.Test; | ||||||
|  | import org.slf4j.Logger; | ||||||
|  | import org.slf4j.LoggerFactory; | ||||||
|  |  | ||||||
| import java.io.File; | import java.io.File; | ||||||
| import java.io.IOException; | import java.io.IOException; | ||||||
| import java.io.InputStream; | import java.io.InputStream; | ||||||
| @@ -22,12 +27,8 @@ import java.net.http.HttpConnectTimeoutException; | |||||||
| import java.util.List; | import java.util.List; | ||||||
| import java.util.Objects; | import java.util.Objects; | ||||||
| import java.util.Properties; | import java.util.Properties; | ||||||
| import lombok.Data; |  | ||||||
| import org.junit.jupiter.api.BeforeEach; | import static org.junit.jupiter.api.Assertions.*; | ||||||
| import org.junit.jupiter.api.Order; |  | ||||||
| import org.junit.jupiter.api.Test; |  | ||||||
| import org.slf4j.Logger; |  | ||||||
| import org.slf4j.LoggerFactory; |  | ||||||
|  |  | ||||||
| class TestRealAPIs { | class TestRealAPIs { | ||||||
|  |  | ||||||
| @@ -116,6 +117,7 @@ class TestRealAPIs { | |||||||
|                     ollamaAPI.generate( |                     ollamaAPI.generate( | ||||||
|                             config.getModel(), |                             config.getModel(), | ||||||
|                             "What is the capital of France? And what's France's connection with Mona Lisa?", |                             "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||||
|  |                             false, | ||||||
|                             new OptionsBuilder().build()); |                             new OptionsBuilder().build()); | ||||||
|             assertNotNull(result); |             assertNotNull(result); | ||||||
|             assertNotNull(result.getResponse()); |             assertNotNull(result.getResponse()); | ||||||
| @@ -130,11 +132,10 @@ class TestRealAPIs { | |||||||
|     void testAskModelWithDefaultOptionsStreamed() { |     void testAskModelWithDefaultOptionsStreamed() { | ||||||
|         testEndpointReachability(); |         testEndpointReachability(); | ||||||
|         try { |         try { | ||||||
|  |  | ||||||
|             StringBuffer sb = new StringBuffer(""); |             StringBuffer sb = new StringBuffer(""); | ||||||
|  |  | ||||||
|             OllamaResult result = ollamaAPI.generate(config.getModel(), |             OllamaResult result = ollamaAPI.generate(config.getModel(), | ||||||
|                     "What is the capital of France? And what's France's connection with Mona Lisa?", |                     "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||||
|  |                     false, | ||||||
|                     new OptionsBuilder().build(), (s) -> { |                     new OptionsBuilder().build(), (s) -> { | ||||||
|                         LOG.info(s); |                         LOG.info(s); | ||||||
|                         String substring = s.substring(sb.toString().length(), s.length()); |                         String substring = s.substring(sb.toString().length(), s.length()); | ||||||
| @@ -160,6 +161,7 @@ class TestRealAPIs { | |||||||
|                     ollamaAPI.generate( |                     ollamaAPI.generate( | ||||||
|                             config.getModel(), |                             config.getModel(), | ||||||
|                             "What is the capital of France? And what's France's connection with Mona Lisa?", |                             "What is the capital of France? And what's France's connection with Mona Lisa?", | ||||||
|  |                             true, | ||||||
|                             new OptionsBuilder().setTemperature(0.9f).build()); |                             new OptionsBuilder().setTemperature(0.9f).build()); | ||||||
|             assertNotNull(result); |             assertNotNull(result); | ||||||
|             assertNotNull(result.getResponse()); |             assertNotNull(result.getResponse()); | ||||||
| @@ -177,13 +179,13 @@ class TestRealAPIs { | |||||||
|             OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); |             OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); | ||||||
|             OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") |             OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") | ||||||
|                     .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!") |                     .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!") | ||||||
|              .withMessage(OllamaChatMessageRole.USER,"And what is the second larges city?") |                     .withMessage(OllamaChatMessageRole.USER, "And what is the second larges city?") | ||||||
|                     .build(); |                     .build(); | ||||||
|  |  | ||||||
|             OllamaChatResult chatResult = ollamaAPI.chat(requestModel); |             OllamaChatResult chatResult = ollamaAPI.chat(requestModel); | ||||||
|             assertNotNull(chatResult); |             assertNotNull(chatResult); | ||||||
|             assertFalse(chatResult.getResponse().isBlank()); |             assertFalse(chatResult.getResponse().isBlank()); | ||||||
|       assertEquals(4,chatResult.getChatHistory().size()); |             assertEquals(4, chatResult.getChatHistory().size()); | ||||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { |         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|             fail(e); |             fail(e); | ||||||
|         } |         } | ||||||
| @@ -223,7 +225,7 @@ class TestRealAPIs { | |||||||
|  |  | ||||||
|             StringBuffer sb = new StringBuffer(""); |             StringBuffer sb = new StringBuffer(""); | ||||||
|  |  | ||||||
|       OllamaChatResult chatResult = ollamaAPI.chat(requestModel,(s) -> { |             OllamaChatResult chatResult = ollamaAPI.chat(requestModel, (s) -> { | ||||||
|                 LOG.info(s); |                 LOG.info(s); | ||||||
|                 String substring = s.substring(sb.toString().length(), s.length()); |                 String substring = s.substring(sb.toString().length(), s.length()); | ||||||
|                 LOG.info(substring); |                 LOG.info(substring); | ||||||
|   | |||||||
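For readers tracking the signature change exercised in these tests, generate(...) now takes an extra boolean between the prompt and the options (passed as false or true above). A hedged sketch of a direct call follows; the host URL and model are chosen purely for illustration.

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class GenerateExample {
    public static void main(String[] args) throws Exception {
        // Host URL and model are illustrative; any reachable Ollama server works.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
        OllamaResult result = ollamaAPI.generate(
                OllamaModelType.LLAMA3,
                "What is the capital of France?",
                false,                          // new boolean flag added by this change
                new OptionsBuilder().build());
        System.out.println(result.getResponse());
    }
}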
| @@ -1,20 +1,21 @@ | |||||||
| package io.github.amithkoujalgi.ollama4j.unittests; | package io.github.amithkoujalgi.ollama4j.unittests; | ||||||
|  |  | ||||||
| import static org.mockito.Mockito.*; |  | ||||||
|  |  | ||||||
| import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; | import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultCallback; | import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultStreamer; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType; | import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
|  | import org.junit.jupiter.api.Test; | ||||||
|  | import org.mockito.Mockito; | ||||||
|  |  | ||||||
| import java.io.IOException; | import java.io.IOException; | ||||||
| import java.net.URISyntaxException; | import java.net.URISyntaxException; | ||||||
| import java.util.ArrayList; | import java.util.ArrayList; | ||||||
| import java.util.Collections; | import java.util.Collections; | ||||||
| import org.junit.jupiter.api.Test; |  | ||||||
| import org.mockito.Mockito; | import static org.mockito.Mockito.*; | ||||||
|  |  | ||||||
| class TestMockedAPIs { | class TestMockedAPIs { | ||||||
|     @Test |     @Test | ||||||
| @@ -103,10 +104,10 @@ class TestMockedAPIs { | |||||||
|         String prompt = "some prompt text"; |         String prompt = "some prompt text"; | ||||||
|         OptionsBuilder optionsBuilder = new OptionsBuilder(); |         OptionsBuilder optionsBuilder = new OptionsBuilder(); | ||||||
|         try { |         try { | ||||||
|       when(ollamaAPI.generate(model, prompt, optionsBuilder.build())) |             when(ollamaAPI.generate(model, prompt, false, optionsBuilder.build())) | ||||||
|                     .thenReturn(new OllamaResult("", 0, 200)); |                     .thenReturn(new OllamaResult("", 0, 200)); | ||||||
|       ollamaAPI.generate(model, prompt, optionsBuilder.build()); |             ollamaAPI.generate(model, prompt, false, optionsBuilder.build()); | ||||||
|       verify(ollamaAPI, times(1)).generate(model, prompt, optionsBuilder.build()); |             verify(ollamaAPI, times(1)).generate(model, prompt, false, optionsBuilder.build()); | ||||||
|         } catch (IOException | OllamaBaseException | InterruptedException e) { |         } catch (IOException | OllamaBaseException | InterruptedException e) { | ||||||
|             throw new RuntimeException(e); |             throw new RuntimeException(e); | ||||||
|         } |         } | ||||||
| @@ -155,9 +156,9 @@ class TestMockedAPIs { | |||||||
|         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); |         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); | ||||||
|         String model = OllamaModelType.LLAMA2; |         String model = OllamaModelType.LLAMA2; | ||||||
|         String prompt = "some prompt text"; |         String prompt = "some prompt text"; | ||||||
|     when(ollamaAPI.generateAsync(model, prompt)) |         when(ollamaAPI.generateAsync(model, prompt, false)) | ||||||
|         .thenReturn(new OllamaAsyncResultCallback(null, null, 3)); |                 .thenReturn(new OllamaAsyncResultStreamer(null, null, 3)); | ||||||
|     ollamaAPI.generateAsync(model, prompt); |         ollamaAPI.generateAsync(model, prompt, false); | ||||||
|     verify(ollamaAPI, times(1)).generateAsync(model, prompt); |         verify(ollamaAPI, times(1)).generateAsync(model, prompt, false); | ||||||
|     } |     } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -6,30 +6,30 @@ import com.fasterxml.jackson.core.JsonProcessingException; | |||||||
| import com.fasterxml.jackson.databind.ObjectMapper; | import com.fasterxml.jackson.databind.ObjectMapper; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | import io.github.amithkoujalgi.ollama4j.core.utils.Utils; | ||||||
|  |  | ||||||
| public abstract class AbstractRequestSerializationTest<T> { | public abstract class AbstractSerializationTest<T> { | ||||||
|  |  | ||||||
|     protected ObjectMapper mapper = Utils.getObjectMapper(); |     protected ObjectMapper mapper = Utils.getObjectMapper(); | ||||||
|  |  | ||||||
|     protected String serializeRequest(T req) { |     protected String serialize(T obj) { | ||||||
|         try { |         try { | ||||||
|             return mapper.writeValueAsString(req); |             return mapper.writeValueAsString(obj); | ||||||
|         } catch (JsonProcessingException e) { |         } catch (JsonProcessingException e) { | ||||||
|             fail("Could not serialize request!", e); |             fail("Could not serialize request!", e); | ||||||
|             return null; |             return null; | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     protected T deserializeRequest(String jsonRequest, Class<T> requestClass) { |     protected T deserialize(String jsonObject, Class<T> deserializationClass) { | ||||||
|         try { |         try { | ||||||
|             return mapper.readValue(jsonRequest, requestClass); |             return mapper.readValue(jsonObject, deserializationClass); | ||||||
|         } catch (JsonProcessingException e) { |         } catch (JsonProcessingException e) { | ||||||
|             fail("Could not deserialize jsonRequest!", e); |             fail("Could not deserialize jsonObject!", e); | ||||||
|             return null; |             return null; | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     protected void assertEqualsAfterUnmarshalling(T unmarshalledRequest, |     protected void assertEqualsAfterUnmarshalling(T unmarshalledObject, | ||||||
|         T req) { |         T req) { | ||||||
|         assertEquals(req, unmarshalledRequest); |         assertEquals(req, unmarshalledObject); | ||||||
|     } |     } | ||||||
| } | } | ||||||
| @@ -14,7 +14,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilde | |||||||
| import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; | import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
|  |  | ||||||
| public class TestChatRequestSerialization extends AbstractRequestSerializationTest<OllamaChatRequestModel>{ | public class TestChatRequestSerialization extends AbstractSerializationTest<OllamaChatRequestModel> { | ||||||
|  |  | ||||||
|     private OllamaChatRequestBuilder builder; |     private OllamaChatRequestBuilder builder; | ||||||
|  |  | ||||||
| @@ -26,8 +26,8 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe | |||||||
|     @Test |     @Test | ||||||
|     public void testRequestOnlyMandatoryFields() { |     public void testRequestOnlyMandatoryFields() { | ||||||
|         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build(); |         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build(); | ||||||
|         String jsonRequest = serializeRequest(req); |         String jsonRequest = serialize(req); | ||||||
|         assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req); |         assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
| @@ -35,28 +35,43 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe | |||||||
|         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt") |         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt") | ||||||
|         .withMessage(OllamaChatMessageRole.USER, "Some prompt") |         .withMessage(OllamaChatMessageRole.USER, "Some prompt") | ||||||
|         .build(); |         .build(); | ||||||
|         String jsonRequest = serializeRequest(req); |         String jsonRequest = serialize(req); | ||||||
|         assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req); |         assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
|     public void testRequestWithMessageAndImage() { |     public void testRequestWithMessageAndImage() { | ||||||
|         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt", |         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt", | ||||||
|                 List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build(); |                 List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build(); | ||||||
|         String jsonRequest = serializeRequest(req); |         String jsonRequest = serialize(req); | ||||||
|         assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req); |         assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
|     public void testRequestWithOptions() { |     public void testRequestWithOptions() { | ||||||
|         OptionsBuilder b = new OptionsBuilder(); |         OptionsBuilder b = new OptionsBuilder(); | ||||||
|         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") |         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") | ||||||
|                 .withOptions(b.setMirostat(1).build()).build(); |             .withOptions(b.setMirostat(1).build()) | ||||||
|  |             .withOptions(b.setTemperature(1L).build()) | ||||||
|  |             .withOptions(b.setMirostatEta(1L).build()) | ||||||
|  |             .withOptions(b.setMirostatTau(1L).build()) | ||||||
|  |             .withOptions(b.setNumGpu(1).build()) | ||||||
|  |             .withOptions(b.setSeed(1).build()) | ||||||
|  |             .withOptions(b.setTopK(1).build()) | ||||||
|  |             .withOptions(b.setTopP(1).build()) | ||||||
|  |             .build(); | ||||||
|  |  | ||||||
|         String jsonRequest = serializeRequest(req); |         String jsonRequest = serialize(req); | ||||||
|         OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaChatRequestModel.class); |         OllamaChatRequestModel deserializeRequest = deserialize(jsonRequest, OllamaChatRequestModel.class); | ||||||
|         assertEqualsAfterUnmarshalling(deserializeRequest, req); |         assertEqualsAfterUnmarshalling(deserializeRequest, req); | ||||||
|         assertEquals(1, deserializeRequest.getOptions().get("mirostat")); |         assertEquals(1, deserializeRequest.getOptions().get("mirostat")); | ||||||
|  |         assertEquals(1.0, deserializeRequest.getOptions().get("temperature")); | ||||||
|  |         assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_eta")); | ||||||
|  |         assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_tau")); | ||||||
|  |         assertEquals(1, deserializeRequest.getOptions().get("num_gpu")); | ||||||
|  |         assertEquals(1, deserializeRequest.getOptions().get("seed")); | ||||||
|  |         assertEquals(1, deserializeRequest.getOptions().get("top_k")); | ||||||
|  |         assertEquals(1.0, deserializeRequest.getOptions().get("top_p")); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
| @@ -64,11 +79,35 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe | |||||||
|         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") |         OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") | ||||||
|                 .withGetJsonResponse().build(); |                 .withGetJsonResponse().build(); | ||||||
|  |  | ||||||
|         String jsonRequest = serializeRequest(req); |         String jsonRequest = serialize(req); | ||||||
|         // no jackson deserialization as format property is not boolean ==> omit as deserialization |         // no jackson deserialization as format property is not boolean ==> omit as deserialization | ||||||
|         // of request is never used in real code anyways |         // of request is never used in real code anyways | ||||||
|         JSONObject jsonObject = new JSONObject(jsonRequest); |         JSONObject jsonObject = new JSONObject(jsonRequest); | ||||||
|         String requestFormatProperty = jsonObject.getString("format"); |         String requestFormatProperty = jsonObject.getString("format"); | ||||||
|         assertEquals("json", requestFormatProperty); |         assertEquals("json", requestFormatProperty); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testWithTemplate() { | ||||||
|  |         OllamaChatRequestModel req = builder.withTemplate("System Template") | ||||||
|  |             .build(); | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaChatRequestModel.class), req); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testWithStreaming() { | ||||||
|  |         OllamaChatRequestModel req = builder.withStreaming().build(); | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).isStream(), true); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testWithKeepAlive() { | ||||||
|  |         String expectedKeepAlive = "5m"; | ||||||
|  |         OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive) | ||||||
|  |             .build(); | ||||||
|  |         String jsonRequest = serialize(req); | ||||||
|  |         assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive); | ||||||
|  |     } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -7,7 +7,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsR | |||||||
| import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; | import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
|  |  | ||||||
| public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializationTest<OllamaEmbeddingsRequestModel>{ | public class TestEmbeddingsRequestSerialization extends AbstractSerializationTest<OllamaEmbeddingsRequestModel> { | ||||||
|  |  | ||||||
|         private OllamaEmbeddingsRequestBuilder builder; |         private OllamaEmbeddingsRequestBuilder builder; | ||||||
|  |  | ||||||
| @@ -19,8 +19,8 @@ public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializa | |||||||
|             @Test |             @Test | ||||||
|     public void testRequestOnlyMandatoryFields() { |     public void testRequestOnlyMandatoryFields() { | ||||||
|         OllamaEmbeddingsRequestModel req = builder.build(); |         OllamaEmbeddingsRequestModel req = builder.build(); | ||||||
|         String jsonRequest = serializeRequest(req); |         String jsonRequest = serialize(req); | ||||||
|         assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class), req); |         assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class), req); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|         @Test |         @Test | ||||||
| @@ -29,8 +29,8 @@ public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializa | |||||||
|             OllamaEmbeddingsRequestModel req = builder |             OllamaEmbeddingsRequestModel req = builder | ||||||
|                     .withOptions(b.setMirostat(1).build()).build(); |                     .withOptions(b.setMirostat(1).build()).build(); | ||||||
|  |  | ||||||
|             String jsonRequest = serializeRequest(req); |             String jsonRequest = serialize(req); | ||||||
|             OllamaEmbeddingsRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class); |             OllamaEmbeddingsRequestModel deserializeRequest = deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class); | ||||||
|             assertEqualsAfterUnmarshalling(deserializeRequest, req); |             assertEqualsAfterUnmarshalling(deserializeRequest, req); | ||||||
|             assertEquals(1, deserializeRequest.getOptions().get("mirostat")); |             assertEquals(1, deserializeRequest.getOptions().get("mirostat")); | ||||||
|         } |         } | ||||||
|   | |||||||
| @@ -11,7 +11,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateReque | |||||||
| import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; | import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; | ||||||
| import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; | ||||||
|  |  | ||||||
| public class TestGenerateRequestSerialization extends AbstractRequestSerializationTest<OllamaGenerateRequestModel>{ | public class TestGenerateRequestSerialization extends AbstractSerializationTest<OllamaGenerateRequestModel> { | ||||||
|  |  | ||||||
|     private OllamaGenerateRequestBuilder builder; |     private OllamaGenerateRequestBuilder builder; | ||||||
|  |  | ||||||
| @@ -24,8 +24,8 @@ public class TestGenerateRequestSerialization extends AbstractRequestSerializati | |||||||
|     public void testRequestOnlyMandatoryFields() { |     public void testRequestOnlyMandatoryFields() { | ||||||
|         OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build(); |         OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build(); | ||||||
|  |  | ||||||
|         String jsonRequest = serializeRequest(req); |         String jsonRequest = serialize(req); | ||||||
|         assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class), req); |         assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequestModel.class), req); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
| @@ -34,8 +34,8 @@ public class TestGenerateRequestSerialization extends AbstractRequestSerializati | |||||||
|         OllamaGenerateRequestModel req = |         OllamaGenerateRequestModel req = | ||||||
|                 builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build(); |                 builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build(); | ||||||
|  |  | ||||||
|         String jsonRequest = serializeRequest(req); |         String jsonRequest = serialize(req); | ||||||
|         OllamaGenerateRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class); |         OllamaGenerateRequestModel deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequestModel.class); | ||||||
|         assertEqualsAfterUnmarshalling(deserializeRequest, req); |         assertEqualsAfterUnmarshalling(deserializeRequest, req); | ||||||
|         assertEquals(1, deserializeRequest.getOptions().get("mirostat")); |         assertEquals(1, deserializeRequest.getOptions().get("mirostat")); | ||||||
|     } |     } | ||||||
| @@ -45,7 +45,7 @@ public class TestGenerateRequestSerialization extends AbstractRequestSerializati | |||||||
|         OllamaGenerateRequestModel req = |         OllamaGenerateRequestModel req = | ||||||
|                 builder.withPrompt("Some prompt").withGetJsonResponse().build(); |                 builder.withPrompt("Some prompt").withGetJsonResponse().build(); | ||||||
|  |  | ||||||
|         String jsonRequest = serializeRequest(req); |         String jsonRequest = serialize(req); | ||||||
|         // no jackson deserialization as format property is not boolean ==> omit as deserialization |         // no jackson deserialization as format property is not boolean ==> omit as deserialization | ||||||
|         // of request is never used in real code anyways |         // of request is never used in real code anyways | ||||||
|         JSONObject jsonObject = new JSONObject(jsonRequest); |         JSONObject jsonObject = new JSONObject(jsonRequest); | ||||||
|   | |||||||
| @@ -0,0 +1,42 @@ | |||||||
|  | package io.github.amithkoujalgi.ollama4j.unittests.jackson; | ||||||
|  |  | ||||||
|  | import io.github.amithkoujalgi.ollama4j.core.models.Model; | ||||||
|  | import org.junit.jupiter.api.Test; | ||||||
|  |  | ||||||
|  | public class TestModelRequestSerialization extends AbstractSerializationTest<Model> { | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testDeserializationOfModelResponseWithOffsetTime(){ | ||||||
|  |         String serializedTestStringWithOffsetTime = "{\n" | ||||||
|  |                 + "\"name\": \"codellama:13b\",\n" | ||||||
|  |                 + "\"modified_at\": \"2023-11-04T14:56:49.277302595-07:00\",\n" | ||||||
|  |                 + "\"size\": 7365960935,\n" | ||||||
|  |                 + "\"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n" | ||||||
|  |                 + "\"details\": {\n" | ||||||
|  |                 + "\"format\": \"gguf\",\n" | ||||||
|  |                 + "\"family\": \"llama\",\n" | ||||||
|  |                 + "\"families\": null,\n" | ||||||
|  |                 + "\"parameter_size\": \"13B\",\n" | ||||||
|  |                 + "\"quantization_level\": \"Q4_0\"\n" | ||||||
|  |                 + "}}"; | ||||||
|  |         deserialize(serializedTestStringWithOffsetTime,Model.class); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Test | ||||||
|  |     public void testDeserializationOfModelResponseWithZuluTime(){ | ||||||
|  |         String serializedTestStringWithZuluTimezone = "{\n" | ||||||
|  |                 + "\"name\": \"codellama:13b\",\n" | ||||||
|  |                 + "\"modified_at\": \"2023-11-04T14:56:49.277302595Z\",\n" | ||||||
|  |                 + "\"size\": 7365960935,\n" | ||||||
|  |                 + "\"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n" | ||||||
|  |                 + "\"details\": {\n" | ||||||
|  |                 + "\"format\": \"gguf\",\n" | ||||||
|  |                 + "\"family\": \"llama\",\n" | ||||||
|  |                 + "\"families\": null,\n" | ||||||
|  |                 + "\"parameter_size\": \"13B\",\n" | ||||||
|  |                 + "\"quantization_level\": \"Q4_0\"\n" | ||||||
|  |                 + "}}"; | ||||||
|  |         deserialize(serializedTestStringWithZuluTimezone,Model.class); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  | } | ||||||