From 01c5a8f07fa34f1392c6613a627ab2ab3b058db1 Mon Sep 17 00:00:00 2001
From: Amith Koujalgi
Date: Sat, 30 Dec 2023 12:23:42 +0530
Subject: [PATCH] updated readme

---
 README.md                        | 58 +++++++++++--------
 .../ollama4j/core/OllamaAPI.java | 24 ++++----
 2 files changed, 46 insertions(+), 36 deletions(-)

diff --git a/README.md b/README.md
index f6d4a52..e50996d 100644
--- a/README.md
+++ b/README.md
@@ -2,8 +2,33 @@
 ollama4j-icon
 
-A Java library (wrapper/binding)
-for [Ollama](https://github.com/jmorganca/ollama/blob/main/docs/api.md) APIs.
+A Java library (wrapper/binding) for the [Ollama](https://ollama.ai/) server.
+
+Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/).
+
+![GitHub stars](https://img.shields.io/github/stars/amithkoujalgi/ollama4j)
+![GitHub forks](https://img.shields.io/github/forks/amithkoujalgi/ollama4j)
+![GitHub watchers](https://img.shields.io/github/watchers/amithkoujalgi/ollama4j)
+![GitHub repo size](https://img.shields.io/github/repo-size/amithkoujalgi/ollama4j)
+![GitHub language count](https://img.shields.io/github/languages/count/amithkoujalgi/ollama4j)
+![GitHub top language](https://img.shields.io/github/languages/top/amithkoujalgi/ollama4j)
+![GitHub last commit](https://img.shields.io/github/last-commit/amithkoujalgi/ollama4j?color=green)
+![Hits](https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Famithkoujalgi%2Follama4j&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false)
+
+![Build Status](https://github.com/amithkoujalgi/ollama4j/actions/workflows/maven-publish.yml/badge.svg)
+
+## Table of Contents
+
+- [How does it work?](#how-does-it-work)
+- [Requirements](#requirements)
+- [Installation](#installation)
+- [API Spec](#api-spec)
+- [Demo APIs](#try-out-the-apis-with-ollama-server)
+- [Development](#development)
+- [Contributions](#get-involved)
+- [References](#references)
+
+#### How does it work?
 
 ```mermaid
 flowchart LR
 end
 ```
 
-![Build Status](https://github.com/amithkoujalgi/ollama4j/actions/workflows/maven-publish.yml/badge.svg)
-
-![GitHub stars](https://img.shields.io/github/stars/amithkoujalgi/ollama4j?style=social)
-![GitHub forks](https://img.shields.io/github/forks/amithkoujalgi/ollama4j?style=social)
-![GitHub watchers](https://img.shields.io/github/watchers/amithkoujalgi/ollama4j?style=social)
-![GitHub repo size](https://img.shields.io/github/repo-size/amithkoujalgi/ollama4j?style=plastic)
-![GitHub language count](https://img.shields.io/github/languages/count/amithkoujalgi/ollama4j?style=plastic)
-![GitHub top language](https://img.shields.io/github/languages/top/amithkoujalgi/ollama4j?style=plastic)
-![GitHub last commit](https://img.shields.io/github/last-commit/amithkoujalgi/ollama4j?color=red&style=plastic)
-![Hits](https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Famithkoujalgi%2Follama4j&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false)
-
-## Table of Contents
-
-- [Requirements](#requirements)
-- [Installation](#installation)
-- [API Spec](#api-spec)
-- [Demo APIs](#try-out-the-apis-with-ollama-server)
-- [Development](#development)
-- [Contributions](#get-involved)
-
 #### Requirements
 
 ![Java](https://img.shields.io/badge/Java-11_+-green.svg?style=just-the-message&labelColor=gray)
@@ -64,7 +69,7 @@ In your Maven project, add this dependency:
 ```
 
-Latest release:
+Latest release:
 
 ![Maven Central](https://img.shields.io/maven-central/v/io.github.amithkoujalgi/ollama4j)
@@ -76,7 +81,7 @@ Latest release:
 
 #### API Spec
 
-Find the full `Javadoc` (API specifications) [here](https://amithkoujalgi.github.io/ollama4j/).
+Find the full API specifications on the [website](https://amithkoujalgi.github.io/ollama4j/).
 
 #### Development
@@ -117,6 +122,7 @@ Actions CI workflow.
 - [x] Use lombok
 - [x] Update request body creation with Java objects
 - [ ] Async APIs for images
+- [ ] Add custom headers to requests
 - [ ] Add additional params for `ask` APIs such as:
   - `options`: additional model parameters for the Modelfile such as `temperature`
   - `system`: system prompt (overrides what is defined in the Modelfile)
@@ -138,3 +144,7 @@ of contribution is much appreciated.
 
 The nomenclature and the icon have been adopted from the incredible
 [Ollama](https://ollama.ai/) project.
+
+### References
+
+- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)
\ No newline at end of file
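For context on the `promptText` to `prompt` rename in the `OllamaAPI.java` hunks that follow, here is a minimal sketch of calling the synchronous `ask` API. The constructor argument (the Ollama server host URL), the package path of `OllamaResult`, and its `getResponse()` accessor are assumptions inferred from `this.host` and the Lombok roadmap item above, not details confirmed by this patch; the model name is only an example.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
// Package path for OllamaResult is assumed; adjust to where the class lives in this project.
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;

public class AskExample {
  public static void main(String[] args) throws Exception {
    // Assumption: the constructor takes the Ollama server host URL (11434 is Ollama's default port).
    OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");

    // Sync/blocking call: returns only after the model has produced the full response.
    // "llama2" is an example model name; any model pulled on the server works.
    OllamaResult result = ollamaAPI.ask("llama2", "Who are you?");

    // Assumption: OllamaResult exposes the response text via a getter.
    System.out.println(result.getResponse());
  }
}
```

The blocking call suits one-off prompts; `askAsync` below instead returns a callback handle rather than waiting for the full response.
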
diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java
index a801793..d1c6cee 100644
--- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java
+++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java
@@ -331,12 +331,12 @@ public class OllamaAPI {
    * Ask a question to a model running on Ollama server. This is a sync/blocking call.
    *
    * @param model the ollama model to ask the question to
-   * @param promptText the prompt/question text
+   * @param prompt the prompt/question text
    * @return OllamaResult that includes response text and time taken for response
    */
-  public OllamaResult ask(String model, String promptText)
+  public OllamaResult ask(String model, String prompt)
       throws OllamaBaseException, IOException, InterruptedException {
-    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, promptText);
+    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
     return askSync(ollamaRequestModel);
   }
 
@@ -346,11 +346,11 @@ public class OllamaAPI {
    * async/non-blocking call.
    *
    * @param model the ollama model to ask the question to
-   * @param promptText the prompt/question text
+   * @param prompt the prompt/question text
    * @return the ollama async result callback handle
    */
-  public OllamaAsyncResultCallback askAsync(String model, String promptText) {
-    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, promptText);
+  public OllamaAsyncResultCallback askAsync(String model, String prompt) {
+    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
 
     URI uri = URI.create(this.host + "/api/generate");
     OllamaAsyncResultCallback ollamaAsyncResultCallback =
@@ -365,17 +365,17 @@ public class OllamaAPI {
    * sync/blocking call.
    *
    * @param model the ollama model to ask the question to
-   * @param promptText the prompt/question text
+   * @param prompt the prompt/question text
    * @param imageFiles the list of image files to use for the question
    * @return OllamaResult that includes response text and time taken for response
    */
-  public OllamaResult askWithImageFiles(String model, String promptText, List<File> imageFiles)
+  public OllamaResult askWithImageFiles(String model, String prompt, List<File> imageFiles)
       throws OllamaBaseException, IOException, InterruptedException {
     List<String> images = new ArrayList<>();
     for (File imageFile : imageFiles) {
       images.add(encodeFileToBase64(imageFile));
     }
-    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, promptText, images);
+    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
     return askSync(ollamaRequestModel);
   }
 
@@ -384,17 +384,17 @@ public class OllamaAPI {
    * sync/blocking call.
    *
    * @param model the ollama model to ask the question to
-   * @param promptText the prompt/question text
+   * @param prompt the prompt/question text
    * @param imageURLs the list of image URLs to use for the question
    * @return OllamaResult that includes response text and time taken for response
    */
-  public OllamaResult askWithImageURLs(String model, String promptText, List<String> imageURLs)
+  public OllamaResult askWithImageURLs(String model, String prompt, List<String> imageURLs)
       throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
     List<String> images = new ArrayList<>();
     for (String imageURL : imageURLs) {
       images.add(encodeByteArrayToBase64(loadImageBytesFromUrl(imageURL)));
     }
-    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, promptText, images);
+    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
     return askSync(ollamaRequestModel);
   }
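
The image variants renamed above follow the same pattern. Below is a minimal sketch of `askWithImageFiles`, matching the signature in the hunk above; the host URL, `OllamaResult` package path and accessor, the model name, and the file path are assumptions or placeholders, and `askWithImageURLs` is used the same way with a `List<String>` of image URLs.

```java
import java.io.File;
import java.util.List;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
// Package path for OllamaResult is assumed, as in the earlier sketch.
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;

public class AskWithImagesExample {
  public static void main(String[] args) throws Exception {
    // Assumption: the constructor takes the Ollama server host URL.
    OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");

    // Sync/blocking call: each file is Base64-encoded and sent with the prompt,
    // matching the method body in the hunk above. "llava" is an example of an
    // image-capable model; the file path is a placeholder.
    OllamaResult result = ollamaAPI.askWithImageFiles(
        "llava",
        "What is in this picture?",
        List.of(new File("/path/to/image.jpg")));

    // Assumption: OllamaResult exposes the response text via a getter.
    System.out.println(result.getResponse());
  }
}
```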