diff --git a/README.md b/README.md
index f6d4a52..e50996d 100644
--- a/README.md
+++ b/README.md
@@ -2,8 +2,33 @@
-A Java library (wrapper/binding)
-for [Ollama](https://github.com/jmorganca/ollama/blob/main/docs/api.md) APIs.
+A Java library (wrapper/binding) for the [Ollama](https://ollama.ai/) server.
+
+Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/).
+
+
+
+
+
+
+
+
+
+
+
+
+## Table of Contents
+
+- [How does it work?](#how-does-it-work)
+- [Requirements](#requirements)
+- [Installation](#installation)
+- [API Spec](#api-spec)
+- [Demo APIs](#try-out-the-apis-with-ollama-server)
+- [Development](#development)
+- [Contributions](#get-involved)
+- [References](#references)
+
+#### How does it work?
```mermaid
flowchart LR
@@ -17,26 +42,6 @@ for [Ollama](https://github.com/jmorganca/ollama/blob/main/docs/api.md) APIs.
end
```
-
-
-
-
-
-
-
-
-
-
-
-## Table of Contents
-
-- [Requirements](#requirements)
-- [Installation](#installation)
-- [API Spec](#api-spec)
-- [Demo APIs](#try-out-the-apis-with-ollama-server)
-- [Development](#development)
-- [Contributions](#get-involved)
-
#### Requirements

@@ -64,7 +69,7 @@ In your Maven project, add this dependency:
```
-Latest release:
+Latest release:

@@ -76,7 +81,7 @@ Latest release:
#### API Spec
-Find the full `Javadoc` (API specifications) [here](https://amithkoujalgi.github.io/ollama4j/).
+Find the full API specifications on the [website](https://amithkoujalgi.github.io/ollama4j/).
#### Development
@@ -117,6 +122,7 @@ Actions CI workflow.
- [x] Use lombok
- [x] Update request body creation with Java objects
- [ ] Async APIs for images
+- [ ] Add custom headers to requests
- [ ] Add additional params for `ask` APIs such as:
- `options`: additional model parameters for the Modelfile such as `temperature`
- `system`: system prompt (overrides what is defined in the Modelfile)
@@ -138,3 +144,7 @@ of contribution is much appreciated.
The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/)
project.
+
+### References
+
+- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)
\ No newline at end of file
diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java
index a801793..d1c6cee 100644
--- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java
+++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java
@@ -331,12 +331,12 @@ public class OllamaAPI {
* Ask a question to a model running on the Ollama server. This is a sync/blocking call.
*
* @param model the ollama model to ask the question to
- * @param promptText the prompt/question text
+ * @param prompt the prompt/question text
* @return OllamaResult that includes response text and time taken for response
*/
- public OllamaResult ask(String model, String promptText)
+ public OllamaResult ask(String model, String prompt)
throws OllamaBaseException, IOException, InterruptedException {
- OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, promptText);
+ OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
return askSync(ollamaRequestModel);
}
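
For context, a minimal usage sketch of the renamed synchronous `ask` API is shown below. The server host URL, the model name, and the `getResponse()` accessor on `OllamaResult` are illustrative assumptions, not part of this diff.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;

public class AskExample {
  public static void main(String[] args) throws Exception {
    // Assumption: OllamaAPI is constructed with the Ollama server's host URL.
    OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");

    // Blocking call: returns only after the model has produced its full response.
    var result = ollamaAPI.ask("llama2", "Who are you?");

    // Per the javadoc above, OllamaResult carries the response text and the time taken;
    // getResponse() is an assumed accessor for the response text.
    System.out.println(result.getResponse());
  }
}
```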
@@ -346,11 +346,11 @@ public class OllamaAPI {
* async/non-blocking call.
*
* @param model the ollama model to ask the question to
- * @param promptText the prompt/question text
+ * @param prompt the prompt/question text
* @return the ollama async result callback handle
*/
- public OllamaAsyncResultCallback askAsync(String model, String promptText) {
- OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, promptText);
+ public OllamaAsyncResultCallback askAsync(String model, String prompt) {
+ OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
URI uri = URI.create(this.host + "/api/generate");
OllamaAsyncResultCallback ollamaAsyncResultCallback =
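
A similarly hedged sketch of the async variant follows; it only shows obtaining the handle, since how `OllamaAsyncResultCallback` is consumed (polling, blocking, streaming) is not visible in this diff.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;

public class AskAsyncExample {
  public static void main(String[] args) {
    OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434"); // assumed host URL

    // Non-blocking: a callback handle is returned immediately while the request
    // to /api/generate runs in the background.
    var handle = ollamaAPI.askAsync("llama2", "Who are you?");

    // Consuming the result depends on OllamaAsyncResultCallback's API,
    // which is outside the scope of this diff.
  }
}
```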
@@ -365,17 +365,17 @@ public class OllamaAPI {
* sync/blocking call.
*
* @param model the ollama model to ask the question to
- * @param promptText the prompt/question text
+ * @param prompt the prompt/question text
* @param imageFiles the list of image files to use for the question
* @return OllamaResult that includes response text and time taken for response
*/
- public OllamaResult askWithImageFiles(String model, String promptText, List<File> imageFiles)
+ public OllamaResult askWithImageFiles(String model, String prompt, List<File> imageFiles)
throws OllamaBaseException, IOException, InterruptedException {
List<String> images = new ArrayList<>();
for (File imageFile : imageFiles) {
images.add(encodeFileToBase64(imageFile));
}
- OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, promptText, images);
+ OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
return askSync(ollamaRequestModel);
}
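
As an illustration of the file-based image API, a hedged sketch follows; the model name, file path, and result handling are assumptions.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;

import java.io.File;
import java.util.List;

public class AskWithImageFilesExample {
  public static void main(String[] args) throws Exception {
    OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434"); // assumed host URL

    // Each file is base64-encoded by the library (encodeFileToBase64) and sent
    // alongside the prompt in the request body.
    var result = ollamaAPI.askWithImageFiles(
        "llava",                                  // assumed multimodal model name
        "What is in this picture?",
        List.of(new File("/path/to/image.jpg"))); // hypothetical file path

    System.out.println(result); // OllamaResult holds response text and timing
  }
}
```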
@@ -384,17 +384,17 @@ public class OllamaAPI {
* sync/blocking call.
*
* @param model the ollama model to ask the question to
- * @param promptText the prompt/question text
+ * @param prompt the prompt/question text
* @param imageURLs the list of image URLs to use for the question
* @return OllamaResult that includes response text and time taken for response
*/
- public OllamaResult askWithImageURLs(String model, String promptText, List<String> imageURLs)
+ public OllamaResult askWithImageURLs(String model, String prompt, List<String> imageURLs)
throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
List<String> images = new ArrayList<>();
for (String imageURL : imageURLs) {
images.add(encodeByteArrayToBase64(loadImageBytesFromUrl(imageURL)));
}
- OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, promptText, images);
+ OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
return askSync(ollamaRequestModel);
}
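
And a matching sketch for the URL-based variant, again with an assumed model name and a hypothetical image URL.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;

import java.util.List;

public class AskWithImageURLsExample {
  public static void main(String[] args) throws Exception {
    OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434"); // assumed host URL

    // The URL variant downloads each image (loadImageBytesFromUrl) and
    // base64-encodes the bytes before sending them with the prompt.
    var result = ollamaAPI.askWithImageURLs(
        "llava",                                    // assumed multimodal model name
        "Describe this image.",
        List.of("https://example.com/sample.jpg")); // hypothetical image URL

    System.out.println(result);
  }
}
```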