diff --git a/.gitignore b/.gitignore
index 524f096..a91c35d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,24 +1,39 @@
-# Compiled class file
-*.class
+target/
+!.mvn/wrapper/maven-wrapper.jar
+!**/src/main/**/target/
+!**/src/test/**/target/
-# Log file
-*.log
+### IntelliJ IDEA ###
+.idea/modules.xml
+.idea/jarRepositories.xml
+.idea/compiler.xml
+.idea/libraries/
+*.iws
+*.iml
+*.ipr
-# BlueJ files
-*.ctxt
+### Eclipse ###
+.apt_generated
+.classpath
+.factorypath
+.project
+.settings
+.springBeans
+.sts4-cache
-# Mobile Tools for Java (J2ME)
-.mtj.tmp/
+### NetBeans ###
+/nbproject/private/
+/nbbuild/
+/dist/
+/nbdist/
+/.nb-gradle/
+build/
+!**/src/main/**/build/
+!**/src/test/**/build/
-# Package Files #
-*.jar
-*.war
-*.nar
-*.ear
-*.zip
-*.tar.gz
-*.rar
+### VS Code ###
+.vscode/
-# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
-hs_err_pid*
-replay_pid*
+### Mac OS ###
+.DS_Store
+/.idea/
diff --git a/README.md b/README.md
index 0cb651b..23929dd 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,42 @@
-# ollama4j
-Java API for interacting with Ollama
+### Ollama4j
+
+A Java wrapper for [Ollama](https://github.com/jmorganca/ollama/blob/main/docs/api.md) APIs.
+
+Prerequisites:
+
+- Docker
+- Java 20+ (the Maven build targets compiler level 20)
+
+
+Start Ollama Container:
+```
+docker run -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama
+```
+
+Submit a question to Ollama using Ollama4j:
+
+```java
+public class Test {
+ public static void main(String[] args) throws Exception {
+ String host = "http://localhost:11434/";
+
+ OllamaAPI ollamaAPI = new OllamaAPI(host);
+
+ ollamaAPI.pullModel(OllamaModel.LLAMA2);
+
+ OllamaAsyncResultCallback ollamaAsyncResultCallback = ollamaAPI.runAsync(OllamaModel.LLAMA2, "Who are you?");
+ while (true) {
+ if (ollamaAsyncResultCallback.isComplete()) {
+ System.out.println(ollamaAsyncResultCallback.getResponse());
+ break;
+ }
+ Thread.sleep(1000);
+ }
+ }
+}
+```
+
+You'd then get a response from Ollama:
+```
+I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that require natural language understanding and generation capabilities.
+```
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..daab268
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <groupId>org.ollama4j</groupId>
+    <artifactId>ollama4j</artifactId>
+    <version>1.0-SNAPSHOT</version>
+
+    <properties>
+        <maven.compiler.source>20</maven.compiler.source>
+        <maven.compiler.target>20</maven.compiler.target>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.google.code.gson</groupId>
+            <artifactId>gson</artifactId>
+            <version>2.10.1</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.httpcomponents.client5</groupId>
+            <artifactId>httpclient5</artifactId>
+            <version>5.2.1</version>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/src/main/java/org/ollama4j/Main.java b/src/main/java/org/ollama4j/Main.java
new file mode 100644
index 0000000..af4ad6b
--- /dev/null
+++ b/src/main/java/org/ollama4j/Main.java
@@ -0,0 +1,19 @@
+package org.ollama4j;
+
+public class Main {
+ public static void main(String[] args) throws Exception {
+ String host = "http://localhost:11434/";
+ OllamaAPI ollamaAPI = new OllamaAPI(host);
+ ollamaAPI.pullModel(OllamaModel.LLAMA2);
+ OllamaAsyncResultCallback ollamaAsyncResultCallback = ollamaAPI.runAsync(OllamaModel.LLAMA2, "Who are you?");
+ while (true) {
+ if (ollamaAsyncResultCallback.isComplete()) {
+ System.out.println(ollamaAsyncResultCallback.getResponse());
+ break;
+ }
+ Thread.sleep(1000);
+ }
+ }
+}
+
+
diff --git a/src/main/java/org/ollama4j/OllamaAPI.java b/src/main/java/org/ollama4j/OllamaAPI.java
new file mode 100644
index 0000000..cd600be
--- /dev/null
+++ b/src/main/java/org/ollama4j/OllamaAPI.java
@@ -0,0 +1,103 @@
+package org.ollama4j;
+
+import com.google.gson.Gson;
+import org.apache.hc.client5.http.HttpResponseException;
+import org.apache.hc.client5.http.classic.methods.HttpPost;
+import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
+import org.apache.hc.client5.http.impl.classic.CloseableHttpResponse;
+import org.apache.hc.client5.http.impl.classic.HttpClients;
+import org.apache.hc.core5.http.HttpEntity;
+import org.apache.hc.core5.http.ParseException;
+import org.apache.hc.core5.http.io.entity.EntityUtils;
+import org.apache.hc.core5.http.io.entity.StringEntity;
+
+import java.io.BufferedReader;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+
+@SuppressWarnings("deprecation")
+public class OllamaAPI {
+ private final String host;
+
+ public OllamaAPI(String host) {
+ if (host.endsWith("/")) {
+ this.host = host.substring(0, host.length() - 1);
+ } else {
+ this.host = host;
+ }
+ }
+
+ public void pullModel(OllamaModel model) throws IOException, ParseException, OllamaBaseException {
+ String url = this.host + "/api/pull";
+ String jsonData = String.format("{\"name\": \"%s\"}", model.getModel());
+ final HttpPost httpPost = new HttpPost(url);
+ final StringEntity entity = new StringEntity(jsonData);
+ httpPost.setEntity(entity);
+ httpPost.setHeader("Accept", "application/json");
+ httpPost.setHeader("Content-type", "application/json");
+ try (CloseableHttpClient client = HttpClients.createDefault();
+ CloseableHttpResponse response = client.execute(httpPost)) {
+ final int statusCode = response.getCode();
+ HttpEntity responseEntity = response.getEntity();
+ String responseString = "";
+ if (responseEntity != null) {
+ responseString = EntityUtils.toString(responseEntity, "UTF-8");
+ }
+ if (statusCode == 200) {
+ System.out.println(responseString);
+ } else {
+ throw new OllamaBaseException(statusCode + " - " + responseString);
+ }
+ }
+ }
+
+ public String runSync(OllamaModel ollamaModel, String promptText) throws OllamaBaseException, IOException {
+ Gson gson = new Gson();
+ OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(ollamaModel.getModel(), promptText);
+ URL obj = new URL(this.host + "/api/generate");
+ HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+ con.setRequestMethod("POST");
+ con.setDoOutput(true);
+ con.setRequestProperty("Content-Type", "application/json");
+ try (DataOutputStream wr = new DataOutputStream(con.getOutputStream())) {
+ wr.writeBytes(ollamaRequestModel.toString());
+ }
+ int responseCode = con.getResponseCode();
+ if (responseCode == HttpURLConnection.HTTP_OK) {
+ try (BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()))) {
+ String inputLine;
+ StringBuilder response = new StringBuilder();
+ while ((inputLine = in.readLine()) != null) {
+ OllamaResponseModel ollamaResponseModel = gson.fromJson(inputLine, OllamaResponseModel.class);
+ if (!ollamaResponseModel.getDone()) {
+ response.append(ollamaResponseModel.getResponse());
+ }
+ System.out.println("Streamed response line: " + ollamaResponseModel.getResponse());
+ }
+ in.close();
+ return response.toString();
+ }
+ } else {
+ throw new OllamaBaseException(con.getResponseCode() + " - " + con.getResponseMessage());
+ }
+ }
+
+ public OllamaAsyncResultCallback runAsync(OllamaModel ollamaModel, String promptText) throws OllamaBaseException, IOException {
+ OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(ollamaModel.getModel(), promptText);
+ URL obj = new URL(this.host + "/api/generate");
+ HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+ con.setRequestMethod("POST");
+ con.setDoOutput(true);
+ con.setRequestProperty("Content-Type", "application/json");
+ try (DataOutputStream wr = new DataOutputStream(con.getOutputStream())) {
+ wr.writeBytes(ollamaRequestModel.toString());
+ }
+ OllamaAsyncResultCallback ollamaAsyncResultCallback = new OllamaAsyncResultCallback(con);
+ ollamaAsyncResultCallback.start();
+ return ollamaAsyncResultCallback;
+ }
+}
+
diff --git a/src/main/java/org/ollama4j/OllamaAsyncResultCallback.java b/src/main/java/org/ollama4j/OllamaAsyncResultCallback.java
new file mode 100644
index 0000000..1e3dd59
--- /dev/null
+++ b/src/main/java/org/ollama4j/OllamaAsyncResultCallback.java
@@ -0,0 +1,58 @@
+package org.ollama4j;
+
+import com.google.gson.Gson;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+
+public class OllamaAsyncResultCallback extends Thread {
+ private final HttpURLConnection connection;
+ private String result;
+ private boolean isDone;
+
+ public OllamaAsyncResultCallback(HttpURLConnection con) {
+ this.connection = con;
+ this.isDone = false;
+ this.result = "";
+ }
+
+ @Override
+ public void run() {
+ Gson gson = new Gson();
+ int responseCode = 0;
+ try {
+ responseCode = this.connection.getResponseCode();
+ if (responseCode == HttpURLConnection.HTTP_OK) {
+ try (BufferedReader in = new BufferedReader(new InputStreamReader(this.connection.getInputStream()))) {
+ String inputLine;
+ StringBuilder response = new StringBuilder();
+ while ((inputLine = in.readLine()) != null) {
+ OllamaResponseModel ollamaResponseModel = gson.fromJson(inputLine, OllamaResponseModel.class);
+ if (!ollamaResponseModel.getDone()) {
+ response.append(ollamaResponseModel.getResponse());
+ }
+// System.out.println("Streamed response line: " + responseModel.getResponse());
+ }
+ in.close();
+ this.isDone = true;
+ this.result = response.toString();
+ }
+ } else {
+ throw new OllamaBaseException(connection.getResponseCode() + " - " + connection.getResponseMessage());
+ }
+ } catch (IOException | OllamaBaseException e) {
+ this.isDone = true;
+ this.result = "FAILED! " + e.getMessage();
+ }
+ }
+
+ public boolean isComplete() {
+ return isDone;
+ }
+
+ public String getResponse() {
+ return result;
+ }
+}
diff --git a/src/main/java/org/ollama4j/OllamaBaseException.java b/src/main/java/org/ollama4j/OllamaBaseException.java
new file mode 100644
index 0000000..ddab3ed
--- /dev/null
+++ b/src/main/java/org/ollama4j/OllamaBaseException.java
@@ -0,0 +1,8 @@
+package org.ollama4j;
+
/**
 * Base exception for errors reported by the Ollama server or raised while
 * communicating with it.
 */
public class OllamaBaseException extends Exception {

    /**
     * @param s description of the failure (typically "statusCode - body")
     */
    public OllamaBaseException(String s) {
        super(s);
    }

    /**
     * @param s     description of the failure
     * @param cause underlying exception, preserved for diagnostics
     */
    public OllamaBaseException(String s, Throwable cause) {
        super(s, cause);
    }
}
diff --git a/src/main/java/org/ollama4j/OllamaModel.java b/src/main/java/org/ollama4j/OllamaModel.java
new file mode 100644
index 0000000..f62bf40
--- /dev/null
+++ b/src/main/java/org/ollama4j/OllamaModel.java
@@ -0,0 +1,15 @@
+package org.ollama4j;
+
/**
 * Models known to this client, each carrying the tag name the Ollama server
 * uses to identify it.
 */
public enum OllamaModel {
    LLAMA2("llama2"),
    MISTRAL("mistral"),
    MEDLLAMA2("medllama2");

    private final String model;

    OllamaModel(String modelTag) {
        this.model = modelTag;
    }

    /** @return the tag understood by the Ollama server (e.g. "llama2"). */
    public String getModel() {
        return model;
    }
}
diff --git a/src/main/java/org/ollama4j/OllamaRequestModel.java b/src/main/java/org/ollama4j/OllamaRequestModel.java
new file mode 100644
index 0000000..de70ae5
--- /dev/null
+++ b/src/main/java/org/ollama4j/OllamaRequestModel.java
@@ -0,0 +1,34 @@
+package org.ollama4j;
+
+import com.google.gson.Gson;
+
+public class OllamaRequestModel {
+ private String model;
+ private String prompt;
+
+ public OllamaRequestModel(String model, String prompt) {
+ this.model = model;
+ this.prompt = prompt;
+ }
+
+ public String getModel() {
+ return model;
+ }
+
+ public void setModel(String model) {
+ this.model = model;
+ }
+
+ public String getPrompt() {
+ return prompt;
+ }
+
+ public void setPrompt(String prompt) {
+ this.prompt = prompt;
+ }
+
+ @Override
+ public String toString() {
+ return new Gson().toJson(this);
+ }
+}
diff --git a/src/main/java/org/ollama4j/OllamaResponseModel.java b/src/main/java/org/ollama4j/OllamaResponseModel.java
new file mode 100644
index 0000000..9ccadca
--- /dev/null
+++ b/src/main/java/org/ollama4j/OllamaResponseModel.java
@@ -0,0 +1,62 @@
+package org.ollama4j;
+
+import java.util.List;
+
/**
 * One line of the newline-delimited JSON streamed by Ollama's /api/generate
 * endpoint. Field names deliberately use snake_case to match the JSON keys so
 * Gson can bind them without @SerializedName annotations.
 */
public class OllamaResponseModel {
    private String model;
    private String created_at;
    private String response;        // partial text fragment for this line
    private Boolean done;           // true only on the final summary line
    // Typed List<Integer> instead of a raw List: the "context" field is an
    // array of conversation token ids per the Ollama API — TODO confirm.
    private List<Integer> context;
    private Long total_duration;
    private Long load_duration;
    private Long prompt_eval_duration;
    private Long eval_duration;
    private Integer prompt_eval_count;
    private Integer eval_count;

    public String getModel() {
        return model;
    }

    public String getCreated_at() {
        return created_at;
    }

    public String getResponse() {
        return response;
    }

    public Boolean getDone() {
        return done;
    }

    public List<Integer> getContext() {
        return context;
    }

    public Long getTotal_duration() {
        return total_duration;
    }

    public Long getLoad_duration() {
        return load_duration;
    }

    public Long getPrompt_eval_duration() {
        return prompt_eval_duration;
    }

    public Long getEval_duration() {
        return eval_duration;
    }

    public Integer getPrompt_eval_count() {
        return prompt_eval_count;
    }

    public Integer getEval_count() {
        return eval_count;
    }
}