mirror of https://github.com/amithkoujalgi/ollama4j.git
synced 2025-05-15 20:07:10 +02:00

commit 4a536139e8
parent 18f5141cbf

init
53  .gitignore  (vendored)
@@ -1,24 +1,39 @@
-# Compiled class file
-*.class
+target/
+!.mvn/wrapper/maven-wrapper.jar
+!**/src/main/**/target/
+!**/src/test/**/target/

-# Log file
-*.log
+### IntelliJ IDEA ###
+.idea/modules.xml
+.idea/jarRepositories.xml
+.idea/compiler.xml
+.idea/libraries/
+*.iws
+*.iml
+*.ipr

-# BlueJ files
-*.ctxt
+### Eclipse ###
+.apt_generated
+.classpath
+.factorypath
+.project
+.settings
+.springBeans
+.sts4-cache

-# Mobile Tools for Java (J2ME)
-.mtj.tmp/
+### NetBeans ###
+/nbproject/private/
+/nbbuild/
+/dist/
+/nbdist/
+/.nb-gradle/
+build/
+!**/src/main/**/build/
+!**/src/test/**/build/

-# Package Files #
-*.jar
-*.war
-*.nar
-*.ear
-*.zip
-*.tar.gz
-*.rar
+### VS Code ###
+.vscode/

-# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
-hs_err_pid*
-replay_pid*
+### Mac OS ###
+.DS_Store
+/.idea/
44  README.md
@@ -1,2 +1,42 @@
-# ollama4j
-Java API for interacting with Ollama
+### Ollama4j
+
+A Java wrapper for [Ollama](https://github.com/jmorganca/ollama/blob/main/docs/api.md) APIs.
+
+Prerequisites:
+
+- Docker
+- Java 8+
+
+Start Ollama Container:
+```
+docker run -v ~/ollama:/root/.ollama -p 11434:11434 ollama/ollama
+```
+
+Submit a question to Ollama using Ollama4j:
+
+```java
+public class Test {
+    public static void main(String[] args) throws Exception {
+        String host = "http://localhost:11434/";
+
+        OllamaAPI ollamaAPI = new OllamaAPI(host);
+
+        ollamaAPI.pullModel(OllamaModel.LLAMA2);
+
+        OllamaAsyncResultCallback ollamaAsyncResultCallback = ollamaAPI.runAsync(OllamaModel.LLAMA2, "Who are you?");
+        while (true) {
+            if (ollamaAsyncResultCallback.isComplete()) {
+                System.out.println(ollamaAsyncResultCallback.getResponse());
+                break;
+            }
+            Thread.sleep(1000);
+        }
+    }
+}
+```
+
+You'd then get a response from Ollama:
+```
+I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational manner. I am trained on a massive dataset of text from the internet and can generate human-like responses to a wide range of topics and questions. I can be used to create chatbots, virtual assistants, and other applications that require natural language understanding and generation capabilities.
+```
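The README shows only the polling, asynchronous path. This commit's OllamaAPI also exposes a blocking runSync method; the snippet below is a minimal sketch of how it could be called, assuming the same container and host as above (the class name and usage are illustrative, not part of the commit):

```java
import org.ollama4j.OllamaAPI;
import org.ollama4j.OllamaModel;

// Hypothetical caller, not part of this commit: exercises the blocking runSync path.
// Assumes the Ollama container from the README is listening on localhost:11434.
public class SyncExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        ollamaAPI.pullModel(OllamaModel.LLAMA2);
        // Blocks until the full streamed response has been collected.
        String answer = ollamaAPI.runSync(OllamaModel.LLAMA2, "Who are you?");
        System.out.println(answer);
    }
}
```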
29  pom.xml  (Normal file)
@@ -0,0 +1,29 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.ollama4j</groupId>
    <artifactId>ollama4j</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <maven.compiler.source>20</maven.compiler.source>
        <maven.compiler.target>20</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <dependency>
            <groupId>com.google.code.gson</groupId>
            <artifactId>gson</artifactId>
            <version>2.10.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.httpcomponents.client5</groupId>
            <artifactId>httpclient5</artifactId>
            <version>5.2.1</version>
        </dependency>
    </dependencies>
</project>
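The coordinates declared here are org.ollama4j:ollama4j:1.0-SNAPSHOT. Nothing in this commit publishes the artifact to a remote repository, so a consuming project would only resolve it after a local install; the fragment below is a sketch of that hypothetical consumer side, not something present in the commit:

```xml
<!-- Hypothetical consumer pom fragment: assumes ollama4j was installed into the
     local Maven repository with `mvn install` from this project's root. -->
<dependency>
    <groupId>org.ollama4j</groupId>
    <artifactId>ollama4j</artifactId>
    <version>1.0-SNAPSHOT</version>
</dependency>
```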
19  src/main/java/org/ollama4j/Main.java  (Normal file)
@@ -0,0 +1,19 @@
package org.ollama4j;

public class Main {
    public static void main(String[] args) throws Exception {
        String host = "http://localhost:11434/";
        OllamaAPI ollamaAPI = new OllamaAPI(host);
        ollamaAPI.pullModel(OllamaModel.LLAMA2);
        OllamaAsyncResultCallback ollamaAsyncResultCallback = ollamaAPI.runAsync(OllamaModel.LLAMA2, "Who are you?");
        while (true) {
            if (ollamaAsyncResultCallback.isComplete()) {
                System.out.println(ollamaAsyncResultCallback.getResponse());
                break;
            }
            Thread.sleep(1000);
        }
    }
}
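Main.java duplicates the README example as an in-repo entry point. One way to run it from the project root is through the Maven exec plugin; note that the plugin is not declared in this commit's pom.xml, so this is only a convenience invocation that relies on Maven resolving the `exec:` prefix from its default plugin groups:

```shell
# Assumes the Ollama container from the README is already running on localhost:11434.
# exec-maven-plugin is not configured in this commit's pom.xml; this is a convenience
# invocation, not project configuration.
mvn -q compile exec:java -Dexec.mainClass=org.ollama4j.Main
```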
103  src/main/java/org/ollama4j/OllamaAPI.java  (Normal file)
@@ -0,0 +1,103 @@
package org.ollama4j;

import com.google.gson.Gson;
import org.apache.hc.client5.http.HttpResponseException;
import org.apache.hc.client5.http.classic.methods.HttpPost;
import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
import org.apache.hc.client5.http.impl.classic.CloseableHttpResponse;
import org.apache.hc.client5.http.impl.classic.HttpClients;
import org.apache.hc.core5.http.HttpEntity;
import org.apache.hc.core5.http.ParseException;
import org.apache.hc.core5.http.io.entity.EntityUtils;
import org.apache.hc.core5.http.io.entity.StringEntity;

import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

@SuppressWarnings("deprecation")
public class OllamaAPI {
    private final String host;

    public OllamaAPI(String host) {
        if (host.endsWith("/")) {
            this.host = host.substring(0, host.length() - 1);
        } else {
            this.host = host;
        }
    }

    public void pullModel(OllamaModel model) throws IOException, ParseException, OllamaBaseException {
        String url = this.host + "/api/pull";
        String jsonData = String.format("{\"name\": \"%s\"}", model.getModel());
        final HttpPost httpPost = new HttpPost(url);
        final StringEntity entity = new StringEntity(jsonData);
        httpPost.setEntity(entity);
        httpPost.setHeader("Accept", "application/json");
        httpPost.setHeader("Content-type", "application/json");
        try (CloseableHttpClient client = HttpClients.createDefault();
             CloseableHttpResponse response = client.execute(httpPost)) {
            final int statusCode = response.getCode();
            HttpEntity responseEntity = response.getEntity();
            String responseString = "";
            if (responseEntity != null) {
                responseString = EntityUtils.toString(responseEntity, "UTF-8");
            }
            if (statusCode == 200) {
                System.out.println(responseString);
            } else {
                throw new OllamaBaseException(statusCode + " - " + responseString);
            }
        }
    }

    public String runSync(OllamaModel ollamaModel, String promptText) throws OllamaBaseException, IOException {
        Gson gson = new Gson();
        OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(ollamaModel.getModel(), promptText);
        URL obj = new URL(this.host + "/api/generate");
        HttpURLConnection con = (HttpURLConnection) obj.openConnection();
        con.setRequestMethod("POST");
        con.setDoOutput(true);
        con.setRequestProperty("Content-Type", "application/json");
        try (DataOutputStream wr = new DataOutputStream(con.getOutputStream())) {
            wr.writeBytes(ollamaRequestModel.toString());
        }
        int responseCode = con.getResponseCode();
        if (responseCode == HttpURLConnection.HTTP_OK) {
            try (BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()))) {
                String inputLine;
                StringBuilder response = new StringBuilder();
                while ((inputLine = in.readLine()) != null) {
                    OllamaResponseModel ollamaResponseModel = gson.fromJson(inputLine, OllamaResponseModel.class);
                    if (!ollamaResponseModel.getDone()) {
                        response.append(ollamaResponseModel.getResponse());
                    }
                    System.out.println("Streamed response line: " + ollamaResponseModel.getResponse());
                }
                in.close();
                return response.toString();
            }
        } else {
            throw new OllamaBaseException(con.getResponseCode() + " - " + con.getResponseMessage());
        }
    }

    public OllamaAsyncResultCallback runAsync(OllamaModel ollamaModel, String promptText) throws OllamaBaseException, IOException {
        OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(ollamaModel.getModel(), promptText);
        URL obj = new URL(this.host + "/api/generate");
        HttpURLConnection con = (HttpURLConnection) obj.openConnection();
        con.setRequestMethod("POST");
        con.setDoOutput(true);
        con.setRequestProperty("Content-Type", "application/json");
        try (DataOutputStream wr = new DataOutputStream(con.getOutputStream())) {
            wr.writeBytes(ollamaRequestModel.toString());
        }
        OllamaAsyncResultCallback ollamaAsyncResultCallback = new OllamaAsyncResultCallback(con);
        ollamaAsyncResultCallback.start();
        return ollamaAsyncResultCallback;
    }
}
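Both pullModel and runSync convert any non-200 status into an OllamaBaseException whose message has the form "status - body", so callers can separate transport failures from API failures. The following is a small sketch of that caller-side handling; the class name and structure are illustrative, not part of the commit:

```java
import java.io.IOException;

import org.ollama4j.OllamaAPI;
import org.ollama4j.OllamaBaseException;
import org.ollama4j.OllamaModel;

// Hypothetical caller, not part of this commit: shows how the two failure modes surface.
public class PullExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI api = new OllamaAPI("http://localhost:11434");
        try {
            api.pullModel(OllamaModel.LLAMA2);
        } catch (OllamaBaseException e) {
            // Non-200 responses from /api/pull arrive here as "status - response body".
            System.err.println("Ollama rejected the pull: " + e.getMessage());
        } catch (IOException e) {
            // Connection-level problems (server not running, wrong host, and so on).
            System.err.println("Could not reach Ollama: " + e.getMessage());
        }
    }
}
```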
58  src/main/java/org/ollama4j/OllamaAsyncResultCallback.java  (Normal file)
@@ -0,0 +1,58 @@
package org.ollama4j;

import com.google.gson.Gson;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;

public class OllamaAsyncResultCallback extends Thread {
    private final HttpURLConnection connection;
    private String result;
    private boolean isDone;

    public OllamaAsyncResultCallback(HttpURLConnection con) {
        this.connection = con;
        this.isDone = false;
        this.result = "";
    }

    @Override
    public void run() {
        Gson gson = new Gson();
        int responseCode = 0;
        try {
            responseCode = this.connection.getResponseCode();
            if (responseCode == HttpURLConnection.HTTP_OK) {
                try (BufferedReader in = new BufferedReader(new InputStreamReader(this.connection.getInputStream()))) {
                    String inputLine;
                    StringBuilder response = new StringBuilder();
                    while ((inputLine = in.readLine()) != null) {
                        OllamaResponseModel ollamaResponseModel = gson.fromJson(inputLine, OllamaResponseModel.class);
                        if (!ollamaResponseModel.getDone()) {
                            response.append(ollamaResponseModel.getResponse());
                        }
                        // System.out.println("Streamed response line: " + responseModel.getResponse());
                    }
                    in.close();
                    this.isDone = true;
                    this.result = response.toString();
                }
            } else {
                throw new OllamaBaseException(connection.getResponseCode() + " - " + connection.getResponseMessage());
            }
        } catch (IOException | OllamaBaseException e) {
            this.isDone = true;
            this.result = "FAILED! " + e.getMessage();
        }
    }

    public boolean isComplete() {
        return isDone;
    }

    public String getResponse() {
        return result;
    }
}
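Because OllamaAsyncResultCallback extends Thread and assigns result and isDone before run() returns on both the success and the failure path, a caller that does not need to poll can simply join() the thread. A hedged alternative to the README's busy-wait loop (the class name is illustrative):

```java
import org.ollama4j.OllamaAPI;
import org.ollama4j.OllamaAsyncResultCallback;
import org.ollama4j.OllamaModel;

// Hypothetical caller, not part of this commit: waits on the thread instead of polling.
public class JoinExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI api = new OllamaAPI("http://localhost:11434/");
        OllamaAsyncResultCallback callback = api.runAsync(OllamaModel.LLAMA2, "Who are you?");
        callback.join();  // returns once run() has finished, success or failure
        System.out.println(callback.getResponse());
    }
}
```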
8  src/main/java/org/ollama4j/OllamaBaseException.java  (Normal file)
@@ -0,0 +1,8 @@
package org.ollama4j;

public class OllamaBaseException extends Exception {

    public OllamaBaseException(String s) {
        super(s);
    }
}
15  src/main/java/org/ollama4j/OllamaModel.java  (Normal file)
@@ -0,0 +1,15 @@
package org.ollama4j;

public enum OllamaModel {
    LLAMA2("llama2"), MISTRAL("mistral"), MEDLLAMA2("medllama2");

    private final String model;

    OllamaModel(String model) {
        this.model = model;
    }

    public String getModel() {
        return model;
    }
}
34  src/main/java/org/ollama4j/OllamaRequestModel.java  (Normal file)
@@ -0,0 +1,34 @@
package org.ollama4j;

import com.google.gson.Gson;

public class OllamaRequestModel {
    private String model;
    private String prompt;

    public OllamaRequestModel(String model, String prompt) {
        this.model = model;
        this.prompt = prompt;
    }

    public String getModel() {
        return model;
    }

    public void setModel(String model) {
        this.model = model;
    }

    public String getPrompt() {
        return prompt;
    }

    public void setPrompt(String prompt) {
        this.prompt = prompt;
    }

    @Override
    public String toString() {
        return new Gson().toJson(this);
    }
}
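toString() delegates to Gson, so the request body written to /api/generate is a plain two-field JSON object. The demo class below is an illustration of that output for the README's prompt, not code from the commit:

```java
package org.ollama4j;

// Illustration only, not part of this commit: prints the JSON that toString()
// produces for the README's prompt.
public class RequestModelDemo {
    public static void main(String[] args) {
        OllamaRequestModel request = new OllamaRequestModel("llama2", "Who are you?");
        // Expected output: {"model":"llama2","prompt":"Who are you?"}
        System.out.println(request);
    }
}
```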
62  src/main/java/org/ollama4j/OllamaResponseModel.java  (Normal file)
@@ -0,0 +1,62 @@
package org.ollama4j;

import java.util.List;

public class OllamaResponseModel {
    private String model;
    private String created_at;
    private String response;
    private Boolean done;
    private List<Integer> context;
    private Long total_duration;
    private Long load_duration;
    private Long prompt_eval_duration;
    private Long eval_duration;
    private Integer prompt_eval_count;
    private Integer eval_count;

    public String getModel() {
        return model;
    }

    public String getCreated_at() {
        return created_at;
    }

    public String getResponse() {
        return response;
    }

    public Boolean getDone() {
        return done;
    }

    public List<Integer> getContext() {
        return context;
    }

    public Long getTotal_duration() {
        return total_duration;
    }

    public Long getLoad_duration() {
        return load_duration;
    }

    public Long getPrompt_eval_duration() {
        return prompt_eval_duration;
    }

    public Long getEval_duration() {
        return eval_duration;
    }

    public Integer getPrompt_eval_count() {
        return prompt_eval_count;
    }

    public Integer getEval_count() {
        return eval_count;
    }
}
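These fields mirror the newline-delimited JSON that Ollama streams from /api/generate, which OllamaAPI and OllamaAsyncResultCallback parse line by line: intermediate lines carry one response fragment with done=false, and the final line sets done=true along with the context and timing counters. The two lines below are an invented illustration of that shape, not captured output:

```json
{"model": "llama2", "created_at": "2023-11-12T10:15:30.000Z", "response": "I", "done": false}
{"model": "llama2", "created_at": "2023-11-12T10:15:31.000Z", "response": "", "done": true, "context": [1, 2, 3], "total_duration": 1200000000, "load_duration": 100000000, "prompt_eval_count": 12, "prompt_eval_duration": 250000000, "eval_count": 40, "eval_duration": 850000000}
```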