Mirror of https://github.com/amithkoujalgi/ollama4j.git (synced 2025-10-28 07:00:41 +01:00)
Compare commits
17 Commits
- bd1a57c7e0
- 7fabead249
- 268a973d5e
- d949a3cb69
- e2443ed68a
- 37193b1f5b
- e33071ae38
- fffc8dc526
- def950cc9c
- f4db7ca326
- 18760250ea
- 233597efd1
- cec9f29eb7
- 20cb92a418
- b0dc38954b
- 1479d0a494
- b328daee43
README.md (36 changed lines)
@@ -4,12 +4,11 @@
<img src='https://raw.githubusercontent.com/ollama4j/ollama4j/65a9d526150da8fcd98e2af6a164f055572bf722/ollama4j.jpeg' width='100' alt="ollama4j-icon">
</p>


A Java library (wrapper/binding) for [Ollama](https://ollama.ai/) server.
<div align="center">
A Java library (wrapper/binding) for Ollama server.

Find more details on the [website](https://ollama4j.github.io/ollama4j/).

<div align="center">



@@ -268,7 +267,10 @@ make integration-tests

Newer artifacts are published via GitHub Actions CI workflow when a new release is created from `main` branch.

#### Who's using Ollama4j?
## ⭐ Give us a Star!

If you like or are using this project to build your own, please give us a star. It's a free way to show your support.

## Who's using Ollama4j?

- `Datafaker`: a library to generate fake data
  - https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api
@@ -277,18 +279,22 @@ Newer artifacts are published via GitHub Actions CI workflow when a new release
- `ollama-translator`: A Minecraft 1.20.6 Spigot plugin that allows you to easily break language barriers by using Ollama on the
  server to translate all messages into a specific target language.
  - https://github.com/liebki/ollama-translator
  - https://www.reddit.com/r/fabricmc/comments/1e65x5s/comment/ldr2vcf/
- `Another Minecraft Mod`: https://www.reddit.com/r/fabricmc/comments/1e65x5s/comment/ldr2vcf/
- `Ollama4j Web UI`: A web UI for Ollama written in Java using Spring Boot, the Vaadin framework, and
  Ollama4j.
  - https://github.com/ollama4j/ollama4j-web-ui
- `JnsCLI`: A command-line tool for Jenkins that manages jobs, builds, and configurations directly from the terminal while offering AI-powered error analysis for quick troubleshooting.
  - https://github.com/mirum8/jnscli
- `Katie Backend`: An open-source AI-based question-answering platform that helps companies and organizations make their private domain knowledge accessible and useful to their employees and customers.
  - https://github.com/wyona/katie-backend
- `TeleLlama3 Bot`: A question-answering Telegram bot.
  - https://git.hiast.edu.sy/mohamadbashar.disoki/telellama3-bot

#### Traction
## Traction

[![Star History Chart](https://api.star-history.com/svg?repos=ollama4j/ollama4j&type=Date)](https://star-history.com/#ollama4j/ollama4j&Date)

### Get Involved
## Get Involved

<div align="center">
@@ -316,6 +322,22 @@ Contributions are most welcome! Whether it's reporting a bug, proposing an enhan
with code - any sort
of contribution is much appreciated.

## 🏷️ License and Citation

The code is available under [MIT License](./LICENSE).

If you find this project helpful in your research, please cite this work at

```
@misc{ollama4j2024,
    author = {Amith Koujalgi},
    title = {Ollama4j: A Java Library (Wrapper/Binding) for Ollama Server},
    year = {2024},
    month = {January},
    url = {https://github.com/ollama4j/ollama4j}
}
```

### References

- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md)
@@ -8,12 +8,85 @@ Generate embeddings from a model.

Parameters:

- `model`: name of the model to generate embeddings from
- `input`: text(s) to generate embeddings for

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;

import java.util.Arrays;

public class Main {

    public static void main(String[] args) throws Exception {

        String host = "http://localhost:11434/";

        OllamaAPI ollamaAPI = new OllamaAPI(host);

        OllamaEmbedResponseModel embeddings = ollamaAPI.embed("all-minilm", Arrays.asList("Why is the sky blue?", "Why is the grass green?"));

        System.out.println(embeddings);
    }
}
```

Or, using the `OllamaEmbedRequestModel`:

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel;
import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;

import java.util.Arrays;

public class Main {

    public static void main(String[] args) throws Exception {

        String host = "http://localhost:11434/";

        OllamaAPI ollamaAPI = new OllamaAPI(host);

        OllamaEmbedResponseModel embeddings = ollamaAPI.embed(new OllamaEmbedRequestModel("all-minilm", Arrays.asList("Why is the sky blue?", "Why is the grass green?")));

        System.out.println(embeddings);
    }
}
```

You will get a response similar to:
```json
{
    "model": "all-minilm",
    "embeddings": [[-0.034674067, 0.030984823, 0.0067988685]],
    "total_duration": 14173700,
    "load_duration": 1198800,
    "prompt_eval_count": 2
}
```

:::note

This is a deprecated API.

:::

Parameters:

- `model`: name of model to generate embeddings from
- `prompt`: text to generate embeddings for

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.types.OllamaModelType;

import java.util.List;

public class Main {
@@ -40,11 +113,6 @@ You will get a response similar to:
    0.009260174818336964,
    0.23178744316101074,
    -0.2916173040866852,
    -0.8924556970596313,
    0.8785552978515625,
    -0.34576427936553955,
    0.5742510557174683,
    -0.04222835972905159,
    -0.137906014919281
    -0.8924556970596313
]
```
@@ -7,8 +7,10 @@ import io.github.ollama4j.models.chat.OllamaChatMessage;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel;
import io.github.ollama4j.models.embeddings.OllamaEmbeddingResponseModel;
import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.models.ps.ModelsProcessResponse;
@@ -342,7 +344,9 @@ public class OllamaAPI {
     * @param model name of model to generate embeddings from
     * @param prompt text to generate embeddings for
     * @return embeddings
     * @deprecated Use {@link #embed(String, List<String>)} instead.
     */
    @Deprecated
    public List<Double> generateEmbeddings(String model, String prompt)
            throws IOException, InterruptedException, OllamaBaseException {
        return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt));
@@ -353,7 +357,9 @@ public class OllamaAPI {
     *
     * @param modelRequest request for '/api/embeddings' endpoint
     * @return embeddings
     * @deprecated Use {@link #embed(OllamaEmbedRequestModel)} instead.
     */
    @Deprecated
    public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException {
        URI uri = URI.create(this.host + "/api/embeddings");
        String jsonData = modelRequest.toString();
@@ -375,6 +381,47 @@ public class OllamaAPI {
        }
    }

    /**
     * Generate embeddings for a given text from a model
     *
     * @param model name of model to generate embeddings from
     * @param inputs text/s to generate embeddings for
     * @return embeddings
     */
    public OllamaEmbedResponseModel embed(String model, List<String> inputs)
            throws IOException, InterruptedException, OllamaBaseException {
        return embed(new OllamaEmbedRequestModel(model, inputs));
    }

    /**
     * Generate embeddings using a {@link OllamaEmbedRequestModel}.
     *
     * @param modelRequest request for '/api/embed' endpoint
     * @return embeddings
     */
    public OllamaEmbedResponseModel embed(OllamaEmbedRequestModel modelRequest)
            throws IOException, InterruptedException, OllamaBaseException {
        URI uri = URI.create(this.host + "/api/embed");
        String jsonData = Utils.getObjectMapper().writeValueAsString(modelRequest);
        HttpClient httpClient = HttpClient.newHttpClient();

        HttpRequest request = HttpRequest.newBuilder(uri)
                .header("Accept", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(jsonData))
                .build();

        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
        int statusCode = response.statusCode();
        String responseBody = response.body();

        if (statusCode == 200) {
            OllamaEmbedResponseModel embeddingResponse =
                    Utils.getObjectMapper().readValue(responseBody, OllamaEmbedResponseModel.class);
            return embeddingResponse;
        } else {
            throw new OllamaBaseException(statusCode + " - " + responseBody);
        }
    }

    /**
     * Generate response for a question to a model running on Ollama server. This is a sync/blocking
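To put the new methods above in context, here is a brief migration sketch, not taken from this diff, showing a call to the deprecated `generateEmbeddings(String, String)` next to the new `embed(String, List<String>)`; the host and model name are placeholders:

```java
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;

import java.util.Collections;
import java.util.List;

public class EmbedMigrationSketch {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Deprecated path: one prompt in, one embedding vector out.
        List<Double> legacy = ollamaAPI.generateEmbeddings("all-minilm", "Why is the sky blue?");

        // New path: a list of inputs in, one embedding vector per input out.
        OllamaEmbedResponseModel response =
                ollamaAPI.embed("all-minilm", Collections.singletonList("Why is the sky blue?"));
        List<Double> first = response.getEmbeddings().get(0);

        System.out.println(legacy.size() + " / " + first.size());
    }
}
```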
@@ -8,7 +8,8 @@ import com.fasterxml.jackson.annotation.JsonValue;
public enum OllamaChatMessageRole {
    SYSTEM("system"),
    USER("user"),
    ASSISTANT("assistant");
    ASSISTANT("assistant"),
    TOOL("tool");

    @JsonValue
    private String roleName;
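For illustration only (not part of this change set): because `roleName` carries `@JsonValue`, Jackson serializes the new constant as the plain string `tool` in chat messages. A minimal sketch:

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;

public class ToolRoleSketch {
    public static void main(String[] args) throws Exception {
        // @JsonValue on roleName makes the enum serialize as its role string.
        String json = new ObjectMapper().writeValueAsString(OllamaChatMessageRole.TOOL);
        System.out.println(json); // prints "tool" (quoted, as a JSON string)
    }
}
```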
@@ -0,0 +1,41 @@
package io.github.ollama4j.models.embeddings;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;

import java.util.List;
import java.util.Map;

import static io.github.ollama4j.utils.Utils.getObjectMapper;

@Data
@RequiredArgsConstructor
@NoArgsConstructor
public class OllamaEmbedRequestModel {
    @NonNull
    private String model;

    @NonNull
    private List<String> input;

    private Map<String, Object> options;

    @JsonProperty(value = "keep_alive")
    private String keepAlive;

    @JsonProperty(value = "truncate")
    private Boolean truncate = true;

    @Override
    public String toString() {
        try {
            return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
}
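To show how the optional fields of the new request model are meant to be used, a small usage sketch follows; it is not part of the diff and assumes the constructor generated by `@RequiredArgsConstructor` and the setters generated by `@Data`:

```java
import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel;

import java.util.Arrays;
import java.util.Map;

public class EmbedRequestSketch {
    public static void main(String[] args) {
        // Required fields (model + input) come from the @RequiredArgsConstructor.
        OllamaEmbedRequestModel request =
                new OllamaEmbedRequestModel("all-minilm", Arrays.asList("Why is the sky blue?"));

        // Optional fields via the Lombok-generated setters.
        request.setKeepAlive("5m");                     // serialized as "keep_alive"
        request.setOptions(Map.of("temperature", 0.0)); // model options passed through as-is
        request.setTruncate(true);                      // truncate inputs that exceed the context

        // toString() pretty-prints the request as the JSON sent to /api/embed.
        System.out.println(request);
    }
}
```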
@@ -0,0 +1,25 @@
package io.github.ollama4j.models.embeddings;

import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;

import java.util.List;

@SuppressWarnings("unused")
@Data
public class OllamaEmbedResponseModel {
    @JsonProperty("model")
    private String model;

    @JsonProperty("embeddings")
    private List<List<Double>> embeddings;

    @JsonProperty("total_duration")
    private long totalDuration;

    @JsonProperty("load_duration")
    private long loadDuration;

    @JsonProperty("prompt_eval_count")
    private int promptEvalCount;
}
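As a quick check that the `@JsonProperty` names above line up with the `/api/embed` payload shown earlier in the docs, here is an illustrative deserialization sketch (not part of the diff), using Jackson's `ObjectMapper` directly:

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;

public class EmbedResponseSketch {
    public static void main(String[] args) throws Exception {
        // Sample body mirroring the /api/embed response documented above.
        String body = "{\"model\":\"all-minilm\","
                + "\"embeddings\":[[-0.034674067,0.030984823,0.0067988685]],"
                + "\"total_duration\":14173700,\"load_duration\":1198800,\"prompt_eval_count\":2}";

        OllamaEmbedResponseModel response =
                new ObjectMapper().readValue(body, OllamaEmbedResponseModel.class);

        // Lombok's @Data supplies the getters used here.
        System.out.println(response.getModel());                     // all-minilm
        System.out.println(response.getEmbeddings().get(0).size());  // 3
    }
}
```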
@@ -10,12 +10,9 @@ package io.github.ollama4j.types;
public class OllamaModelType {
    public static final String GEMMA = "gemma";
    public static final String GEMMA2 = "gemma2";


    public static final String LLAMA2 = "llama2";
    public static final String LLAMA3 = "llama3";
    public static final String LLAMA3_1 = "llama3.1";

    public static final String MISTRAL = "mistral";
    public static final String MIXTRAL = "mixtral";
    public static final String LLAVA = "llava";
@@ -35,7 +32,6 @@ public class OllamaModelType {
    public static final String ZEPHYR = "zephyr";
    public static final String OPENHERMES = "openhermes";
    public static final String QWEN = "qwen";

    public static final String QWEN2 = "qwen2";
    public static final String WIZARDCODER = "wizardcoder";
    public static final String LLAMA2_CHINESE = "llama2-chinese";
@@ -1,5 +1,6 @@
package io.github.ollama4j.utils;

import java.io.IOException;
import java.util.HashMap;

/** Builder class for creating options for Ollama model. */
@@ -207,6 +208,34 @@ public class OptionsBuilder {
        return this;
    }

    /**
     * Alternative to the top_p, and aims to ensure a balance of quality and variety. The parameter p
     * represents the minimum probability for a token to be considered, relative to the probability
     * of the most likely token. For example, with p=0.05 and the most likely token having a
     * probability of 0.9, logits with a value less than 0.045 are filtered out. (Default: 0.0)
     */
    public OptionsBuilder setMinP(float value) {
        options.getOptionsMap().put("min_p", value);
        return this;
    }

    /**
     * Allows passing an option not formally supported by the library
     * @param name The option name for the parameter.
     * @param value The value for the "{name}" parameter.
     * @return The updated OptionsBuilder.
     * @throws IllegalArgumentException if parameter has an unsupported type
     */
    public OptionsBuilder setCustomOption(String name, Object value) throws IllegalArgumentException {
        if (!(value instanceof Integer || value instanceof Float || value instanceof String)) {
            throw new IllegalArgumentException("Invalid type for parameter. Allowed types are: Integer, Float, or String.");
        }
        options.getOptionsMap().put(name, value);
        return this;
    }


    /**
     * Builds the options map.
     *
@@ -215,4 +244,6 @@ public class OptionsBuilder {
    public Options build() {
        return options;
    }

}
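A minimal usage sketch of the two new builder methods, for illustration only; it assumes the `Options` type lives in the same `io.github.ollama4j.utils` package, and the option names below are just examples:

```java
import io.github.ollama4j.utils.Options;
import io.github.ollama4j.utils.OptionsBuilder;

public class OptionsSketch {
    public static void main(String[] args) {
        Options options = new OptionsBuilder()
                .setMinP(0.05f)                        // new: min_p sampling threshold
                .setCustomOption("num_gpu", 1)         // new: Integer pass-through option
                .setCustomOption("custom_flag", "on")  // Strings and Floats are also accepted
                .build();

        // The underlying map now carries min_p plus the custom entries.
        System.out.println(options.getOptionsMap());
    }
}
```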
@@ -1,6 +1,7 @@
package io.github.ollama4j.unittests.jackson;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrowsExactly;

import java.io.File;
import java.util.List;
@@ -59,6 +60,10 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
                .withOptions(b.setSeed(1).build())
                .withOptions(b.setTopK(1).build())
                .withOptions(b.setTopP(1).build())
                .withOptions(b.setMinP(1).build())
                .withOptions(b.setCustomOption("cust_float", 1.0f).build())
                .withOptions(b.setCustomOption("cust_int", 1).build())
                .withOptions(b.setCustomOption("cust_str", "custom").build())
                .build();

        String jsonRequest = serialize(req);
@@ -72,6 +77,20 @@ public class TestChatRequestSerialization extends AbstractSerializationTest<Olla
        assertEquals(1, deserializeRequest.getOptions().get("seed"));
        assertEquals(1, deserializeRequest.getOptions().get("top_k"));
        assertEquals(1.0, deserializeRequest.getOptions().get("top_p"));
        assertEquals(1.0, deserializeRequest.getOptions().get("min_p"));
        assertEquals(1.0, deserializeRequest.getOptions().get("cust_float"));
        assertEquals(1, deserializeRequest.getOptions().get("cust_int"));
        assertEquals("custom", deserializeRequest.getOptions().get("cust_str"));
    }

    @Test
    public void testRequestWithInvalidCustomOption() {
        OptionsBuilder b = new OptionsBuilder();
        assertThrowsExactly(IllegalArgumentException.class, () -> {
            OllamaChatRequest req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
                    .withOptions(b.setCustomOption("cust_obj", new Object()).build())
                    .build();
        });
    }

    @Test