From 63d4de4e246e8aa6b0871fd7ca93b5424ff1d101 Mon Sep 17 00:00:00 2001 From: Markus Klenke Date: Sun, 25 Feb 2024 20:53:45 +0000 Subject: [PATCH 01/69] Adds options to EmbeddingsRequest Additionally refactors the Embedding Models and Tests --- README.md | 12 ++--- pom.xml | 2 +- .../ollama4j/core/OllamaAPI.java | 19 +++++-- .../OllamaEmbeddingResponseModel.java} | 4 +- .../OllamaEmbeddingsRequestBuilder.java | 31 ++++++++++++ .../OllamaEmbeddingsRequestModel.java | 33 +++++++++++++ .../request/ModelEmbeddingsRequest.java | 23 --------- .../integrationtests/TestRealAPIs.java | 49 +++++++++++++------ .../AbstractRequestSerializationTest.java | 35 +++++++++++++ .../jackson/TestChatRequestSerialization.java | 44 +++-------------- .../TestEmbeddingsRequestSerialization.java | 37 ++++++++++++++ .../TestGenerateRequestSerialization.java | 35 ++----------- 12 files changed, 203 insertions(+), 121 deletions(-) rename src/main/java/io/github/amithkoujalgi/ollama4j/core/models/{EmbeddingResponse.java => embeddings/OllamaEmbeddingResponseModel.java} (65%) create mode 100644 src/main/java/io/github/amithkoujalgi/ollama4j/core/models/embeddings/OllamaEmbeddingsRequestBuilder.java create mode 100644 src/main/java/io/github/amithkoujalgi/ollama4j/core/models/embeddings/OllamaEmbeddingsRequestModel.java delete mode 100644 src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/ModelEmbeddingsRequest.java create mode 100644 src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/AbstractRequestSerializationTest.java create mode 100644 src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestEmbeddingsRequestSerialization.java diff --git a/README.md b/README.md index 42270f3..6cdfc25 100644 --- a/README.md +++ b/README.md @@ -67,7 +67,7 @@ In your Maven project, add this dependency: io.github.amithkoujalgi ollama4j - 1.0.47 + 1.0.57 ``` @@ -125,15 +125,15 @@ Actions CI workflow. - [x] Update request body creation with Java objects - [ ] Async APIs for images - [ ] Add custom headers to requests -- [ ] Add additional params for `ask` APIs such as: +- [x] Add additional params for `ask` APIs such as: - [x] `options`: additional model parameters for the Modelfile such as `temperature` - Supported [params](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values). 
- - [ ] `system`: system prompt to (overrides what is defined in the Modelfile) - - [ ] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile) - - [ ] `context`: the context parameter returned from a previous request, which can be used to keep a + - [x] `system`: system prompt to (overrides what is defined in the Modelfile) + - [x] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile) + - [x] `context`: the context parameter returned from a previous request, which can be used to keep a short conversational memory - - [ ] `stream`: Add support for streaming responses from the model + - [x] `stream`: Add support for streaming responses from the model - [ ] Add test cases - [ ] Handle exceptions better (maybe throw more appropriate exceptions) diff --git a/pom.xml b/pom.xml index d375a54..496c817 100644 --- a/pom.xml +++ b/pom.xml @@ -99,7 +99,7 @@ ${skipUnitTests} - **/unittests/*.java + **/unittests/**/*.java diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java index ec772f1..25b3a37 100644 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java @@ -6,10 +6,11 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage; import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; +import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel; +import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest; import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest; -import io.github.amithkoujalgi.ollama4j.core.models.request.ModelEmbeddingsRequest; import io.github.amithkoujalgi.ollama4j.core.models.request.ModelRequest; import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaChatEndpointCaller; import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaGenerateEndpointCaller; @@ -313,8 +314,18 @@ public class OllamaAPI { */ public List generateEmbeddings(String model, String prompt) throws IOException, InterruptedException, OllamaBaseException { + return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt)); + } + + /** + * Generate embeddings using a {@link OllamaEmbeddingsRequestModel}. 
+ * + * @param modelRequest request for '/api/embeddings' endpoint + * @return embeddings + */ + public List generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException{ URI uri = URI.create(this.host + "/api/embeddings"); - String jsonData = new ModelEmbeddingsRequest(model, prompt).toString(); + String jsonData = modelRequest.toString(); HttpClient httpClient = HttpClient.newHttpClient(); HttpRequest.Builder requestBuilder = getRequestBuilderDefault(uri) @@ -325,8 +336,8 @@ public class OllamaAPI { int statusCode = response.statusCode(); String responseBody = response.body(); if (statusCode == 200) { - EmbeddingResponse embeddingResponse = - Utils.getObjectMapper().readValue(responseBody, EmbeddingResponse.class); + OllamaEmbeddingResponseModel embeddingResponse = + Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class); return embeddingResponse.getEmbedding(); } else { throw new OllamaBaseException(statusCode + " - " + responseBody); diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/EmbeddingResponse.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/embeddings/OllamaEmbeddingResponseModel.java similarity index 65% rename from src/main/java/io/github/amithkoujalgi/ollama4j/core/models/EmbeddingResponse.java rename to src/main/java/io/github/amithkoujalgi/ollama4j/core/models/embeddings/OllamaEmbeddingResponseModel.java index e3040a2..85dba31 100644 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/EmbeddingResponse.java +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/embeddings/OllamaEmbeddingResponseModel.java @@ -1,4 +1,4 @@ -package io.github.amithkoujalgi.ollama4j.core.models; +package io.github.amithkoujalgi.ollama4j.core.models.embeddings; import com.fasterxml.jackson.annotation.JsonProperty; @@ -7,7 +7,7 @@ import lombok.Data; @SuppressWarnings("unused") @Data -public class EmbeddingResponse { +public class OllamaEmbeddingResponseModel { @JsonProperty("embedding") private List embedding; } diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/embeddings/OllamaEmbeddingsRequestBuilder.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/embeddings/OllamaEmbeddingsRequestBuilder.java new file mode 100644 index 0000000..ef7a84e --- /dev/null +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/embeddings/OllamaEmbeddingsRequestBuilder.java @@ -0,0 +1,31 @@ +package io.github.amithkoujalgi.ollama4j.core.models.embeddings; + +import io.github.amithkoujalgi.ollama4j.core.utils.Options; + +public class OllamaEmbeddingsRequestBuilder { + + private OllamaEmbeddingsRequestBuilder(String model, String prompt){ + request = new OllamaEmbeddingsRequestModel(model, prompt); + } + + private OllamaEmbeddingsRequestModel request; + + public static OllamaEmbeddingsRequestBuilder getInstance(String model, String prompt){ + return new OllamaEmbeddingsRequestBuilder(model, prompt); + } + + public OllamaEmbeddingsRequestModel build(){ + return request; + } + + public OllamaEmbeddingsRequestBuilder withOptions(Options options){ + this.request.setOptions(options.getOptionsMap()); + return this; + } + + public OllamaEmbeddingsRequestBuilder withKeepAlive(String keepAlive){ + this.request.setKeepAlive(keepAlive); + return this; + } + +} diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/embeddings/OllamaEmbeddingsRequestModel.java 
b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/embeddings/OllamaEmbeddingsRequestModel.java new file mode 100644 index 0000000..a369124 --- /dev/null +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/embeddings/OllamaEmbeddingsRequestModel.java @@ -0,0 +1,33 @@ +package io.github.amithkoujalgi.ollama4j.core.models.embeddings; + +import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; +import java.util.Map; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonProcessingException; +import lombok.Data; +import lombok.NoArgsConstructor; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; + +@Data +@RequiredArgsConstructor +@NoArgsConstructor +public class OllamaEmbeddingsRequestModel { + @NonNull + private String model; + @NonNull + private String prompt; + + protected Map options; + @JsonProperty(value = "keep_alive") + private String keepAlive; + + @Override + public String toString() { + try { + return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } +} diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/ModelEmbeddingsRequest.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/ModelEmbeddingsRequest.java deleted file mode 100644 index 1455a94..0000000 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/ModelEmbeddingsRequest.java +++ /dev/null @@ -1,23 +0,0 @@ -package io.github.amithkoujalgi.ollama4j.core.models.request; - -import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper; - -import com.fasterxml.jackson.core.JsonProcessingException; -import lombok.AllArgsConstructor; -import lombok.Data; - -@Data -@AllArgsConstructor -public class ModelEmbeddingsRequest { - private String model; - private String prompt; - - @Override - public String toString() { - try { - return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } -} diff --git a/src/test/java/io/github/amithkoujalgi/ollama4j/integrationtests/TestRealAPIs.java b/src/test/java/io/github/amithkoujalgi/ollama4j/integrationtests/TestRealAPIs.java index dc91287..d822077 100644 --- a/src/test/java/io/github/amithkoujalgi/ollama4j/integrationtests/TestRealAPIs.java +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/integrationtests/TestRealAPIs.java @@ -10,6 +10,8 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole; import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; +import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; +import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; import java.io.File; import java.io.IOException; @@ -61,7 +63,7 @@ class TestRealAPIs { } catch (HttpConnectTimeoutException e) { fail(e.getMessage()); } catch (Exception e) { - throw new RuntimeException(e); + fail(e); } } @@ -73,7 +75,7 @@ class TestRealAPIs { assertNotNull(ollamaAPI.listModels()); ollamaAPI.listModels().forEach(System.out::println); } catch (IOException | OllamaBaseException | 
InterruptedException | URISyntaxException e) { - throw new RuntimeException(e); + fail(e); } } @@ -88,7 +90,7 @@ class TestRealAPIs { .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel())); assertTrue(found); } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { - throw new RuntimeException(e); + fail(e); } } @@ -101,7 +103,7 @@ class TestRealAPIs { assertNotNull(modelDetails); System.out.println(modelDetails); } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { - throw new RuntimeException(e); + fail(e); } } @@ -119,7 +121,7 @@ class TestRealAPIs { assertNotNull(result.getResponse()); assertFalse(result.getResponse().isEmpty()); } catch (IOException | OllamaBaseException | InterruptedException e) { - throw new RuntimeException(e); + fail(e); } } @@ -145,7 +147,7 @@ class TestRealAPIs { assertFalse(result.getResponse().isEmpty()); assertEquals(sb.toString().trim(), result.getResponse().trim()); } catch (IOException | OllamaBaseException | InterruptedException e) { - throw new RuntimeException(e); + fail(e); } } @@ -163,7 +165,7 @@ class TestRealAPIs { assertNotNull(result.getResponse()); assertFalse(result.getResponse().isEmpty()); } catch (IOException | OllamaBaseException | InterruptedException e) { - throw new RuntimeException(e); + fail(e); } } @@ -183,7 +185,7 @@ class TestRealAPIs { assertFalse(chatResult.getResponse().isBlank()); assertEquals(4,chatResult.getChatHistory().size()); } catch (IOException | OllamaBaseException | InterruptedException e) { - throw new RuntimeException(e); + fail(e); } } @@ -205,7 +207,7 @@ class TestRealAPIs { assertTrue(chatResult.getResponse().startsWith("NI")); assertEquals(3, chatResult.getChatHistory().size()); } catch (IOException | OllamaBaseException | InterruptedException e) { - throw new RuntimeException(e); + fail(e); } } @@ -230,7 +232,7 @@ class TestRealAPIs { assertNotNull(chatResult); assertEquals(sb.toString().trim(), chatResult.getResponse().trim()); } catch (IOException | OllamaBaseException | InterruptedException e) { - throw new RuntimeException(e); + fail(e); } } @@ -261,7 +263,7 @@ class TestRealAPIs { } catch (IOException | OllamaBaseException | InterruptedException e) { - throw new RuntimeException(e); + fail(e); } } @@ -278,7 +280,7 @@ class TestRealAPIs { OllamaChatResult chatResult = ollamaAPI.chat(requestModel); assertNotNull(chatResult); } catch (IOException | OllamaBaseException | InterruptedException e) { - throw new RuntimeException(e); + fail(e); } } @@ -298,7 +300,7 @@ class TestRealAPIs { assertNotNull(result.getResponse()); assertFalse(result.getResponse().isEmpty()); } catch (IOException | OllamaBaseException | InterruptedException e) { - throw new RuntimeException(e); + fail(e); } } @@ -322,7 +324,7 @@ class TestRealAPIs { assertFalse(result.getResponse().isEmpty()); assertEquals(sb.toString().trim(), result.getResponse().trim()); } catch (IOException | OllamaBaseException | InterruptedException e) { - throw new RuntimeException(e); + fail(e); } } @@ -342,7 +344,24 @@ class TestRealAPIs { assertNotNull(result.getResponse()); assertFalse(result.getResponse().isEmpty()); } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { - throw new RuntimeException(e); + fail(e); + } + } + + @Test + @Order(3) + public void testEmbedding() { + testEndpointReachability(); + try { + OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder + .getInstance(config.getModel(), "What is the 
capital of France?").build(); + + List embeddings = ollamaAPI.generateEmbeddings(request); + + assertNotNull(embeddings); + assertFalse(embeddings.isEmpty()); + } catch (IOException | OllamaBaseException | InterruptedException e) { + fail(e); } } } diff --git a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/AbstractRequestSerializationTest.java b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/AbstractRequestSerializationTest.java new file mode 100644 index 0000000..c6b2ff5 --- /dev/null +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/AbstractRequestSerializationTest.java @@ -0,0 +1,35 @@ +package io.github.amithkoujalgi.ollama4j.unittests.jackson; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.github.amithkoujalgi.ollama4j.core.utils.Utils; + +public abstract class AbstractRequestSerializationTest { + + protected ObjectMapper mapper = Utils.getObjectMapper(); + + protected String serializeRequest(T req) { + try { + return mapper.writeValueAsString(req); + } catch (JsonProcessingException e) { + fail("Could not serialize request!", e); + return null; + } + } + + protected T deserializeRequest(String jsonRequest, Class requestClass) { + try { + return mapper.readValue(jsonRequest, requestClass); + } catch (JsonProcessingException e) { + fail("Could not deserialize jsonRequest!", e); + return null; + } + } + + protected void assertEqualsAfterUnmarshalling(T unmarshalledRequest, + T req) { + assertEquals(req, unmarshalledRequest); + } +} diff --git a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java index f5fa5c9..c5a7060 100644 --- a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java @@ -1,7 +1,6 @@ package io.github.amithkoujalgi.ollama4j.unittests.jackson; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.util.List; @@ -10,21 +9,15 @@ import org.json.JSONObject; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; - import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole; import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; -import io.github.amithkoujalgi.ollama4j.core.utils.Utils; -public class TestChatRequestSerialization { +public class TestChatRequestSerialization extends AbstractRequestSerializationTest{ private OllamaChatRequestBuilder builder; - private ObjectMapper mapper = Utils.getObjectMapper(); - @BeforeEach public void init() { builder = OllamaChatRequestBuilder.getInstance("DummyModel"); @@ -32,10 +25,9 @@ public class TestChatRequestSerialization { @Test public void testRequestOnlyMandatoryFields() { - OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt", - 
List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build(); + OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build(); String jsonRequest = serializeRequest(req); - assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest), req); + assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req); } @Test @@ -44,7 +36,7 @@ public class TestChatRequestSerialization { .withMessage(OllamaChatMessageRole.USER, "Some prompt") .build(); String jsonRequest = serializeRequest(req); - assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest), req); + assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req); } @Test @@ -52,7 +44,7 @@ public class TestChatRequestSerialization { OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt", List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build(); String jsonRequest = serializeRequest(req); - assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest), req); + assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req); } @Test @@ -62,7 +54,7 @@ public class TestChatRequestSerialization { .withOptions(b.setMirostat(1).build()).build(); String jsonRequest = serializeRequest(req); - OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest); + OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaChatRequestModel.class); assertEqualsAfterUnmarshalling(deserializeRequest, req); assertEquals(1, deserializeRequest.getOptions().get("mirostat")); } @@ -79,28 +71,4 @@ public class TestChatRequestSerialization { String requestFormatProperty = jsonObject.getString("format"); assertEquals("json", requestFormatProperty); } - - private String serializeRequest(OllamaChatRequestModel req) { - try { - return mapper.writeValueAsString(req); - } catch (JsonProcessingException e) { - fail("Could not serialize request!", e); - return null; - } - } - - private OllamaChatRequestModel deserializeRequest(String jsonRequest) { - try { - return mapper.readValue(jsonRequest, OllamaChatRequestModel.class); - } catch (JsonProcessingException e) { - fail("Could not deserialize jsonRequest!", e); - return null; - } - } - - private void assertEqualsAfterUnmarshalling(OllamaChatRequestModel unmarshalledRequest, - OllamaChatRequestModel req) { - assertEquals(req, unmarshalledRequest); - } - } diff --git a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestEmbeddingsRequestSerialization.java b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestEmbeddingsRequestSerialization.java new file mode 100644 index 0000000..ff1e308 --- /dev/null +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestEmbeddingsRequestSerialization.java @@ -0,0 +1,37 @@ +package io.github.amithkoujalgi.ollama4j.unittests.jackson; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; +import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; +import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; + +public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializationTest{ + + private OllamaEmbeddingsRequestBuilder builder; + + @BeforeEach + public void init() { + 
builder = OllamaEmbeddingsRequestBuilder.getInstance("DummyModel","DummyPrompt"); + } + + @Test + public void testRequestOnlyMandatoryFields() { + OllamaEmbeddingsRequestModel req = builder.build(); + String jsonRequest = serializeRequest(req); + assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class), req); + } + + @Test + public void testRequestWithOptions() { + OptionsBuilder b = new OptionsBuilder(); + OllamaEmbeddingsRequestModel req = builder + .withOptions(b.setMirostat(1).build()).build(); + + String jsonRequest = serializeRequest(req); + OllamaEmbeddingsRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class); + assertEqualsAfterUnmarshalling(deserializeRequest, req); + assertEquals(1, deserializeRequest.getOptions().get("mirostat")); + } +} diff --git a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestGenerateRequestSerialization.java b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestGenerateRequestSerialization.java index 7cf0513..03610f7 100644 --- a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestGenerateRequestSerialization.java +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestGenerateRequestSerialization.java @@ -1,26 +1,20 @@ package io.github.amithkoujalgi.ollama4j.unittests.jackson; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.fail; import org.json.JSONObject; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestBuilder; import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; -import io.github.amithkoujalgi.ollama4j.core.utils.Utils; -public class TestGenerateRequestSerialization { +public class TestGenerateRequestSerialization extends AbstractRequestSerializationTest{ private OllamaGenerateRequestBuilder builder; - private ObjectMapper mapper = Utils.getObjectMapper(); - @BeforeEach public void init() { builder = OllamaGenerateRequestBuilder.getInstance("DummyModel"); @@ -31,7 +25,7 @@ public class TestGenerateRequestSerialization { OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build(); String jsonRequest = serializeRequest(req); - assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest), req); + assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class), req); } @Test @@ -41,7 +35,7 @@ public class TestGenerateRequestSerialization { builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build(); String jsonRequest = serializeRequest(req); - OllamaGenerateRequestModel deserializeRequest = deserializeRequest(jsonRequest); + OllamaGenerateRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class); assertEqualsAfterUnmarshalling(deserializeRequest, req); assertEquals(1, deserializeRequest.getOptions().get("mirostat")); } @@ -59,27 +53,4 @@ public class TestGenerateRequestSerialization { assertEquals("json", requestFormatProperty); } - private String serializeRequest(OllamaGenerateRequestModel req) { - try { - return mapper.writeValueAsString(req); - } catch (JsonProcessingException e) { - fail("Could not 
serialize request!", e); - return null; - } - } - - private OllamaGenerateRequestModel deserializeRequest(String jsonRequest) { - try { - return mapper.readValue(jsonRequest, OllamaGenerateRequestModel.class); - } catch (JsonProcessingException e) { - fail("Could not deserialize jsonRequest!", e); - return null; - } - } - - private void assertEqualsAfterUnmarshalling(OllamaGenerateRequestModel unmarshalledRequest, - OllamaGenerateRequestModel req) { - assertEquals(req, unmarshalledRequest); - } - } From 9c46b510d867f253e549da75e5b3e143bf2c7f7b Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Wed, 28 Feb 2024 12:55:26 +0000 Subject: [PATCH 02/69] [maven-release-plugin] prepare release v1.0.57 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 496c817..62041c6 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.57-SNAPSHOT + 1.0.57 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.57 From 20774fca6bfee9a6922deb34fb9553b819deae39 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Wed, 28 Feb 2024 12:55:28 +0000 Subject: [PATCH 03/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 62041c6..ebc477e 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.57 + 1.0.58-SNAPSHOT Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.57 + v1.0.16 From d0b0a0fc979e81b24bb4824fbe2821e14d724d47 Mon Sep 17 00:00:00 2001 From: Amith Koujalgi Date: Thu, 29 Feb 2024 08:54:03 +0530 Subject: [PATCH 04/69] Updated model types --- .../ollama4j/core/types/OllamaModelType.java | 91 +++++++++++-------- 1 file changed, 53 insertions(+), 38 deletions(-) diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java index 96bcc43..cc8280c 100644 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java @@ -8,57 +8,72 @@ package io.github.amithkoujalgi.ollama4j.core.types; */ @SuppressWarnings("ALL") public class OllamaModelType { + public static final String GEMMA = "gemma"; public static final String LLAMA2 = "llama2"; public static final String MISTRAL = "mistral"; - public static final String LLAVA = "llava"; public static final String MIXTRAL = "mixtral"; - public static final String STARLING_LM = "starling-lm"; + public static final String LLAVA = "llava"; public static final String NEURAL_CHAT = "neural-chat"; public static final String CODELLAMA = "codellama"; - public static final String LLAMA2_UNCENSORED = "llama2-uncensored"; public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral"; + public static final String MISTRAL_OPENORCA = "mistral-openorca"; + public static final String LLAMA2_UNCENSORED = "llama2-uncensored"; + public static final String PHI = "phi"; public static final String ORCA_MINI = "orca-mini"; + public static final String DEEPSEEK_CODER = "deepseek-coder"; + public static 
final String DOLPHIN_MISTRAL = "dolphin-mistral"; public static final String VICUNA = "vicuna"; public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored"; - public static final String PHIND_CODELLAMA = "phind-codellama"; - public static final String PHI = "phi"; public static final String ZEPHYR = "zephyr"; + public static final String OPENHERMES = "openhermes"; + public static final String QWEN = "qwen"; public static final String WIZARDCODER = "wizardcoder"; - public static final String MISTRAL_OPENORCA = "mistral-openorca"; - public static final String NOUS_HERMES = "nous-hermes"; - public static final String DEEPSEEK_CODER = "deepseek-coder"; - public static final String WIZARD_MATH = "wizard-math"; public static final String LLAMA2_CHINESE = "llama2-chinese"; - public static final String FALCON = "falcon"; - public static final String ORCA2 = "orca2"; - public static final String STABLE_BELUGA = "stable-beluga"; - public static final String CODEUP = "codeup"; - public static final String EVERYTHINGLM = "everythinglm"; - public static final String MEDLLAMA2 = "medllama2"; - public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored"; - public static final String STARCODER = "starcoder"; - public static final String DOLPHIN22_MISTRAL = "dolphin2.2-mistral"; + public static final String TINYLLAMA = "tinyllama"; + public static final String PHIND_CODELLAMA = "phind-codellama"; public static final String OPENCHAT = "openchat"; - public static final String WIZARD_VICUNA = "wizard-vicuna"; - public static final String OPENHERMES25_MISTRAL = "openhermes2.5-mistral"; - public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2"; + public static final String ORCA2 = "orca2"; + public static final String FALCON = "falcon"; + public static final String WIZARD_MATH = "wizard-math"; + public static final String TINYDOLPHIN = "tinydolphin"; + public static final String NOUS_HERMES = "nous-hermes"; public static final String YI = "yi"; - public static final String YARN_MISTRAL = "yarn-mistral"; - public static final String SAMANTHA_MISTRAL = "samantha-mistral"; - public static final String SQLCODER = "sqlcoder"; - public static final String YARN_LLAMA2 = "yarn-llama2"; - public static final String MEDITRON = "meditron"; - public static final String STABLELM_ZEPHYR = "stablelm-zephyr"; - public static final String OPENHERMES2_MISTRAL = "openhermes2-mistral"; - public static final String DEEPSEEK_LLM = "deepseek-llm"; - public static final String MISTRALLITE = "mistrallite"; - public static final String DOLPHIN21_MISTRAL = "dolphin2.1-mistral"; - public static final String WIZARDLM = "wizardlm"; - public static final String CODEBOOGA = "codebooga"; - public static final String MAGICODER = "magicoder"; - public static final String GOLIATH = "goliath"; - public static final String NEXUSRAVEN = "nexusraven"; - public static final String ALFRED = "alfred"; - public static final String XWINLM = "xwinlm"; + public static final String DOLPHIN_PHI = "dolphin-phi"; + public static final String STARLING_LM = "starling-lm"; + public static final String STARCODER = "starcoder"; + public static final String CODEUP = "codeup"; + public static final String MEDLLAMA2 = "medllama2"; + public static final String STABLE_CODE = "stable-code"; + public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored"; public static final String BAKLLAVA = "bakllava"; + public static final String EVERYTHINGLM = "everythinglm"; + public static final String SOLAR = "solar"; + public 
static final String STABLE_BELUGA = "stable-beluga"; + public static final String SQLCODER = "sqlcoder"; + public static final String YARN_MISTRAL = "yarn-mistral"; + public static final String NOUS_HERMES2_MIXTRAL = "nous-hermes2-mixtral"; + public static final String SAMANTHA_MISTRAL = "samantha-mistral"; + public static final String STABLELM_ZEPHYR = "stablelm-zephyr"; + public static final String MEDITRON = "meditron"; + public static final String WIZARD_VICUNA = "wizard-vicuna"; + public static final String STABLELM2 = "stablelm2"; + public static final String MAGICODER = "magicoder"; + public static final String YARN_LLAMA2 = "yarn-llama2"; + public static final String NOUS_HERMES2 = "nous-hermes2"; + public static final String DEEPSEEK_LLM = "deepseek-llm"; + public static final String LLAMA_PRO = "llama-pro"; + public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2"; + public static final String CODEBOOGA = "codebooga"; + public static final String MISTRALLITE = "mistrallite"; + public static final String NEXUSRAVEN = "nexusraven"; + public static final String GOLIATH = "goliath"; + public static final String NOMIC_EMBED_TEXT = "nomic-embed-text"; + public static final String NOTUX = "notux"; + public static final String ALFRED = "alfred"; + public static final String MEGADOLPHIN = "megadolphin"; + public static final String WIZARDLM = "wizardlm"; + public static final String XWINLM = "xwinlm"; + public static final String NOTUS = "notus"; + public static final String DUCKDB_NSQL = "duckdb-nsql"; + public static final String ALL_MINILM = "all-minilm"; } From b1ec12c4e920d899789f177f1a79bc621f3befdc Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Thu, 29 Feb 2024 03:25:17 +0000 Subject: [PATCH 05/69] [maven-release-plugin] prepare release v1.0.58 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index ebc477e..e42d09f 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.58-SNAPSHOT + 1.0.58 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.58 From 11701fb222135be7ce8eed1eae32c66d28cca9ad Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Thu, 29 Feb 2024 03:25:18 +0000 Subject: [PATCH 06/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index e42d09f..156b59c 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.58 + 1.0.59-SNAPSHOT Ollama4j Java library for interacting with Ollama API. 
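Taken together, the embeddings request API introduced in the first patch and the embedding-oriented model constants added above (for example `NOMIC_EMBED_TEXT` or `ALL_MINILM`) can be exercised as follows. This is a minimal usage sketch, not part of the changeset itself: it assumes an Ollama server reachable on the default local port, a model that has already been pulled, and illustrative option values.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

import java.util.List;

public class EmbeddingsWithOptionsExample {

    public static void main(String[] args) throws Exception {
        // Assumes a locally running Ollama server with the chosen model already pulled.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Build a request that carries the newly supported 'options' and 'keep_alive' fields.
        // The option value and keep-alive duration here are placeholders.
        OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder
                .getInstance(OllamaModelType.NOMIC_EMBED_TEXT, "What is the capital of France?")
                .withOptions(new OptionsBuilder().setTemperature(0.0f).build())
                .withKeepAlive("5m")
                .build();

        List<Double> embeddings = ollamaAPI.generateEmbeddings(request);
        System.out.println("Embedding dimensions: " + embeddings.size());
    }
}
```

Here `options` carries the same model parameters accepted by the generate and chat endpoints, while `keep_alive` controls how long the model stays loaded in memory after the request is served.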
@@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.58 + v1.0.16 From 7336668f0c68f1227b20c2fcd1227c11dc45b210 Mon Sep 17 00:00:00 2001 From: Amith Koujalgi Date: Sat, 2 Mar 2024 20:10:04 +0530 Subject: [PATCH 07/69] [testing] - Added release-assets upload GH action --- .github/workflows/maven-publish.yml | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/.github/workflows/maven-publish.yml b/.github/workflows/maven-publish.yml index e1ac127..95141b5 100644 --- a/.github/workflows/maven-publish.yml +++ b/.github/workflows/maven-publish.yml @@ -12,12 +12,23 @@ on: branches: [ "main" ] workflow_dispatch: + +permissions: + contents: read + id-token: write + packages: write + jobs: build: + runs-on: ubuntu-latest + permissions: contents: write packages: write + pull-requests: write + repository-projects: write + steps: - uses: actions/checkout@v3 - name: Set up JDK 11 @@ -65,4 +76,11 @@ jobs: env: MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }} MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }} - MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} \ No newline at end of file + MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} + + - name: Release Assets + uses: softprops/action-gh-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + files: target/*.jar \ No newline at end of file From e45246a767d5052d14e90d08f816e11978cf7727 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Sat, 2 Mar 2024 14:46:50 +0000 Subject: [PATCH 08/69] [maven-release-plugin] prepare release v1.0.59 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 156b59c..2d7ee30 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.59-SNAPSHOT + 1.0.59 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.59 From ba26d620c49244cad4482f91957f23ed6cf3c8e1 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Sat, 2 Mar 2024 14:46:51 +0000 Subject: [PATCH 09/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 2d7ee30..41ac193 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.59 + 1.0.60-SNAPSHOT Ollama4j Java library for interacting with Ollama API. 
@@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.59 + v1.0.16 From 976a3b82e57aeaabac22d8f37e7d1bcf19ae03ef Mon Sep 17 00:00:00 2001 From: Amith Koujalgi Date: Sat, 2 Mar 2024 20:27:37 +0530 Subject: [PATCH 10/69] [testing] - Added release-assets upload GH action --- .github/workflows/maven-publish.yml | 20 +------------- .github/workflows/release-jar.yml | 42 +++++++++++++++++++++++++++++ 2 files changed, 43 insertions(+), 19 deletions(-) create mode 100644 .github/workflows/release-jar.yml diff --git a/.github/workflows/maven-publish.yml b/.github/workflows/maven-publish.yml index 95141b5..e1ac127 100644 --- a/.github/workflows/maven-publish.yml +++ b/.github/workflows/maven-publish.yml @@ -12,23 +12,12 @@ on: branches: [ "main" ] workflow_dispatch: - -permissions: - contents: read - id-token: write - packages: write - jobs: build: - runs-on: ubuntu-latest - permissions: contents: write packages: write - pull-requests: write - repository-projects: write - steps: - uses: actions/checkout@v3 - name: Set up JDK 11 @@ -76,11 +65,4 @@ jobs: env: MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }} MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }} - MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} - - - name: Release Assets - uses: softprops/action-gh-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - files: target/*.jar \ No newline at end of file + MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} \ No newline at end of file diff --git a/.github/workflows/release-jar.yml b/.github/workflows/release-jar.yml new file mode 100644 index 0000000..c3b34c6 --- /dev/null +++ b/.github/workflows/release-jar.yml @@ -0,0 +1,42 @@ +name: Release JAR + +on: + push: + tags: + - '**' + +permissions: + contents: read + id-token: write + packages: write + +jobs: + build: + + runs-on: ubuntu-latest + + permissions: + contents: write + pull-requests: write + repository-projects: write + + steps: + - uses: actions/checkout@v3 + - name: Set up JDK 11 + uses: actions/setup-java@v3 + with: + java-version: '11' + distribution: 'temurin' + server-id: github # Value of the distributionManagement/repository/id field of the pom.xml + settings-path: ${{ github.workspace }} # location for the settings.xml file + + - name: Build with Maven + run: mvn -B clean install package --file pom.xml + + - name: Release Assets + uses: softprops/action-gh-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + if: startsWith(github.ref, 'refs/tags/') + with: + files: target/*.jar From 0f00f05e3dbebbe4a21028b95d890f497e65c6f5 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Sat, 2 Mar 2024 14:58:48 +0000 Subject: [PATCH 11/69] [maven-release-plugin] prepare release v1.0.60 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 41ac193..6059ca9 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.60-SNAPSHOT + 1.0.60 Ollama4j Java library for interacting with Ollama API. 
@@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.60 From e5296c1067a8baa7459c14c935c86baff60517bb Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Sat, 2 Mar 2024 14:58:49 +0000 Subject: [PATCH 12/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 6059ca9..19ce09f 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.60 + 1.0.61-SNAPSHOT Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.60 + v1.0.16 From 2cd47dbfaa410bd610c007b6cd47322a14688358 Mon Sep 17 00:00:00 2001 From: Amith Koujalgi Date: Sat, 2 Mar 2024 20:37:38 +0530 Subject: [PATCH 13/69] [testing] - Added release-assets upload GH action --- .github/workflows/release-jar.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/release-jar.yml b/.github/workflows/release-jar.yml index c3b34c6..b9014ef 100644 --- a/.github/workflows/release-jar.yml +++ b/.github/workflows/release-jar.yml @@ -37,6 +37,5 @@ jobs: uses: softprops/action-gh-release@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - if: startsWith(github.ref, 'refs/tags/') with: files: target/*.jar From 7dd556293f9e0de42da8eacf22a4c49dce047dd3 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Sat, 2 Mar 2024 15:08:57 +0000 Subject: [PATCH 14/69] [maven-release-plugin] prepare release v1.0.61 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 19ce09f..5d07ed3 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.61-SNAPSHOT + 1.0.61 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.61 From c1f3c51f8849b21690a5730fc0c84c3094aa2b89 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Sat, 2 Mar 2024 15:08:58 +0000 Subject: [PATCH 15/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 5d07ed3..e064cf7 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.61 + 1.0.62-SNAPSHOT Ollama4j Java library for interacting with Ollama API. 
@@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.61 + v1.0.16 From 1eec22ca1af748298806afe20eaa0f1e79e2b1b3 Mon Sep 17 00:00:00 2001 From: Amith Koujalgi Date: Sat, 2 Mar 2024 23:18:15 +0530 Subject: [PATCH 16/69] Added release-assets GH action --- .github/workflows/release-jar.yml | 41 ------------------------------- 1 file changed, 41 deletions(-) delete mode 100644 .github/workflows/release-jar.yml diff --git a/.github/workflows/release-jar.yml b/.github/workflows/release-jar.yml deleted file mode 100644 index b9014ef..0000000 --- a/.github/workflows/release-jar.yml +++ /dev/null @@ -1,41 +0,0 @@ -name: Release JAR - -on: - push: - tags: - - '**' - -permissions: - contents: read - id-token: write - packages: write - -jobs: - build: - - runs-on: ubuntu-latest - - permissions: - contents: write - pull-requests: write - repository-projects: write - - steps: - - uses: actions/checkout@v3 - - name: Set up JDK 11 - uses: actions/setup-java@v3 - with: - java-version: '11' - distribution: 'temurin' - server-id: github # Value of the distributionManagement/repository/id field of the pom.xml - settings-path: ${{ github.workspace }} # location for the settings.xml file - - - name: Build with Maven - run: mvn -B clean install package --file pom.xml - - - name: Release Assets - uses: softprops/action-gh-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - files: target/*.jar From 18f27775b06f5a5df0b0ad6c258614da5ef23879 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Sat, 2 Mar 2024 17:49:31 +0000 Subject: [PATCH 17/69] [maven-release-plugin] prepare release v1.0.62 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index e064cf7..f57b37d 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.62-SNAPSHOT + 1.0.62 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.62 From fb4b7a7ce5acbfee4ba1d585dcd5380d10694f23 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Sat, 2 Mar 2024 17:49:32 +0000 Subject: [PATCH 18/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index f57b37d..1936330 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.62 + 1.0.63-SNAPSHOT Ollama4j Java library for interacting with Ollama API. 
@@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.62 + v1.0.16 From 4b2d566fd9a990b05cee588b54c5a33482db3e98 Mon Sep 17 00:00:00 2001 From: anjeongkyun Date: Tue, 19 Mar 2024 20:44:50 +0900 Subject: [PATCH 19/69] Fixes generate method of prompt builder --- docs/docs/apis-generate/prompt-builder.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/apis-generate/prompt-builder.md b/docs/docs/apis-generate/prompt-builder.md index 7950e0e..a798808 100644 --- a/docs/docs/apis-generate/prompt-builder.md +++ b/docs/docs/apis-generate/prompt-builder.md @@ -42,7 +42,7 @@ public class AskPhi { .addSeparator() .add("How do I read a file in Go and print its contents to stdout?"); - OllamaResult response = ollamaAPI.generate(model, promptBuilder.build()); + OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build()); System.out.println(response.getResponse()); } } From e88711a0176963753657865321ffb733a376e0e3 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Wed, 20 Mar 2024 10:44:52 +0000 Subject: [PATCH 20/69] [maven-release-plugin] prepare release v1.0.63 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 1936330..6fc9fdc 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.63-SNAPSHOT + 1.0.63 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.63 From 44949c055925efc35b2f736639c4ecae08b23ed0 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Wed, 20 Mar 2024 10:44:54 +0000 Subject: [PATCH 21/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 6fc9fdc..4d5d054 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.63 + 1.0.64-SNAPSHOT Ollama4j Java library for interacting with Ollama API. 
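The prompt-builder documentation fix above reflects that `generate` now takes an explicit `Options` argument. Below is a short sketch of the corrected call, assuming a running Ollama server; the model constant, option value, and import paths follow the package layout seen elsewhere in this changeset and are illustrative rather than prescriptive.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class GenerateWithOptionsExample {

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // generate(...) takes an Options object as its third argument:
        // an empty OptionsBuilder().build() keeps the model defaults,
        // while individual setters override specific parameters.
        OllamaResult response = ollamaAPI.generate(
                OllamaModelType.PHI,
                "How do I read a file in Go and print its contents to stdout?",
                new OptionsBuilder().setTemperature(0.7f).build());

        System.out.println(response.getResponse());
    }
}
```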
@@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.63 + v1.0.16 From fa20daf6e540e8a40bdea4160d28e757f21d6f3a Mon Sep 17 00:00:00 2001 From: anjeongkyun Date: Sun, 21 Apr 2024 23:37:18 +0900 Subject: [PATCH 22/69] Adds test case of testRequestWithOptions --- .../jackson/TestChatRequestSerialization.java | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java index c5a7060..f6237cc 100644 --- a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java @@ -51,12 +51,27 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe public void testRequestWithOptions() { OptionsBuilder b = new OptionsBuilder(); OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") - .withOptions(b.setMirostat(1).build()).build(); + .withOptions(b.setMirostat(1).build()) + .withOptions(b.setTemperature(1L).build()) + .withOptions(b.setMirostatEta(1L).build()) + .withOptions(b.setMirostatTau(1L).build()) + .withOptions(b.setNumGpu(1).build()) + .withOptions(b.setSeed(1).build()) + .withOptions(b.setTopK(1).build()) + .withOptions(b.setTopP(1).build()) + .build(); String jsonRequest = serializeRequest(req); - OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaChatRequestModel.class); + OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaChatRequestModel.class); assertEqualsAfterUnmarshalling(deserializeRequest, req); assertEquals(1, deserializeRequest.getOptions().get("mirostat")); + assertEquals(1.0, deserializeRequest.getOptions().get("temperature")); + assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_eta")); + assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_tau")); + assertEquals(1, deserializeRequest.getOptions().get("num_gpu")); + assertEquals(1, deserializeRequest.getOptions().get("seed")); + assertEquals(1, deserializeRequest.getOptions().get("top_k")); + assertEquals(1.0, deserializeRequest.getOptions().get("top_p")); } @Test From 9900ae92fb44039a394e5c263b2baf44781dce11 Mon Sep 17 00:00:00 2001 From: anjeongkyun Date: Sun, 21 Apr 2024 23:43:49 +0900 Subject: [PATCH 23/69] Adds test of testWithTemplate --- .../unittests/jackson/TestChatRequestSerialization.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java index f6237cc..a83376d 100644 --- a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java @@ -86,4 +86,12 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe String requestFormatProperty = jsonObject.getString("format"); assertEquals("json", requestFormatProperty); } + + @Test + public void testWithTemplate() { + OllamaChatRequestModel req = builder.withTemplate("System Template") + 
.build(); + String jsonRequest = serializeRequest(req); + assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaChatRequestModel.class), req); + } } From ec4abd1c2d805d366600ea80178e367bb7c472a6 Mon Sep 17 00:00:00 2001 From: anjeongkyun Date: Sun, 21 Apr 2024 23:49:42 +0900 Subject: [PATCH 24/69] Adds test of testWithStreaming --- .../unittests/jackson/TestChatRequestSerialization.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java index a83376d..c901c54 100644 --- a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java @@ -94,4 +94,11 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe String jsonRequest = serializeRequest(req); assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaChatRequestModel.class), req); } + + @Test + public void testWithStreaming() { + OllamaChatRequestModel req = builder.withStreaming().build(); + String jsonRequest = serializeRequest(req); + assertEquals(deserializeRequest(jsonRequest, OllamaChatRequestModel.class).isStream(), true); + } } From b21aa6add2d19d530f15b9968c9568fd87588c25 Mon Sep 17 00:00:00 2001 From: anjeongkyun Date: Sun, 21 Apr 2024 23:52:42 +0900 Subject: [PATCH 25/69] Adds test of testWithKeepAlive --- .../unittests/jackson/TestChatRequestSerialization.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java index c901c54..e4655bf 100644 --- a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java @@ -101,4 +101,13 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe String jsonRequest = serializeRequest(req); assertEquals(deserializeRequest(jsonRequest, OllamaChatRequestModel.class).isStream(), true); } + + @Test + public void testWithKeepAlive() { + String expectedKeepAlive = "5m"; + OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive) + .build(); + String jsonRequest = serializeRequest(req); + assertEquals(deserializeRequest(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive); + } } From 78a5eedc8f93c9dff18ca8b6ce7bbe035fa5dd06 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Mon, 22 Apr 2024 04:39:40 +0000 Subject: [PATCH 26/69] [maven-release-plugin] prepare release v1.0.64 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 4d5d054..29f45d8 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.64-SNAPSHOT + 1.0.64 Ollama4j Java library for interacting with Ollama API. 
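The serialization tests added in the preceding patches cover the request-level fields of `OllamaChatRequestModel` (options, template, streaming, keep_alive). The sketch below shows how those same builder methods combine on a request that is actually sent; it assumes a local Ollama server, and the model constant, option values, and keep-alive duration are placeholders.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class ChatRequestKnobsExample {

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        // Combine the fields exercised by the new unit tests on one request:
        // per-request options plus a keep_alive duration.
        OllamaChatRequestModel request = OllamaChatRequestBuilder
                .getInstance(OllamaModelType.LLAMA2)
                .withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
                .withOptions(new OptionsBuilder().setTemperature(0.8f).setTopK(40).build())
                .withKeepAlive("5m")
                .build();

        OllamaChatResult chatResult = ollamaAPI.chat(request);
        System.out.println(chatResult.getResponse());
    }
}
```

`withTemplate(...)` and `withStreaming()` chain in the same way; a streaming request is typically paired with the `chat(request, streamHandler)` overload shown in the chat documentation later in this series.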
@@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.64 From 2df878c95358c93461b947a911a97d7ec7e2b5c3 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Mon, 22 Apr 2024 04:39:41 +0000 Subject: [PATCH 27/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 29f45d8..7bc5a64 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.64 + 1.0.65-SNAPSHOT Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.64 + v1.0.16 From 899fa38805c138a7ee7ee6b89c3d6ca90a1db134 Mon Sep 17 00:00:00 2001 From: Amith Koujalgi Date: Mon, 13 May 2024 21:05:20 +0530 Subject: [PATCH 28/69] - Updated newly supported Ollama models - Added `ConsoleOutputStreamHandler` --- docs/docs/apis-generate/chat.md | 129 ++++++++++------ .../core/impl/ConsoleOutputStreamHandler.java | 14 ++ .../ollama4j/core/types/OllamaModelType.java | 139 +++++++++--------- 3 files changed, 166 insertions(+), 116 deletions(-) create mode 100644 src/main/java/io/github/amithkoujalgi/ollama4j/core/impl/ConsoleOutputStreamHandler.java diff --git a/docs/docs/apis-generate/chat.md b/docs/docs/apis-generate/chat.md index c30b998..5c4dc20 100644 --- a/docs/docs/apis-generate/chat.md +++ b/docs/docs/apis-generate/chat.md @@ -4,7 +4,7 @@ sidebar_position: 7 # Chat -This API lets you create a conversation with LLMs. Using this API enables you to ask questions to the model including +This API lets you create a conversation with LLMs. Using this API enables you to ask questions to the model including information using the history of already asked questions and the respective answers. ## Create a new conversation and use chat history to augment follow up questions @@ -20,8 +20,8 @@ public class Main { OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); // create first user question - OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,"What is the capital of France?") - .build(); + OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") + .build(); // start conversation with model OllamaChatResult chatResult = ollamaAPI.chat(requestModel); @@ -29,7 +29,7 @@ public class Main { System.out.println("First answer: " + chatResult.getResponse()); // create next userQuestion - requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER,"And what is the second largest city?").build(); + requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?").build(); // "continue" conversation with model chatResult = ollamaAPI.chat(requestModel); @@ -41,32 +41,38 @@ public class Main { } ``` + You will get a response similar to: > First answer: Should be Paris! -> +> > Second answer: Marseille. 
-> +> > Chat History: ```json -[ { - "role" : "user", - "content" : "What is the capital of France?", - "images" : [ ] - }, { - "role" : "assistant", - "content" : "Should be Paris!", - "images" : [ ] - }, { - "role" : "user", - "content" : "And what is the second largest city?", - "images" : [ ] - }, { - "role" : "assistant", - "content" : "Marseille.", - "images" : [ ] - } ] +[ + { + "role": "user", + "content": "What is the capital of France?", + "images": [] + }, + { + "role": "assistant", + "content": "Should be Paris!", + "images": [] + }, + { + "role": "user", + "content": "And what is the second largest city?", + "images": [] + }, + { + "role": "assistant", + "content": "Marseille.", + "images": [] + } +] ``` ## Create a conversation where the answer is streamed @@ -81,30 +87,50 @@ public class Main { OllamaAPI ollamaAPI = new OllamaAPI(host); OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, - "What is the capital of France? And what's France's connection with Mona Lisa?") - .build(); + "What is the capital of France? And what's France's connection with Mona Lisa?") + .build(); // define a handler (Consumer) OllamaStreamHandler streamHandler = (s) -> { - System.out.println(s); + System.out.println(s); }; - OllamaChatResult chatResult = ollamaAPI.chat(requestModel,streamHandler); + OllamaChatResult chatResult = ollamaAPI.chat(requestModel, streamHandler); } } ``` + You will get a response similar to: > The > The capital > The capital of > The capital of France -> The capital of France is +> The capital of France is > The capital of France is Paris > The capital of France is Paris. +## Use a simple Console Output Stream Handler + +``` +import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler; + +public class Main { + public static void main(String[] args) throws Exception { + String host = "http://localhost:11434/"; + OllamaAPI ollamaAPI = new OllamaAPI(host); + + OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2); + OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!") + .build(); + OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler(); + ollamaAPI.chat(requestModel, streamHandler); + } +} +``` ## Create a new conversation with individual system prompt + ```java public class Main { @@ -117,8 +143,8 @@ public class Main { // create request with system-prompt (overriding the model defaults) and user question OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") - .withMessage(OllamaChatMessageRole.USER,"What is the capital of France? And what's France's connection with Mona Lisa?") - .build(); + .withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?") + .build(); // start conversation with model OllamaChatResult chatResult = ollamaAPI.chat(requestModel); @@ -128,6 +154,7 @@ public class Main { } ``` + You will get a response similar to: > NI. 
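One behavior of the streamed-chat example further above is worth making explicit: the sample output ("The", "The capital", "The capital of", ...) suggests the stream handler receives the cumulative response text on every invocation, not just the newly generated tokens. A handler that only wants the new characters therefore has to remember how much it has already seen. A minimal sketch, assuming `OllamaStreamHandler` is the single `accept(String)` callback used in that example (this mirrors what the `ConsoleOutputStreamHandler` introduced later in this patch does):

```java
// Prints only the newly appended part of each cumulative message.
OllamaStreamHandler deltaHandler = new OllamaStreamHandler() {
    private final StringBuilder seen = new StringBuilder();

    @Override
    public void accept(String cumulativeMessage) {
        String delta = cumulativeMessage.substring(seen.length());
        seen.append(delta);
        System.out.print(delta);
    }
};
```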
@@ -139,34 +166,40 @@ public class Main { public static void main(String[] args) { - String host = "http://localhost:11434/"; + String host = "http://localhost:11434/"; - OllamaAPI ollamaAPI = new OllamaAPI(host); - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA); + OllamaAPI ollamaAPI = new OllamaAPI(host); + OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA); - // Load Image from File and attach to user message (alternatively images could also be added via URL) - OllamaChatRequestModel requestModel = - builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", - List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); + // Load Image from File and attach to user message (alternatively images could also be added via URL) + OllamaChatRequestModel requestModel = + builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", + List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); - OllamaChatResult chatResult = ollamaAPI.chat(requestModel); - System.out.println("First answer: " + chatResult.getResponse()); + OllamaChatResult chatResult = ollamaAPI.chat(requestModel); + System.out.println("First answer: " + chatResult.getResponse()); - builder.reset(); + builder.reset(); - // Use history to ask further questions about the image or assistant answer - requestModel = - builder.withMessages(chatResult.getChatHistory()) - .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build(); + // Use history to ask further questions about the image or assistant answer + requestModel = + builder.withMessages(chatResult.getChatHistory()) + .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build(); - chatResult = ollamaAPI.chat(requestModel); - System.out.println("Second answer: " + chatResult.getResponse()); + chatResult = ollamaAPI.chat(requestModel); + System.out.println("Second answer: " + chatResult.getResponse()); } } ``` You will get a response similar to: -> First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early evening, given the warm lighting and the low position of the sun in the sky. +> First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two +> levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and +> comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early +> evening, given the warm lighting and the low position of the sun in the sky. > -> Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. However, the dog appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed confidently. \ No newline at end of file +> Second Answer: Based on the image, it's difficult to definitively determine the breed of the dog. 
However, the dog +> appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever +> or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed +> confidently. \ No newline at end of file diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/impl/ConsoleOutputStreamHandler.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/impl/ConsoleOutputStreamHandler.java new file mode 100644 index 0000000..6807019 --- /dev/null +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/impl/ConsoleOutputStreamHandler.java @@ -0,0 +1,14 @@ +package io.github.amithkoujalgi.ollama4j.core.impl; + +import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; + +public class ConsoleOutputStreamHandler implements OllamaStreamHandler { + private final StringBuffer response = new StringBuffer(); + + @Override + public void accept(String message) { + String substr = message.substring(response.length()); + response.append(substr); + System.out.print(substr); + } +} diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java index cc8280c..d7984d0 100644 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java @@ -8,72 +8,75 @@ package io.github.amithkoujalgi.ollama4j.core.types; */ @SuppressWarnings("ALL") public class OllamaModelType { - public static final String GEMMA = "gemma"; - public static final String LLAMA2 = "llama2"; - public static final String MISTRAL = "mistral"; - public static final String MIXTRAL = "mixtral"; - public static final String LLAVA = "llava"; - public static final String NEURAL_CHAT = "neural-chat"; - public static final String CODELLAMA = "codellama"; - public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral"; - public static final String MISTRAL_OPENORCA = "mistral-openorca"; - public static final String LLAMA2_UNCENSORED = "llama2-uncensored"; - public static final String PHI = "phi"; - public static final String ORCA_MINI = "orca-mini"; - public static final String DEEPSEEK_CODER = "deepseek-coder"; - public static final String DOLPHIN_MISTRAL = "dolphin-mistral"; - public static final String VICUNA = "vicuna"; - public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored"; - public static final String ZEPHYR = "zephyr"; - public static final String OPENHERMES = "openhermes"; - public static final String QWEN = "qwen"; - public static final String WIZARDCODER = "wizardcoder"; - public static final String LLAMA2_CHINESE = "llama2-chinese"; - public static final String TINYLLAMA = "tinyllama"; - public static final String PHIND_CODELLAMA = "phind-codellama"; - public static final String OPENCHAT = "openchat"; - public static final String ORCA2 = "orca2"; - public static final String FALCON = "falcon"; - public static final String WIZARD_MATH = "wizard-math"; - public static final String TINYDOLPHIN = "tinydolphin"; - public static final String NOUS_HERMES = "nous-hermes"; - public static final String YI = "yi"; - public static final String DOLPHIN_PHI = "dolphin-phi"; - public static final String STARLING_LM = "starling-lm"; - public static final String STARCODER = "starcoder"; - public static final String CODEUP = "codeup"; - public static final String MEDLLAMA2 = "medllama2"; - public static 
final String STABLE_CODE = "stable-code"; - public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored"; - public static final String BAKLLAVA = "bakllava"; - public static final String EVERYTHINGLM = "everythinglm"; - public static final String SOLAR = "solar"; - public static final String STABLE_BELUGA = "stable-beluga"; - public static final String SQLCODER = "sqlcoder"; - public static final String YARN_MISTRAL = "yarn-mistral"; - public static final String NOUS_HERMES2_MIXTRAL = "nous-hermes2-mixtral"; - public static final String SAMANTHA_MISTRAL = "samantha-mistral"; - public static final String STABLELM_ZEPHYR = "stablelm-zephyr"; - public static final String MEDITRON = "meditron"; - public static final String WIZARD_VICUNA = "wizard-vicuna"; - public static final String STABLELM2 = "stablelm2"; - public static final String MAGICODER = "magicoder"; - public static final String YARN_LLAMA2 = "yarn-llama2"; - public static final String NOUS_HERMES2 = "nous-hermes2"; - public static final String DEEPSEEK_LLM = "deepseek-llm"; - public static final String LLAMA_PRO = "llama-pro"; - public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2"; - public static final String CODEBOOGA = "codebooga"; - public static final String MISTRALLITE = "mistrallite"; - public static final String NEXUSRAVEN = "nexusraven"; - public static final String GOLIATH = "goliath"; - public static final String NOMIC_EMBED_TEXT = "nomic-embed-text"; - public static final String NOTUX = "notux"; - public static final String ALFRED = "alfred"; - public static final String MEGADOLPHIN = "megadolphin"; - public static final String WIZARDLM = "wizardlm"; - public static final String XWINLM = "xwinlm"; - public static final String NOTUS = "notus"; - public static final String DUCKDB_NSQL = "duckdb-nsql"; - public static final String ALL_MINILM = "all-minilm"; + public static final String GEMMA = "gemma"; + public static final String LLAMA2 = "llama2"; + public static final String LLAMA3 = "llama3"; + public static final String MISTRAL = "mistral"; + public static final String MIXTRAL = "mixtral"; + public static final String LLAVA = "llava"; + public static final String LLAVA_PHI3 = "llava-phi3"; + public static final String NEURAL_CHAT = "neural-chat"; + public static final String CODELLAMA = "codellama"; + public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral"; + public static final String MISTRAL_OPENORCA = "mistral-openorca"; + public static final String LLAMA2_UNCENSORED = "llama2-uncensored"; + public static final String PHI = "phi"; + public static final String PHI3 = "phi3"; + public static final String ORCA_MINI = "orca-mini"; + public static final String DEEPSEEK_CODER = "deepseek-coder"; + public static final String DOLPHIN_MISTRAL = "dolphin-mistral"; + public static final String VICUNA = "vicuna"; + public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored"; + public static final String ZEPHYR = "zephyr"; + public static final String OPENHERMES = "openhermes"; + public static final String QWEN = "qwen"; + public static final String WIZARDCODER = "wizardcoder"; + public static final String LLAMA2_CHINESE = "llama2-chinese"; + public static final String TINYLLAMA = "tinyllama"; + public static final String PHIND_CODELLAMA = "phind-codellama"; + public static final String OPENCHAT = "openchat"; + public static final String ORCA2 = "orca2"; + public static final String FALCON = "falcon"; + public static final String WIZARD_MATH = "wizard-math"; + public 
static final String TINYDOLPHIN = "tinydolphin"; + public static final String NOUS_HERMES = "nous-hermes"; + public static final String YI = "yi"; + public static final String DOLPHIN_PHI = "dolphin-phi"; + public static final String STARLING_LM = "starling-lm"; + public static final String STARCODER = "starcoder"; + public static final String CODEUP = "codeup"; + public static final String MEDLLAMA2 = "medllama2"; + public static final String STABLE_CODE = "stable-code"; + public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored"; + public static final String BAKLLAVA = "bakllava"; + public static final String EVERYTHINGLM = "everythinglm"; + public static final String SOLAR = "solar"; + public static final String STABLE_BELUGA = "stable-beluga"; + public static final String SQLCODER = "sqlcoder"; + public static final String YARN_MISTRAL = "yarn-mistral"; + public static final String NOUS_HERMES2_MIXTRAL = "nous-hermes2-mixtral"; + public static final String SAMANTHA_MISTRAL = "samantha-mistral"; + public static final String STABLELM_ZEPHYR = "stablelm-zephyr"; + public static final String MEDITRON = "meditron"; + public static final String WIZARD_VICUNA = "wizard-vicuna"; + public static final String STABLELM2 = "stablelm2"; + public static final String MAGICODER = "magicoder"; + public static final String YARN_LLAMA2 = "yarn-llama2"; + public static final String NOUS_HERMES2 = "nous-hermes2"; + public static final String DEEPSEEK_LLM = "deepseek-llm"; + public static final String LLAMA_PRO = "llama-pro"; + public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2"; + public static final String CODEBOOGA = "codebooga"; + public static final String MISTRALLITE = "mistrallite"; + public static final String NEXUSRAVEN = "nexusraven"; + public static final String GOLIATH = "goliath"; + public static final String NOMIC_EMBED_TEXT = "nomic-embed-text"; + public static final String NOTUX = "notux"; + public static final String ALFRED = "alfred"; + public static final String MEGADOLPHIN = "megadolphin"; + public static final String WIZARDLM = "wizardlm"; + public static final String XWINLM = "xwinlm"; + public static final String NOTUS = "notus"; + public static final String DUCKDB_NSQL = "duckdb-nsql"; + public static final String ALL_MINILM = "all-minilm"; } From 10d2a8f5ff0b631012ba42268843260aa8eabf6e Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Mon, 13 May 2024 15:36:37 +0000 Subject: [PATCH 29/69] [maven-release-plugin] prepare release v1.0.65 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 7bc5a64..1367b96 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.65-SNAPSHOT + 1.0.65 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.65 From e4e717b747014b0c00ccc48f482014898d49689a Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Mon, 13 May 2024 15:36:38 +0000 Subject: [PATCH 30/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 1367b96..0d5d73c 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.65 + 1.0.66-SNAPSHOT Ollama4j Java library for interacting with Ollama API. 
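The model constants added a little further above (`LLAMA3`, `PHI3`, `LLAVA_PHI3`) are plain string identifiers, so they drop straight into the existing APIs. A small sketch of what that could look like, assuming a local Ollama server on which the `llama3` model has already been pulled:

```java
public class Main {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        // Generation can easily take longer than the default request timeout.
        ollamaAPI.setRequestTimeoutSeconds(60);

        OllamaChatRequestModel request = OllamaChatRequestBuilder
                .getInstance(OllamaModelType.LLAMA3) // new constant, resolves to "llama3"
                .withMessage(OllamaChatMessageRole.USER, "Why is the sky blue?")
                .build();

        OllamaChatResult chatResult = ollamaAPI.chat(request);
        System.out.println(chatResult.getResponse());
    }
}
```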
@@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.65 + v1.0.16 From 04124cf978acb79fa0d5f9306782dc3f2ecd1e95 Mon Sep 17 00:00:00 2001 From: Amith Koujalgi Date: Tue, 14 May 2024 10:27:56 +0530 Subject: [PATCH 31/69] Updated default request timeout to 10 seconds --- docs/docs/apis-generate/chat.md | 2 +- .../ollama4j/core/OllamaAPI.java | 1067 ++++++++--------- .../models/request/OllamaEndpointCaller.java | 197 ++- 3 files changed, 628 insertions(+), 638 deletions(-) diff --git a/docs/docs/apis-generate/chat.md b/docs/docs/apis-generate/chat.md index 5c4dc20..b4d51b1 100644 --- a/docs/docs/apis-generate/chat.md +++ b/docs/docs/apis-generate/chat.md @@ -112,7 +112,7 @@ You will get a response similar to: ## Use a simple Console Output Stream Handler -``` +```java import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler; public class Main { diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java index 25b3a37..1f22210 100644 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java @@ -9,18 +9,13 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel; import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; -import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest; -import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest; -import io.github.amithkoujalgi.ollama4j.core.models.request.ModelRequest; -import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaChatEndpointCaller; -import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaGenerateEndpointCaller; +import io.github.amithkoujalgi.ollama4j.core.models.request.*; import io.github.amithkoujalgi.ollama4j.core.utils.Options; import io.github.amithkoujalgi.ollama4j.core.utils.Utils; -import java.io.BufferedReader; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.*; import java.net.URI; import java.net.URISyntaxException; import java.net.http.HttpClient; @@ -33,554 +28,552 @@ import java.time.Duration; import java.util.ArrayList; import java.util.Base64; import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -/** The base Ollama API class. */ +/** + * The base Ollama API class. + */ @SuppressWarnings("DuplicatedCode") public class OllamaAPI { - private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class); - private final String host; - private long requestTimeoutSeconds = 3; - private boolean verbose = true; - private BasicAuth basicAuth; - - /** - * Instantiates the Ollama API. - * - * @param host the host address of Ollama server - */ - public OllamaAPI(String host) { - if (host.endsWith("/")) { - this.host = host.substring(0, host.length() - 1); - } else { - this.host = host; - } - } - - /** - * Set request timeout in seconds. Default is 3 seconds. 
- * - * @param requestTimeoutSeconds the request timeout in seconds - */ - public void setRequestTimeoutSeconds(long requestTimeoutSeconds) { - this.requestTimeoutSeconds = requestTimeoutSeconds; - } - - /** - * Set/unset logging of responses - * - * @param verbose true/false - */ - public void setVerbose(boolean verbose) { - this.verbose = verbose; - } - - /** - * Set basic authentication for accessing Ollama server that's behind a reverse-proxy/gateway. - * - * @param username the username - * @param password the password - */ - public void setBasicAuth(String username, String password) { - this.basicAuth = new BasicAuth(username, password); - } - - /** - * API to check the reachability of Ollama server. - * - * @return true if the server is reachable, false otherwise. - */ - public boolean ping() { - String url = this.host + "/api/tags"; - HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest httpRequest = null; - try { - httpRequest = - getRequestBuilderDefault(new URI(url)) - .header("Accept", "application/json") - .header("Content-type", "application/json") - .GET() - .build(); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - HttpResponse response = null; - try { - response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); - } catch (HttpConnectTimeoutException e) { - return false; - } catch (IOException | InterruptedException e) { - throw new RuntimeException(e); - } - int statusCode = response.statusCode(); - return statusCode == 200; - } - - /** - * List available models from Ollama server. - * - * @return the list - */ - public List listModels() - throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { - String url = this.host + "/api/tags"; - HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest httpRequest = - getRequestBuilderDefault(new URI(url)) - .header("Accept", "application/json") - .header("Content-type", "application/json") - .GET() - .build(); - HttpResponse response = - httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseString = response.body(); - if (statusCode == 200) { - return Utils.getObjectMapper() - .readValue(responseString, ListModelsResponse.class) - .getModels(); - } else { - throw new OllamaBaseException(statusCode + " - " + responseString); - } - } - - /** - * Pull a model on the Ollama server from the list of available models. 
- * - * @param modelName the name of the model - */ - public void pullModel(String modelName) - throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { - String url = this.host + "/api/pull"; - String jsonData = new ModelRequest(modelName).toString(); - HttpRequest request = - getRequestBuilderDefault(new URI(url)) - .POST(HttpRequest.BodyPublishers.ofString(jsonData)) - .header("Accept", "application/json") - .header("Content-type", "application/json") - .build(); - HttpClient client = HttpClient.newHttpClient(); - HttpResponse response = - client.send(request, HttpResponse.BodyHandlers.ofInputStream()); - int statusCode = response.statusCode(); - InputStream responseBodyStream = response.body(); - String responseString = ""; - try (BufferedReader reader = - new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { - String line; - while ((line = reader.readLine()) != null) { - ModelPullResponse modelPullResponse = - Utils.getObjectMapper().readValue(line, ModelPullResponse.class); - if (verbose) { - logger.info(modelPullResponse.getStatus()); - } - } - } - if (statusCode != 200) { - throw new OllamaBaseException(statusCode + " - " + responseString); - } - } - - /** - * Gets model details from the Ollama server. - * - * @param modelName the model - * @return the model details - */ - public ModelDetail getModelDetails(String modelName) - throws IOException, OllamaBaseException, InterruptedException, URISyntaxException { - String url = this.host + "/api/show"; - String jsonData = new ModelRequest(modelName).toString(); - HttpRequest request = - getRequestBuilderDefault(new URI(url)) - .header("Accept", "application/json") - .header("Content-type", "application/json") - .POST(HttpRequest.BodyPublishers.ofString(jsonData)) - .build(); - HttpClient client = HttpClient.newHttpClient(); - HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseBody = response.body(); - if (statusCode == 200) { - return Utils.getObjectMapper().readValue(responseBody, ModelDetail.class); - } else { - throw new OllamaBaseException(statusCode + " - " + responseBody); - } - } - - /** - * Create a custom model from a model file. Read more about custom model file creation here. - * - * @param modelName the name of the custom model to be created. - * @param modelFilePath the path to model file that exists on the Ollama server. - */ - public void createModelWithFilePath(String modelName, String modelFilePath) - throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { - String url = this.host + "/api/create"; - String jsonData = new CustomModelFilePathRequest(modelName, modelFilePath).toString(); - HttpRequest request = - getRequestBuilderDefault(new URI(url)) - .header("Accept", "application/json") - .header("Content-Type", "application/json") - .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)) - .build(); - HttpClient client = HttpClient.newHttpClient(); - HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseString = response.body(); - if (statusCode != 200) { - throw new OllamaBaseException(statusCode + " - " + responseString); - } - // FIXME: Ollama API returns HTTP status code 200 for model creation failure cases. Correct this - // if the issue is fixed in the Ollama API server. 
- if (responseString.contains("error")) { - throw new OllamaBaseException(responseString); - } - if (verbose) { - logger.info(responseString); - } - } - - /** - * Create a custom model from a model file. Read more about custom model file creation here. - * - * @param modelName the name of the custom model to be created. - * @param modelFileContents the path to model file that exists on the Ollama server. - */ - public void createModelWithModelFileContents(String modelName, String modelFileContents) - throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { - String url = this.host + "/api/create"; - String jsonData = new CustomModelFileContentsRequest(modelName, modelFileContents).toString(); - HttpRequest request = - getRequestBuilderDefault(new URI(url)) - .header("Accept", "application/json") - .header("Content-Type", "application/json") - .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)) - .build(); - HttpClient client = HttpClient.newHttpClient(); - HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseString = response.body(); - if (statusCode != 200) { - throw new OllamaBaseException(statusCode + " - " + responseString); - } - if (responseString.contains("error")) { - throw new OllamaBaseException(responseString); - } - if (verbose) { - logger.info(responseString); - } - } - - /** - * Delete a model from Ollama server. - * - * @param modelName the name of the model to be deleted. - * @param ignoreIfNotPresent ignore errors if the specified model is not present on Ollama server. - */ - public void deleteModel(String modelName, boolean ignoreIfNotPresent) - throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { - String url = this.host + "/api/delete"; - String jsonData = new ModelRequest(modelName).toString(); - HttpRequest request = - getRequestBuilderDefault(new URI(url)) - .method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)) - .header("Accept", "application/json") - .header("Content-type", "application/json") - .build(); - HttpClient client = HttpClient.newHttpClient(); - HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseBody = response.body(); - if (statusCode == 404 && responseBody.contains("model") && responseBody.contains("not found")) { - return; - } - if (statusCode != 200) { - throw new OllamaBaseException(statusCode + " - " + responseBody); - } - } - - /** - * Generate embeddings for a given text from a model - * - * @param model name of model to generate embeddings from - * @param prompt text to generate embeddings for - * @return embeddings - */ - public List generateEmbeddings(String model, String prompt) - throws IOException, InterruptedException, OllamaBaseException { - return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt)); - } + private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class); + private final String host; + private long requestTimeoutSeconds = 10; + private boolean verbose = true; + private BasicAuth basicAuth; /** - * Generate embeddings using a {@link OllamaEmbeddingsRequestModel}. 
- * - * @param modelRequest request for '/api/embeddings' endpoint - * @return embeddings - */ - public List generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException{ - URI uri = URI.create(this.host + "/api/embeddings"); - String jsonData = modelRequest.toString(); - HttpClient httpClient = HttpClient.newHttpClient(); - HttpRequest.Builder requestBuilder = - getRequestBuilderDefault(uri) - .header("Accept", "application/json") - .POST(HttpRequest.BodyPublishers.ofString(jsonData)); - HttpRequest request = requestBuilder.build(); - HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); - int statusCode = response.statusCode(); - String responseBody = response.body(); - if (statusCode == 200) { - OllamaEmbeddingResponseModel embeddingResponse = - Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class); - return embeddingResponse.getEmbedding(); - } else { - throw new OllamaBaseException(statusCode + " - " + responseBody); + * Instantiates the Ollama API. + * + * @param host the host address of Ollama server + */ + public OllamaAPI(String host) { + if (host.endsWith("/")) { + this.host = host.substring(0, host.length() - 1); + } else { + this.host = host; + } } - } - /** - * Generate response for a question to a model running on Ollama server. This is a sync/blocking - * call. - * - * @param model the ollama model to ask the question to - * @param prompt the prompt/question text - * @param options the Options object - More - * details on the options - * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. - * @return OllamaResult that includes response text and time taken for response - */ - public OllamaResult generate(String model, String prompt, Options options, OllamaStreamHandler streamHandler) - throws OllamaBaseException, IOException, InterruptedException { - OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); - ollamaRequestModel.setOptions(options.getOptionsMap()); - return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler); - } - - /** - * Convenience method to call Ollama API without streaming responses. - * - * Uses {@link #generate(String, String, Options, OllamaStreamHandler)} - */ - public OllamaResult generate(String model, String prompt, Options options) - throws OllamaBaseException, IOException, InterruptedException { - return generate(model, prompt, options,null); - } - - /** - * Generate response for a question to a model running on Ollama server and get a callback handle - * that can be used to check for status and get the response from the model later. This would be - * an async/non-blocking call. 
- * - * @param model the ollama model to ask the question to - * @param prompt the prompt/question text - * @return the ollama async result callback handle - */ - public OllamaAsyncResultCallback generateAsync(String model, String prompt) { - OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); - - URI uri = URI.create(this.host + "/api/generate"); - OllamaAsyncResultCallback ollamaAsyncResultCallback = - new OllamaAsyncResultCallback( - getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds); - ollamaAsyncResultCallback.start(); - return ollamaAsyncResultCallback; - } - - /** - * With one or more image files, ask a question to a model running on Ollama server. This is a - * sync/blocking call. - * - * @param model the ollama model to ask the question to - * @param prompt the prompt/question text - * @param imageFiles the list of image files to use for the question - * @param options the Options object - More - * details on the options - * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. - * @return OllamaResult that includes response text and time taken for response - */ - public OllamaResult generateWithImageFiles( - String model, String prompt, List imageFiles, Options options, OllamaStreamHandler streamHandler) - throws OllamaBaseException, IOException, InterruptedException { - List images = new ArrayList<>(); - for (File imageFile : imageFiles) { - images.add(encodeFileToBase64(imageFile)); + /** + * Set request timeout in seconds. Default is 3 seconds. + * + * @param requestTimeoutSeconds the request timeout in seconds + */ + public void setRequestTimeoutSeconds(long requestTimeoutSeconds) { + this.requestTimeoutSeconds = requestTimeoutSeconds; } - OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); - ollamaRequestModel.setOptions(options.getOptionsMap()); - return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler); - } - /** - * Convenience method to call Ollama API without streaming responses. - * - * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)} - */ - public OllamaResult generateWithImageFiles( - String model, String prompt, List imageFiles, Options options) - throws OllamaBaseException, IOException, InterruptedException{ - return generateWithImageFiles(model, prompt, imageFiles, options, null); -} - - /** - * With one or more image URLs, ask a question to a model running on Ollama server. This is a - * sync/blocking call. - * - * @param model the ollama model to ask the question to - * @param prompt the prompt/question text - * @param imageURLs the list of image URLs to use for the question - * @param options the Options object - More - * details on the options - * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. 
- * @return OllamaResult that includes response text and time taken for response - */ - public OllamaResult generateWithImageURLs( - String model, String prompt, List imageURLs, Options options, OllamaStreamHandler streamHandler) - throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { - List images = new ArrayList<>(); - for (String imageURL : imageURLs) { - images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL))); + /** + * Set/unset logging of responses + * + * @param verbose true/false + */ + public void setVerbose(boolean verbose) { + this.verbose = verbose; } - OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); - ollamaRequestModel.setOptions(options.getOptionsMap()); - return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler); - } - /** - * Convenience method to call Ollama API without streaming responses. - * - * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)} - */ - public OllamaResult generateWithImageURLs(String model, String prompt, List imageURLs, - Options options) - throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { - return generateWithImageURLs(model, prompt, imageURLs, options, null); - } + /** + * Set basic authentication for accessing Ollama server that's behind a reverse-proxy/gateway. + * + * @param username the username + * @param password the password + */ + public void setBasicAuth(String username, String password) { + this.basicAuth = new BasicAuth(username, password); + } + + /** + * API to check the reachability of Ollama server. + * + * @return true if the server is reachable, false otherwise. + */ + public boolean ping() { + String url = this.host + "/api/tags"; + HttpClient httpClient = HttpClient.newHttpClient(); + HttpRequest httpRequest = null; + try { + httpRequest = + getRequestBuilderDefault(new URI(url)) + .header("Accept", "application/json") + .header("Content-type", "application/json") + .GET() + .build(); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + HttpResponse response = null; + try { + response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); + } catch (HttpConnectTimeoutException e) { + return false; + } catch (IOException | InterruptedException e) { + throw new RuntimeException(e); + } + int statusCode = response.statusCode(); + return statusCode == 200; + } + + /** + * List available models from Ollama server. + * + * @return the list + */ + public List listModels() + throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { + String url = this.host + "/api/tags"; + HttpClient httpClient = HttpClient.newHttpClient(); + HttpRequest httpRequest = + getRequestBuilderDefault(new URI(url)) + .header("Accept", "application/json") + .header("Content-type", "application/json") + .GET() + .build(); + HttpResponse response = + httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()); + int statusCode = response.statusCode(); + String responseString = response.body(); + if (statusCode == 200) { + return Utils.getObjectMapper() + .readValue(responseString, ListModelsResponse.class) + .getModels(); + } else { + throw new OllamaBaseException(statusCode + " - " + responseString); + } + } + + /** + * Pull a model on the Ollama server from the list of available models. 
+ * + * @param modelName the name of the model + */ + public void pullModel(String modelName) + throws OllamaBaseException, IOException, URISyntaxException, InterruptedException { + String url = this.host + "/api/pull"; + String jsonData = new ModelRequest(modelName).toString(); + HttpRequest request = + getRequestBuilderDefault(new URI(url)) + .POST(HttpRequest.BodyPublishers.ofString(jsonData)) + .header("Accept", "application/json") + .header("Content-type", "application/json") + .build(); + HttpClient client = HttpClient.newHttpClient(); + HttpResponse response = + client.send(request, HttpResponse.BodyHandlers.ofInputStream()); + int statusCode = response.statusCode(); + InputStream responseBodyStream = response.body(); + String responseString = ""; + try (BufferedReader reader = + new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { + String line; + while ((line = reader.readLine()) != null) { + ModelPullResponse modelPullResponse = + Utils.getObjectMapper().readValue(line, ModelPullResponse.class); + if (verbose) { + logger.info(modelPullResponse.getStatus()); + } + } + } + if (statusCode != 200) { + throw new OllamaBaseException(statusCode + " - " + responseString); + } + } + + /** + * Gets model details from the Ollama server. + * + * @param modelName the model + * @return the model details + */ + public ModelDetail getModelDetails(String modelName) + throws IOException, OllamaBaseException, InterruptedException, URISyntaxException { + String url = this.host + "/api/show"; + String jsonData = new ModelRequest(modelName).toString(); + HttpRequest request = + getRequestBuilderDefault(new URI(url)) + .header("Accept", "application/json") + .header("Content-type", "application/json") + .POST(HttpRequest.BodyPublishers.ofString(jsonData)) + .build(); + HttpClient client = HttpClient.newHttpClient(); + HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); + int statusCode = response.statusCode(); + String responseBody = response.body(); + if (statusCode == 200) { + return Utils.getObjectMapper().readValue(responseBody, ModelDetail.class); + } else { + throw new OllamaBaseException(statusCode + " - " + responseBody); + } + } + + /** + * Create a custom model from a model file. Read more about custom model file creation here. + * + * @param modelName the name of the custom model to be created. + * @param modelFilePath the path to model file that exists on the Ollama server. + */ + public void createModelWithFilePath(String modelName, String modelFilePath) + throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { + String url = this.host + "/api/create"; + String jsonData = new CustomModelFilePathRequest(modelName, modelFilePath).toString(); + HttpRequest request = + getRequestBuilderDefault(new URI(url)) + .header("Accept", "application/json") + .header("Content-Type", "application/json") + .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)) + .build(); + HttpClient client = HttpClient.newHttpClient(); + HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); + int statusCode = response.statusCode(); + String responseString = response.body(); + if (statusCode != 200) { + throw new OllamaBaseException(statusCode + " - " + responseString); + } + // FIXME: Ollama API returns HTTP status code 200 for model creation failure cases. Correct this + // if the issue is fixed in the Ollama API server. 
+ if (responseString.contains("error")) { + throw new OllamaBaseException(responseString); + } + if (verbose) { + logger.info(responseString); + } + } + + /** + * Create a custom model from a model file. Read more about custom model file creation here. + * + * @param modelName the name of the custom model to be created. + * @param modelFileContents the path to model file that exists on the Ollama server. + */ + public void createModelWithModelFileContents(String modelName, String modelFileContents) + throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { + String url = this.host + "/api/create"; + String jsonData = new CustomModelFileContentsRequest(modelName, modelFileContents).toString(); + HttpRequest request = + getRequestBuilderDefault(new URI(url)) + .header("Accept", "application/json") + .header("Content-Type", "application/json") + .POST(HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)) + .build(); + HttpClient client = HttpClient.newHttpClient(); + HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); + int statusCode = response.statusCode(); + String responseString = response.body(); + if (statusCode != 200) { + throw new OllamaBaseException(statusCode + " - " + responseString); + } + if (responseString.contains("error")) { + throw new OllamaBaseException(responseString); + } + if (verbose) { + logger.info(responseString); + } + } + + /** + * Delete a model from Ollama server. + * + * @param modelName the name of the model to be deleted. + * @param ignoreIfNotPresent ignore errors if the specified model is not present on Ollama server. + */ + public void deleteModel(String modelName, boolean ignoreIfNotPresent) + throws IOException, InterruptedException, OllamaBaseException, URISyntaxException { + String url = this.host + "/api/delete"; + String jsonData = new ModelRequest(modelName).toString(); + HttpRequest request = + getRequestBuilderDefault(new URI(url)) + .method("DELETE", HttpRequest.BodyPublishers.ofString(jsonData, StandardCharsets.UTF_8)) + .header("Accept", "application/json") + .header("Content-type", "application/json") + .build(); + HttpClient client = HttpClient.newHttpClient(); + HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); + int statusCode = response.statusCode(); + String responseBody = response.body(); + if (statusCode == 404 && responseBody.contains("model") && responseBody.contains("not found")) { + return; + } + if (statusCode != 200) { + throw new OllamaBaseException(statusCode + " - " + responseBody); + } + } + + /** + * Generate embeddings for a given text from a model + * + * @param model name of model to generate embeddings from + * @param prompt text to generate embeddings for + * @return embeddings + */ + public List generateEmbeddings(String model, String prompt) + throws IOException, InterruptedException, OllamaBaseException { + return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt)); + } + + /** + * Generate embeddings using a {@link OllamaEmbeddingsRequestModel}. 
+ * + * @param modelRequest request for '/api/embeddings' endpoint + * @return embeddings + */ + public List generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException { + URI uri = URI.create(this.host + "/api/embeddings"); + String jsonData = modelRequest.toString(); + HttpClient httpClient = HttpClient.newHttpClient(); + HttpRequest.Builder requestBuilder = + getRequestBuilderDefault(uri) + .header("Accept", "application/json") + .POST(HttpRequest.BodyPublishers.ofString(jsonData)); + HttpRequest request = requestBuilder.build(); + HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); + int statusCode = response.statusCode(); + String responseBody = response.body(); + if (statusCode == 200) { + OllamaEmbeddingResponseModel embeddingResponse = + Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class); + return embeddingResponse.getEmbedding(); + } else { + throw new OllamaBaseException(statusCode + " - " + responseBody); + } + } + + /** + * Generate response for a question to a model running on Ollama server. This is a sync/blocking + * call. + * + * @param model the ollama model to ask the question to + * @param prompt the prompt/question text + * @param options the Options object - More + * details on the options + * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. + * @return OllamaResult that includes response text and time taken for response + */ + public OllamaResult generate(String model, String prompt, Options options, OllamaStreamHandler streamHandler) + throws OllamaBaseException, IOException, InterruptedException { + OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); + ollamaRequestModel.setOptions(options.getOptionsMap()); + return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); + } + + /** + * Convenience method to call Ollama API without streaming responses. + *

+ * Uses {@link #generate(String, String, Options, OllamaStreamHandler)} + */ + public OllamaResult generate(String model, String prompt, Options options) + throws OllamaBaseException, IOException, InterruptedException { + return generate(model, prompt, options, null); + } + + /** + * Generate response for a question to a model running on Ollama server and get a callback handle + * that can be used to check for status and get the response from the model later. This would be + * an async/non-blocking call. + * + * @param model the ollama model to ask the question to + * @param prompt the prompt/question text + * @return the ollama async result callback handle + */ + public OllamaAsyncResultCallback generateAsync(String model, String prompt) { + OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); + + URI uri = URI.create(this.host + "/api/generate"); + OllamaAsyncResultCallback ollamaAsyncResultCallback = + new OllamaAsyncResultCallback( + getRequestBuilderDefault(uri), ollamaRequestModel, requestTimeoutSeconds); + ollamaAsyncResultCallback.start(); + return ollamaAsyncResultCallback; + } + + /** + * With one or more image files, ask a question to a model running on Ollama server. This is a + * sync/blocking call. + * + * @param model the ollama model to ask the question to + * @param prompt the prompt/question text + * @param imageFiles the list of image files to use for the question + * @param options the Options object - More + * details on the options + * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. + * @return OllamaResult that includes response text and time taken for response + */ + public OllamaResult generateWithImageFiles( + String model, String prompt, List imageFiles, Options options, OllamaStreamHandler streamHandler) + throws OllamaBaseException, IOException, InterruptedException { + List images = new ArrayList<>(); + for (File imageFile : imageFiles) { + images.add(encodeFileToBase64(imageFile)); + } + OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); + ollamaRequestModel.setOptions(options.getOptionsMap()); + return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); + } + + /** + * Convenience method to call Ollama API without streaming responses. + *

+ * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)} + */ + public OllamaResult generateWithImageFiles( + String model, String prompt, List imageFiles, Options options) + throws OllamaBaseException, IOException, InterruptedException { + return generateWithImageFiles(model, prompt, imageFiles, options, null); + } + + /** + * With one or more image URLs, ask a question to a model running on Ollama server. This is a + * sync/blocking call. + * + * @param model the ollama model to ask the question to + * @param prompt the prompt/question text + * @param imageURLs the list of image URLs to use for the question + * @param options the Options object - More + * details on the options + * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. + * @return OllamaResult that includes response text and time taken for response + */ + public OllamaResult generateWithImageURLs( + String model, String prompt, List imageURLs, Options options, OllamaStreamHandler streamHandler) + throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { + List images = new ArrayList<>(); + for (String imageURL : imageURLs) { + images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL))); + } + OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images); + ollamaRequestModel.setOptions(options.getOptionsMap()); + return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); + } + + /** + * Convenience method to call Ollama API without streaming responses. + *

+ * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)} + */ + public OllamaResult generateWithImageURLs(String model, String prompt, List imageURLs, + Options options) + throws OllamaBaseException, IOException, InterruptedException, URISyntaxException { + return generateWithImageURLs(model, prompt, imageURLs, options, null); + } - - /** - * Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the api - * 'api/chat'. - * - * @param model the ollama model to ask the question to - * @param messages chat history / message stack to send to the model - * @return {@link OllamaChatResult} containing the api response and the message history including the newly aqcuired assistant response. - * @throws OllamaBaseException any response code than 200 has been returned - * @throws IOException in case the responseStream can not be read + /** + * Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the api + * 'api/chat'. + * + * @param model the ollama model to ask the question to + * @param messages chat history / message stack to send to the model + * @return {@link OllamaChatResult} containing the api response and the message history including the newly aqcuired assistant response. + * @throws OllamaBaseException any response code than 200 has been returned + * @throws IOException in case the responseStream can not be read * @throws InterruptedException in case the server is not reachable or network issues happen - */ - public OllamaChatResult chat(String model, List messages) throws OllamaBaseException, IOException, InterruptedException{ - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model); - return chat(builder.withMessages(messages).build()); - } - - /** - * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. - * - * Hint: the OllamaChatRequestModel#getStream() property is not implemented. - * - * @param request request object to be sent to the server - * @return - * @throws OllamaBaseException any response code than 200 has been returned - * @throws IOException in case the responseStream can not be read - * @throws InterruptedException in case the server is not reachable or network issues happen - */ - public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException{ - return chat(request,null); - } - - /** - * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. - * - * Hint: the OllamaChatRequestModel#getStream() property is not implemented. 
- * - * @param request request object to be sent to the server - * @param streamHandler callback handler to handle the last message from stream (caution: all previous messages from stream will be concatenated) - * @return - * @throws OllamaBaseException any response code than 200 has been returned - * @throws IOException in case the responseStream can not be read - * @throws InterruptedException in case the server is not reachable or network issues happen - */ - public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException{ - OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); - OllamaResult result; - if(streamHandler != null){ - request.setStream(true); - result = requestCaller.call(request, streamHandler); + */ + public OllamaChatResult chat(String model, List messages) throws OllamaBaseException, IOException, InterruptedException { + OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model); + return chat(builder.withMessages(messages).build()); } - else { - result = requestCaller.callSync(request); + + /** + * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. + *

+ * Hint: the OllamaChatRequestModel#getStream() property is not implemented. + * + * @param request request object to be sent to the server + * @return + * @throws OllamaBaseException any response code than 200 has been returned + * @throws IOException in case the responseStream can not be read + * @throws InterruptedException in case the server is not reachable or network issues happen + */ + public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException { + return chat(request, null); } - return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages()); - } - // technical private methods // - - private static String encodeFileToBase64(File file) throws IOException { - return Base64.getEncoder().encodeToString(Files.readAllBytes(file.toPath())); - } - - private static String encodeByteArrayToBase64(byte[] bytes) { - return Base64.getEncoder().encodeToString(bytes); - } - - private OllamaResult generateSyncForOllamaRequestModel( - OllamaGenerateRequestModel ollamaRequestModel, OllamaStreamHandler streamHandler) - throws OllamaBaseException, IOException, InterruptedException { - OllamaGenerateEndpointCaller requestCaller = - new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); - OllamaResult result; - if (streamHandler != null) { - ollamaRequestModel.setStream(true); - result = requestCaller.call(ollamaRequestModel, streamHandler); - } else { - result = requestCaller.callSync(ollamaRequestModel); + /** + * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}. + *

+ * Hint: the OllamaChatRequestModel#getStream() property is not implemented. + * + * @param request request object to be sent to the server + * @param streamHandler callback handler to handle the last message from stream (caution: all previous messages from stream will be concatenated) + * @return + * @throws OllamaBaseException any response code than 200 has been returned + * @throws IOException in case the responseStream can not be read + * @throws InterruptedException in case the server is not reachable or network issues happen + */ + public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { + OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); + OllamaResult result; + if (streamHandler != null) { + request.setStream(true); + result = requestCaller.call(request, streamHandler); + } else { + result = requestCaller.callSync(request); + } + return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages()); } - return result; - } - /** - * Get default request builder. - * - * @param uri URI to get a HttpRequest.Builder - * @return HttpRequest.Builder - */ - private HttpRequest.Builder getRequestBuilderDefault(URI uri) { - HttpRequest.Builder requestBuilder = - HttpRequest.newBuilder(uri) - .header("Content-Type", "application/json") - .timeout(Duration.ofSeconds(requestTimeoutSeconds)); - if (isBasicAuthCredentialsSet()) { - requestBuilder.header("Authorization", getBasicAuthHeaderValue()); + // technical private methods // + + private static String encodeFileToBase64(File file) throws IOException { + return Base64.getEncoder().encodeToString(Files.readAllBytes(file.toPath())); } - return requestBuilder; - } - /** - * Get basic authentication header value. - * - * @return basic authentication header value (encoded credentials) - */ - private String getBasicAuthHeaderValue() { - String credentialsToEncode = basicAuth.getUsername() + ":" + basicAuth.getPassword(); - return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes()); - } + private static String encodeByteArrayToBase64(byte[] bytes) { + return Base64.getEncoder().encodeToString(bytes); + } - /** - * Check if Basic Auth credentials set. - * - * @return true when Basic Auth credentials set - */ - private boolean isBasicAuthCredentialsSet() { - return basicAuth != null; - } + private OllamaResult generateSyncForOllamaRequestModel( + OllamaGenerateRequestModel ollamaRequestModel, OllamaStreamHandler streamHandler) + throws OllamaBaseException, IOException, InterruptedException { + OllamaGenerateEndpointCaller requestCaller = + new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose); + OllamaResult result; + if (streamHandler != null) { + ollamaRequestModel.setStream(true); + result = requestCaller.call(ollamaRequestModel, streamHandler); + } else { + result = requestCaller.callSync(ollamaRequestModel); + } + return result; + } + + /** + * Get default request builder. 
+ * + * @param uri URI to get a HttpRequest.Builder + * @return HttpRequest.Builder + */ + private HttpRequest.Builder getRequestBuilderDefault(URI uri) { + HttpRequest.Builder requestBuilder = + HttpRequest.newBuilder(uri) + .header("Content-Type", "application/json") + .timeout(Duration.ofSeconds(requestTimeoutSeconds)); + if (isBasicAuthCredentialsSet()) { + requestBuilder.header("Authorization", getBasicAuthHeaderValue()); + } + return requestBuilder; + } + + /** + * Get basic authentication header value. + * + * @return basic authentication header value (encoded credentials) + */ + private String getBasicAuthHeaderValue() { + String credentialsToEncode = basicAuth.getUsername() + ":" + basicAuth.getPassword(); + return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes()); + } + + /** + * Check if Basic Auth credentials set. + * + * @return true when Basic Auth credentials set + */ + private boolean isBasicAuthCredentialsSet() { + return basicAuth != null; + } } diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/OllamaEndpointCaller.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/OllamaEndpointCaller.java index ad8d5bb..350200a 100644 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/OllamaEndpointCaller.java +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/OllamaEndpointCaller.java @@ -1,5 +1,15 @@ package io.github.amithkoujalgi.ollama4j.core.models.request; +import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; +import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; +import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; +import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel; +import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; +import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; +import io.github.amithkoujalgi.ollama4j.core.utils.Utils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; @@ -12,22 +22,11 @@ import java.nio.charset.StandardCharsets; import java.time.Duration; import java.util.Base64; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; -import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; -import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; -import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel; -import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; -import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; -import io.github.amithkoujalgi.ollama4j.core.utils.Utils; - /** * Abstract helperclass to call the ollama api server. */ public abstract class OllamaEndpointCaller { - + private static final Logger LOG = LoggerFactory.getLogger(OllamaAPI.class); private String host; @@ -49,107 +48,105 @@ public abstract class OllamaEndpointCaller { /** * Calls the api server on the given host and endpoint suffix asynchronously, aka waiting for the response. 
- * + * * @param body POST body payload * @return result answer given by the assistant - * @throws OllamaBaseException any response code than 200 has been returned - * @throws IOException in case the responseStream can not be read + * @throws OllamaBaseException any response code than 200 has been returned + * @throws IOException in case the responseStream can not be read * @throws InterruptedException in case the server is not reachable or network issues happen */ - public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException{ - + public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException { // Create Request - long startTime = System.currentTimeMillis(); - HttpClient httpClient = HttpClient.newHttpClient(); - URI uri = URI.create(this.host + getEndpointSuffix()); - HttpRequest.Builder requestBuilder = - getRequestBuilderDefault(uri) - .POST( - body.getBodyPublisher()); - HttpRequest request = requestBuilder.build(); - if (this.verbose) LOG.info("Asking model: " + body.toString()); - HttpResponse response = - httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); - - + long startTime = System.currentTimeMillis(); + HttpClient httpClient = HttpClient.newHttpClient(); + URI uri = URI.create(this.host + getEndpointSuffix()); + HttpRequest.Builder requestBuilder = + getRequestBuilderDefault(uri) + .POST( + body.getBodyPublisher()); + HttpRequest request = requestBuilder.build(); + if (this.verbose) LOG.info("Asking model: " + body.toString()); + HttpResponse response = + httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); + int statusCode = response.statusCode(); - InputStream responseBodyStream = response.body(); - StringBuilder responseBuffer = new StringBuilder(); - try (BufferedReader reader = - new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { - String line; - while ((line = reader.readLine()) != null) { - if (statusCode == 404) { - LOG.warn("Status code: 404 (Not Found)"); - OllamaErrorResponseModel ollamaResponseModel = - Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class); - responseBuffer.append(ollamaResponseModel.getError()); - } else if (statusCode == 401) { - LOG.warn("Status code: 401 (Unauthorized)"); - OllamaErrorResponseModel ollamaResponseModel = - Utils.getObjectMapper() - .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class); - responseBuffer.append(ollamaResponseModel.getError()); - } else if (statusCode == 400) { - LOG.warn("Status code: 400 (Bad Request)"); - OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, - OllamaErrorResponseModel.class); - responseBuffer.append(ollamaResponseModel.getError()); - } else { - boolean finished = parseResponseAndAddToBuffer(line,responseBuffer); - if (finished) { - break; + InputStream responseBodyStream = response.body(); + StringBuilder responseBuffer = new StringBuilder(); + try (BufferedReader reader = + new BufferedReader(new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) { + String line; + while ((line = reader.readLine()) != null) { + if (statusCode == 404) { + LOG.warn("Status code: 404 (Not Found)"); + OllamaErrorResponseModel ollamaResponseModel = + Utils.getObjectMapper().readValue(line, OllamaErrorResponseModel.class); + responseBuffer.append(ollamaResponseModel.getError()); + } else if (statusCode == 401) { + LOG.warn("Status code: 401 (Unauthorized)"); + 
OllamaErrorResponseModel ollamaResponseModel = + Utils.getObjectMapper() + .readValue("{\"error\":\"Unauthorized\"}", OllamaErrorResponseModel.class); + responseBuffer.append(ollamaResponseModel.getError()); + } else if (statusCode == 400) { + LOG.warn("Status code: 400 (Bad Request)"); + OllamaErrorResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, + OllamaErrorResponseModel.class); + responseBuffer.append(ollamaResponseModel.getError()); + } else { + boolean finished = parseResponseAndAddToBuffer(line, responseBuffer); + if (finished) { + break; + } + } } } - } - } - if (statusCode != 200) { - LOG.error("Status code " + statusCode); - throw new OllamaBaseException(responseBuffer.toString()); - } else { - long endTime = System.currentTimeMillis(); - OllamaResult ollamaResult = - new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode); - if (verbose) LOG.info("Model response: " + ollamaResult); - return ollamaResult; + if (statusCode != 200) { + LOG.error("Status code " + statusCode); + throw new OllamaBaseException(responseBuffer.toString()); + } else { + long endTime = System.currentTimeMillis(); + OllamaResult ollamaResult = + new OllamaResult(responseBuffer.toString().trim(), endTime - startTime, statusCode); + if (verbose) LOG.info("Model response: " + ollamaResult); + return ollamaResult; + } } - } /** - * Get default request builder. - * - * @param uri URI to get a HttpRequest.Builder - * @return HttpRequest.Builder - */ - private HttpRequest.Builder getRequestBuilderDefault(URI uri) { - HttpRequest.Builder requestBuilder = - HttpRequest.newBuilder(uri) - .header("Content-Type", "application/json") - .timeout(Duration.ofSeconds(this.requestTimeoutSeconds)); - if (isBasicAuthCredentialsSet()) { - requestBuilder.header("Authorization", getBasicAuthHeaderValue()); + * Get default request builder. + * + * @param uri URI to get a HttpRequest.Builder + * @return HttpRequest.Builder + */ + private HttpRequest.Builder getRequestBuilderDefault(URI uri) { + HttpRequest.Builder requestBuilder = + HttpRequest.newBuilder(uri) + .header("Content-Type", "application/json") + .timeout(Duration.ofSeconds(this.requestTimeoutSeconds)); + if (isBasicAuthCredentialsSet()) { + requestBuilder.header("Authorization", getBasicAuthHeaderValue()); + } + return requestBuilder; } - return requestBuilder; - } - /** - * Get basic authentication header value. - * - * @return basic authentication header value (encoded credentials) - */ - private String getBasicAuthHeaderValue() { - String credentialsToEncode = this.basicAuth.getUsername() + ":" + this.basicAuth.getPassword(); - return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes()); - } + /** + * Get basic authentication header value. + * + * @return basic authentication header value (encoded credentials) + */ + private String getBasicAuthHeaderValue() { + String credentialsToEncode = this.basicAuth.getUsername() + ":" + this.basicAuth.getPassword(); + return "Basic " + Base64.getEncoder().encodeToString(credentialsToEncode.getBytes()); + } + + /** + * Check if Basic Auth credentials set. + * + * @return true when Basic Auth credentials set + */ + private boolean isBasicAuthCredentialsSet() { + return this.basicAuth != null; + } - /** - * Check if Basic Auth credentials set. 
- * - * @return true when Basic Auth credentials set - */ - private boolean isBasicAuthCredentialsSet() { - return this.basicAuth != null; - } - } From e1b6dc3b540db242fdd5b74283ccabd1ff32218e Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Tue, 14 May 2024 04:59:07 +0000 Subject: [PATCH 32/69] [maven-release-plugin] prepare release v1.0.66 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 0d5d73c..211eddf 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.66-SNAPSHOT + 1.0.66 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.66 From 391a9242c3c58251351302d7d79018fbc2b34ad9 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Tue, 14 May 2024 04:59:08 +0000 Subject: [PATCH 33/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 211eddf..c4eff84 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.66 + 1.0.67-SNAPSHOT Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.66 + v1.0.16 From 92618e5084c04e68bb6987b386d7e510151e6a44 Mon Sep 17 00:00:00 2001 From: Amith Koujalgi Date: Tue, 14 May 2024 10:35:55 +0530 Subject: [PATCH 34/69] Updated `OllamaChatResponseModel` to include `done_reason` field. Refer to the Ollama version: https://github.com/ollama/ollama/releases/tag/v0.1.37 --- .../models/chat/OllamaChatResponseModel.java | 3 ++- .../request/OllamaChatEndpointCaller.java | 21 +++++++------------ 2 files changed, 10 insertions(+), 14 deletions(-) diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/chat/OllamaChatResponseModel.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/chat/OllamaChatResponseModel.java index 4d0b027..418338f 100644 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/chat/OllamaChatResponseModel.java +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/chat/OllamaChatResponseModel.java @@ -1,14 +1,15 @@ package io.github.amithkoujalgi.ollama4j.core.models.chat; import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Data; import java.util.List; -import lombok.Data; @Data public class OllamaChatResponseModel { private String model; private @JsonProperty("created_at") String createdAt; + private @JsonProperty("done_reason") String doneReason; private OllamaChatMessage message; private boolean done; private String error; diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/OllamaChatEndpointCaller.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/OllamaChatEndpointCaller.java index 811ef11..cc6c7f8 100644 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/OllamaChatEndpointCaller.java +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/OllamaChatEndpointCaller.java @@ -1,12 +1,6 @@ package io.github.amithkoujalgi.ollama4j.core.models.request; -import java.io.IOException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.fasterxml.jackson.core.JsonProcessingException; - import 
io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth; @@ -15,11 +9,15 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver; import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; import io.github.amithkoujalgi.ollama4j.core.utils.Utils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; /** * Specialization class for requests */ -public class OllamaChatEndpointCaller extends OllamaEndpointCaller{ +public class OllamaChatEndpointCaller extends OllamaEndpointCaller { private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class); @@ -39,14 +37,14 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{ try { OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class); responseBuffer.append(ollamaResponseModel.getMessage().getContent()); - if(streamObserver != null) { + if (streamObserver != null) { streamObserver.notify(ollamaResponseModel); } return ollamaResponseModel.isDone(); } catch (JsonProcessingException e) { - LOG.error("Error parsing the Ollama chat response!",e); + LOG.error("Error parsing the Ollama chat response!", e); return true; - } + } } public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler) @@ -54,7 +52,4 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{ streamObserver = new OllamaChatStreamObserver(streamHandler); return super.callSync(body); } - - - } From 42b15ad93f04b6a0656ef152dd6f5fd6bd12c95f Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Tue, 14 May 2024 05:07:18 +0000 Subject: [PATCH 35/69] [maven-release-plugin] prepare release v1.0.67 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index c4eff84..b7e232f 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.67-SNAPSHOT + 1.0.67 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.67 From 250b1abc795da298ff20a95ac1d38921eabf6081 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Tue, 14 May 2024 05:07:20 +0000 Subject: [PATCH 36/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index b7e232f..a7eae27 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.67 + 1.0.68-SNAPSHOT Ollama4j Java library for interacting with Ollama API. 
@@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.67 + v1.0.16 From e8f99f28ecc89ce4979343162d6163d640011cce Mon Sep 17 00:00:00 2001 From: Amith Koujalgi Date: Tue, 14 May 2024 10:58:29 +0530 Subject: [PATCH 37/69] Updated library usages in README.md --- README.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/README.md b/README.md index 6cdfc25..2dc53fa 100644 --- a/README.md +++ b/README.md @@ -110,6 +110,13 @@ make it Releases (newer artifact versions) are done automatically on pushing the code to the `main` branch through GitHub Actions CI workflow. +#### Who's using Ollama4j? + +- `Datafaker`: a library to generate fake data + - https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api +- `Vaadin Web UI`: UI-Tester for Interactions with Ollama via ollama4j + - https://github.com/TEAMPB/ollama4j-vaadin-ui + #### Traction [![Star History Chart](https://api.star-history.com/svg?repos=amithkoujalgi/ollama4j&type=Date)](https://star-history.com/#amithkoujalgi/ollama4j&Date) From c296b34174cd4be2d38b9d1f7d64324c4d0d9c1f Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Tue, 14 May 2024 05:29:35 +0000 Subject: [PATCH 38/69] [maven-release-plugin] prepare release v1.0.68 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index a7eae27..6d0d17f 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.68-SNAPSHOT + 1.0.68 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.68 From e2d555d4041733e4554cfcbbcac5bc8970611f85 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Tue, 14 May 2024 05:29:36 +0000 Subject: [PATCH 39/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 6d0d17f..6a4dbb6 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.68 + 1.0.69-SNAPSHOT Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.68 + v1.0.16 From 2cbaf12d7c910dd386496d6ac213aea1f6538e6d Mon Sep 17 00:00:00 2001 From: Amith Koujalgi Date: Tue, 14 May 2024 11:11:38 +0530 Subject: [PATCH 40/69] Updated library usages in README.md --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 2dc53fa..3eb0276 100644 --- a/README.md +++ b/README.md @@ -116,6 +116,9 @@ Actions CI workflow. - https://github.com/datafaker-net/datafaker-experimental/tree/main/ollama-api - `Vaadin Web UI`: UI-Tester for Interactions with Ollama via ollama4j - https://github.com/TEAMPB/ollama4j-vaadin-ui +- `ollama-translator`: Minecraft 1.20.6 spigot plugin allows to easily break language barriers by using ollama on the + server to translate all messages into a specfic target language. 
+ - https://github.com/liebki/ollama-translator #### Traction From e750c2d7f92d1fa35a00a3aba0b86edda31d56fc Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Tue, 14 May 2024 05:42:55 +0000 Subject: [PATCH 41/69] [maven-release-plugin] prepare release v1.0.69 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 6a4dbb6..6292671 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.69-SNAPSHOT + 1.0.69 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.69 From b7cd81a7f5322954c35f770dfdd07fd5ef2c3738 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Tue, 14 May 2024 05:42:56 +0000 Subject: [PATCH 42/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 6292671..c340387 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.69 + 1.0.70-SNAPSHOT Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.69 + v1.0.16 From 3ed3187ba972fab5e8a930007a9b5865fbd29f77 Mon Sep 17 00:00:00 2001 From: AgentSchmecker Date: Thu, 16 May 2024 22:00:11 +0000 Subject: [PATCH 43/69] Updates Model.java to be up to date with current OllamaAPI Also adds Jackson-JSR310 for java.time JSON Mapping --- pom.xml | 7 ++++++- .../amithkoujalgi/ollama4j/core/models/Model.java | 6 +++++- .../amithkoujalgi/ollama4j/core/utils/Utils.java | 10 +++++++++- 3 files changed, 20 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index c340387..f88fe1d 100644 --- a/pom.xml +++ b/pom.xml @@ -149,7 +149,12 @@ com.fasterxml.jackson.core jackson-databind - 2.15.3 + 2.17.1 + + + com.fasterxml.jackson.datatype + jackson-datatype-jsr310 + 2.17.1 ch.qos.logback diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/Model.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/Model.java index 27fd3e5..79c2458 100644 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/Model.java +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/Model.java @@ -1,5 +1,7 @@ package io.github.amithkoujalgi.ollama4j.core.models; +import java.time.LocalDateTime; + import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; import io.github.amithkoujalgi.ollama4j.core.utils.Utils; @@ -11,7 +13,9 @@ public class Model { private String name; private String model; @JsonProperty("modified_at") - private String modifiedAt; + private LocalDateTime modifiedAt; + @JsonProperty("expires_at") + private LocalDateTime expiresAt; private String digest; private long size; @JsonProperty("details") diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/utils/Utils.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/utils/Utils.java index 1504c1d..96b07ae 100644 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/utils/Utils.java +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/utils/Utils.java @@ -8,10 +8,18 @@ import java.net.URISyntaxException; import java.net.URL; import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; public class Utils { + + private static ObjectMapper objectMapper; + public static ObjectMapper getObjectMapper() { - return new ObjectMapper(); + if(objectMapper == null) { + objectMapper = new ObjectMapper(); + objectMapper.registerModule(new JavaTimeModule()); + } + return objectMapper; } public static byte[] loadImageBytesFromUrl(String imageUrl) From e1b9d427717307141bd5503d58ccfb359506ccc1 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Sun, 19 May 2024 13:57:32 +0000 Subject: [PATCH 44/69] [maven-release-plugin] prepare release v1.0.70 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index f88fe1d..9be66c2 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.70-SNAPSHOT + 1.0.70 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.70 From 3a264cb6bb6d76fa5f55fd121657780738748f8a Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Sun, 19 May 2024 13:57:34 +0000 Subject: [PATCH 45/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 9be66c2..5319db4 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.70 + 1.0.71-SNAPSHOT Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.70 + v1.0.16 From 45e5d07581b640cef07f82655f0d729d9ba76dd9 Mon Sep 17 00:00:00 2001 From: Kelvin Watson Date: Sun, 19 May 2024 11:57:09 -0700 Subject: [PATCH 46/69] update README to include gradle options --- README.md | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 3eb0276..bd5fbfc 100644 --- a/README.md +++ b/README.md @@ -67,10 +67,31 @@ In your Maven project, add this dependency: io.github.amithkoujalgi ollama4j - 1.0.57 + 1.0.70 ``` +or + +In your Gradle project, add the dependency using the Kotlin DSL or the Groovy DSL: + +```kotlin +dependencies { + + val ollama4jVersion = "1.0.70" + + implementation("io.github.amithkoujalgi:ollama4j:$ollama4jVersion") +} + ``` + +```groovy +dependencies { + implementation("io.github.amithkoujalgi:ollama4j:1.0.70") + +} + +``` + Latest release: ![Maven Central](https://img.shields.io/maven-central/v/io.github.amithkoujalgi/ollama4j) From 2866d83a2fc8815d7863dbc65a17eec185004372 Mon Sep 17 00:00:00 2001 From: Kelvin Watson Date: Sun, 19 May 2024 11:58:08 -0700 Subject: [PATCH 47/69] Update README.md --- README.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/README.md b/README.md index bd5fbfc..e45b4a3 100644 --- a/README.md +++ b/README.md @@ -87,9 +87,7 @@ dependencies { ```groovy dependencies { implementation("io.github.amithkoujalgi:ollama4j:1.0.70") - } - ``` Latest release: @@ -181,4 +179,4 @@ project. 
### References -- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md) \ No newline at end of file +- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md) From 9c6a55f7b0bd0f9f805f3546368752c4a46448f9 Mon Sep 17 00:00:00 2001 From: AgentSchmecker Date: Mon, 20 May 2024 11:08:49 +0000 Subject: [PATCH 48/69] Generalizes Abstract Serialization Test Class Removes the "Request" naming context as this base class technically serves for general serialization purposes. --- ...st.java => AbstractSerializationTest.java} | 16 +++++----- .../jackson/TestChatRequestSerialization.java | 32 +++++++++---------- .../TestEmbeddingsRequestSerialization.java | 10 +++--- .../TestGenerateRequestSerialization.java | 12 +++---- 4 files changed, 35 insertions(+), 35 deletions(-) rename src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/{AbstractRequestSerializationTest.java => AbstractSerializationTest.java} (64%) diff --git a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/AbstractRequestSerializationTest.java b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/AbstractSerializationTest.java similarity index 64% rename from src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/AbstractRequestSerializationTest.java rename to src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/AbstractSerializationTest.java index c6b2ff5..d0ffc2c 100644 --- a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/AbstractRequestSerializationTest.java +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/AbstractSerializationTest.java @@ -6,30 +6,30 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import io.github.amithkoujalgi.ollama4j.core.utils.Utils; -public abstract class AbstractRequestSerializationTest { +public abstract class AbstractSerializationTest { protected ObjectMapper mapper = Utils.getObjectMapper(); - protected String serializeRequest(T req) { + protected String serialize(T obj) { try { - return mapper.writeValueAsString(req); + return mapper.writeValueAsString(obj); } catch (JsonProcessingException e) { fail("Could not serialize request!", e); return null; } } - protected T deserializeRequest(String jsonRequest, Class requestClass) { + protected T deserialize(String jsonObject, Class deserializationClass) { try { - return mapper.readValue(jsonRequest, requestClass); + return mapper.readValue(jsonObject, deserializationClass); } catch (JsonProcessingException e) { - fail("Could not deserialize jsonRequest!", e); + fail("Could not deserialize jsonObject!", e); return null; } } - protected void assertEqualsAfterUnmarshalling(T unmarshalledRequest, + protected void assertEqualsAfterUnmarshalling(T unmarshalledObject, T req) { - assertEquals(req, unmarshalledRequest); + assertEquals(req, unmarshalledObject); } } diff --git a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java index e4655bf..3ad049c 100644 --- a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestChatRequestSerialization.java @@ -14,7 +14,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilde import 
io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; -public class TestChatRequestSerialization extends AbstractRequestSerializationTest{ +public class TestChatRequestSerialization extends AbstractSerializationTest { private OllamaChatRequestBuilder builder; @@ -26,8 +26,8 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe @Test public void testRequestOnlyMandatoryFields() { OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build(); - String jsonRequest = serializeRequest(req); - assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req); + String jsonRequest = serialize(req); + assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req); } @Test @@ -35,16 +35,16 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt") .withMessage(OllamaChatMessageRole.USER, "Some prompt") .build(); - String jsonRequest = serializeRequest(req); - assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req); + String jsonRequest = serialize(req); + assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req); } @Test public void testRequestWithMessageAndImage() { OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt", List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build(); - String jsonRequest = serializeRequest(req); - assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req); + String jsonRequest = serialize(req); + assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaChatRequestModel.class), req); } @Test @@ -61,8 +61,8 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe .withOptions(b.setTopP(1).build()) .build(); - String jsonRequest = serializeRequest(req); - OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaChatRequestModel.class); + String jsonRequest = serialize(req); + OllamaChatRequestModel deserializeRequest = deserialize(jsonRequest, OllamaChatRequestModel.class); assertEqualsAfterUnmarshalling(deserializeRequest, req); assertEquals(1, deserializeRequest.getOptions().get("mirostat")); assertEquals(1.0, deserializeRequest.getOptions().get("temperature")); @@ -79,7 +79,7 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt") .withGetJsonResponse().build(); - String jsonRequest = serializeRequest(req); + String jsonRequest = serialize(req); // no jackson deserialization as format property is not boolean ==> omit as deserialization // of request is never used in real code anyways JSONObject jsonObject = new JSONObject(jsonRequest); @@ -91,15 +91,15 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe public void testWithTemplate() { OllamaChatRequestModel req = builder.withTemplate("System Template") .build(); - String jsonRequest = serializeRequest(req); - assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaChatRequestModel.class), req); + String jsonRequest = serialize(req); + assertEqualsAfterUnmarshalling(deserialize(jsonRequest, 
OllamaChatRequestModel.class), req); } @Test public void testWithStreaming() { OllamaChatRequestModel req = builder.withStreaming().build(); - String jsonRequest = serializeRequest(req); - assertEquals(deserializeRequest(jsonRequest, OllamaChatRequestModel.class).isStream(), true); + String jsonRequest = serialize(req); + assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).isStream(), true); } @Test @@ -107,7 +107,7 @@ public class TestChatRequestSerialization extends AbstractRequestSerializationTe String expectedKeepAlive = "5m"; OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive) .build(); - String jsonRequest = serializeRequest(req); - assertEquals(deserializeRequest(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive); + String jsonRequest = serialize(req); + assertEquals(deserialize(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive); } } diff --git a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestEmbeddingsRequestSerialization.java b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestEmbeddingsRequestSerialization.java index ff1e308..a546d6d 100644 --- a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestEmbeddingsRequestSerialization.java +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestEmbeddingsRequestSerialization.java @@ -7,7 +7,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsR import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; -public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializationTest{ +public class TestEmbeddingsRequestSerialization extends AbstractSerializationTest { private OllamaEmbeddingsRequestBuilder builder; @@ -19,8 +19,8 @@ public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializa @Test public void testRequestOnlyMandatoryFields() { OllamaEmbeddingsRequestModel req = builder.build(); - String jsonRequest = serializeRequest(req); - assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class), req); + String jsonRequest = serialize(req); + assertEqualsAfterUnmarshalling(deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class), req); } @Test @@ -29,8 +29,8 @@ public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializa OllamaEmbeddingsRequestModel req = builder .withOptions(b.setMirostat(1).build()).build(); - String jsonRequest = serializeRequest(req); - OllamaEmbeddingsRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class); + String jsonRequest = serialize(req); + OllamaEmbeddingsRequestModel deserializeRequest = deserialize(jsonRequest,OllamaEmbeddingsRequestModel.class); assertEqualsAfterUnmarshalling(deserializeRequest, req); assertEquals(1, deserializeRequest.getOptions().get("mirostat")); } diff --git a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestGenerateRequestSerialization.java b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestGenerateRequestSerialization.java index 03610f7..8e95288 100644 --- a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestGenerateRequestSerialization.java +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestGenerateRequestSerialization.java @@ -11,7 +11,7 @@ import 
io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateReque import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; -public class TestGenerateRequestSerialization extends AbstractRequestSerializationTest{ +public class TestGenerateRequestSerialization extends AbstractSerializationTest { private OllamaGenerateRequestBuilder builder; @@ -24,8 +24,8 @@ public class TestGenerateRequestSerialization extends AbstractRequestSerializati public void testRequestOnlyMandatoryFields() { OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build(); - String jsonRequest = serializeRequest(req); - assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class), req); + String jsonRequest = serialize(req); + assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaGenerateRequestModel.class), req); } @Test @@ -34,8 +34,8 @@ public class TestGenerateRequestSerialization extends AbstractRequestSerializati OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build(); - String jsonRequest = serializeRequest(req); - OllamaGenerateRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class); + String jsonRequest = serialize(req); + OllamaGenerateRequestModel deserializeRequest = deserialize(jsonRequest, OllamaGenerateRequestModel.class); assertEqualsAfterUnmarshalling(deserializeRequest, req); assertEquals(1, deserializeRequest.getOptions().get("mirostat")); } @@ -45,7 +45,7 @@ public class TestGenerateRequestSerialization extends AbstractRequestSerializati OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").withGetJsonResponse().build(); - String jsonRequest = serializeRequest(req); + String jsonRequest = serialize(req); // no jackson deserialization as format property is not boolean ==> omit as deserialization // of request is never used in real code anyways JSONObject jsonObject = new JSONObject(jsonRequest); From f8ca4d041d61a3f769439b4a6fc805c3ec74d108 Mon Sep 17 00:00:00 2001 From: AgentSchmecker Date: Mon, 20 May 2024 11:10:03 +0000 Subject: [PATCH 49/69] Changes DateTime types of Model.java to OffsetDatetime Fixes #48 --- .../ollama4j/core/models/Model.java | 5 ++- .../TestModelRequestSerialization.java | 42 +++++++++++++++++++ 2 files changed, 45 insertions(+), 2 deletions(-) create mode 100644 src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestModelRequestSerialization.java diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/Model.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/Model.java index 79c2458..15efd70 100644 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/Model.java +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/Model.java @@ -1,6 +1,7 @@ package io.github.amithkoujalgi.ollama4j.core.models; import java.time.LocalDateTime; +import java.time.OffsetDateTime; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; @@ -13,9 +14,9 @@ public class Model { private String name; private String model; @JsonProperty("modified_at") - private LocalDateTime modifiedAt; + private OffsetDateTime modifiedAt; @JsonProperty("expires_at") - private LocalDateTime expiresAt; + private OffsetDateTime expiresAt; private String digest; private long size; @JsonProperty("details") diff --git 
a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestModelRequestSerialization.java b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestModelRequestSerialization.java new file mode 100644 index 0000000..712e507 --- /dev/null +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/jackson/TestModelRequestSerialization.java @@ -0,0 +1,42 @@ +package io.github.amithkoujalgi.ollama4j.unittests.jackson; + +import io.github.amithkoujalgi.ollama4j.core.models.Model; +import org.junit.jupiter.api.Test; + +public class TestModelRequestSerialization extends AbstractSerializationTest { + + @Test + public void testDeserializationOfModelResponseWithOffsetTime(){ + String serializedTestStringWithOffsetTime = "{\n" + + "\"name\": \"codellama:13b\",\n" + + "\"modified_at\": \"2023-11-04T14:56:49.277302595-07:00\",\n" + + "\"size\": 7365960935,\n" + + "\"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n" + + "\"details\": {\n" + + "\"format\": \"gguf\",\n" + + "\"family\": \"llama\",\n" + + "\"families\": null,\n" + + "\"parameter_size\": \"13B\",\n" + + "\"quantization_level\": \"Q4_0\"\n" + + "}}"; + deserialize(serializedTestStringWithOffsetTime,Model.class); + } + + @Test + public void testDeserializationOfModelResponseWithZuluTime(){ + String serializedTestStringWithZuluTimezone = "{\n" + + "\"name\": \"codellama:13b\",\n" + + "\"modified_at\": \"2023-11-04T14:56:49.277302595Z\",\n" + + "\"size\": 7365960935,\n" + + "\"digest\": \"9f438cb9cd581fc025612d27f7c1a6669ff83a8bb0ed86c94fcf4c5440555697\",\n" + + "\"details\": {\n" + + "\"format\": \"gguf\",\n" + + "\"family\": \"llama\",\n" + + "\"families\": null,\n" + + "\"parameter_size\": \"13B\",\n" + + "\"quantization_level\": \"Q4_0\"\n" + + "}}"; + deserialize(serializedTestStringWithZuluTimezone,Model.class); + } + +} From 3a9b8c309dc452fafd64264807f441e4313af4a4 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Mon, 20 May 2024 14:53:54 +0000 Subject: [PATCH 50/69] [maven-release-plugin] prepare release v1.0.71 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 5319db4..cdb8d4d 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.71-SNAPSHOT + 1.0.71 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.71 From b55925df281707ee137211f65e5e919406f93c76 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Mon, 20 May 2024 14:53:55 +0000 Subject: [PATCH 51/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index cdb8d4d..a0bd990 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.71 + 1.0.72-SNAPSHOT Ollama4j Java library for interacting with Ollama API. 
@@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.71 + v1.0.16 From e33ad1a1e3e7d26c5ad61bf2518ca12c2a924917 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Mon, 20 May 2024 14:55:01 +0000 Subject: [PATCH 52/69] [maven-release-plugin] prepare release v1.0.72 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index a0bd990..51ae5d5 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.72-SNAPSHOT + 1.0.72 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.72 From bb0785140b808300d327ea793680f9c9dd145161 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Mon, 20 May 2024 14:55:02 +0000 Subject: [PATCH 53/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 51ae5d5..de40186 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.72 + 1.0.73-SNAPSHOT Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.72 + v1.0.16 From e39c47b8e13f51de2bc38c3cfce60ab4c63d71ff Mon Sep 17 00:00:00 2001 From: andrewtodd Date: Sun, 9 Jun 2024 16:43:46 +1000 Subject: [PATCH 54/69] Add codestral as a model. --- .../amithkoujalgi/ollama4j/core/types/OllamaModelType.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java index d7984d0..5733979 100644 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java @@ -79,4 +79,5 @@ public class OllamaModelType { public static final String NOTUS = "notus"; public static final String DUCKDB_NSQL = "duckdb-nsql"; public static final String ALL_MINILM = "all-minilm"; + public static final String CODESTRAL = "codestral"; } From 165d04b1bbf74f89b759cec2a0891dcc914a7c47 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Wed, 12 Jun 2024 03:22:49 +0000 Subject: [PATCH 55/69] [maven-release-plugin] prepare release v1.0.73 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index de40186..68f7047 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.73-SNAPSHOT + 1.0.73 Ollama4j Java library for interacting with Ollama API. 
@@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.73 From b5801d84e0c1c60ac9e689942bece4a484376f19 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Wed, 12 Jun 2024 03:22:50 +0000 Subject: [PATCH 56/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 68f7047..3e73414 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.73 + 1.0.74-SNAPSHOT Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.73 + v1.0.16 From 8eea19a53987d5a99ac3f6d116bf58ec428a7498 Mon Sep 17 00:00:00 2001 From: Amith Koujalgi Date: Thu, 11 Jul 2024 23:35:39 +0530 Subject: [PATCH 57/69] Added model types - `gemma2` and `qwen2` --- .../amithkoujalgi/ollama4j/core/types/OllamaModelType.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java index 5733979..2613d86 100644 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/types/OllamaModelType.java @@ -9,6 +9,9 @@ package io.github.amithkoujalgi.ollama4j.core.types; @SuppressWarnings("ALL") public class OllamaModelType { public static final String GEMMA = "gemma"; + public static final String GEMMA2 = "gemma2"; + + public static final String LLAMA2 = "llama2"; public static final String LLAMA3 = "llama3"; public static final String MISTRAL = "mistral"; @@ -30,6 +33,8 @@ public class OllamaModelType { public static final String ZEPHYR = "zephyr"; public static final String OPENHERMES = "openhermes"; public static final String QWEN = "qwen"; + + public static final String QWEN2 = "qwen2"; public static final String WIZARDCODER = "wizardcoder"; public static final String LLAMA2_CHINESE = "llama2-chinese"; public static final String TINYLLAMA = "tinyllama"; From 4744315d454ea6f1599a04d6cfa20be8d6214f3f Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Thu, 11 Jul 2024 18:06:59 +0000 Subject: [PATCH 58/69] [maven-release-plugin] prepare release v1.0.74 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 3e73414..d7680b1 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.74-SNAPSHOT + 1.0.74 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.74 From be549430c5e36c709cb29d9dce41523ff1990ff3 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Thu, 11 Jul 2024 18:07:00 +0000 Subject: [PATCH 59/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index d7680b1..cef549d 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.74 + 1.0.75-SNAPSHOT Ollama4j Java library for interacting with Ollama API. 
@@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.74 + v1.0.16 From 515d1f039916adcb9ec620c8c33f72d7cba40d28 Mon Sep 17 00:00:00 2001 From: Amith Koujalgi Date: Fri, 12 Jul 2024 01:05:56 +0530 Subject: [PATCH 60/69] Update README.md --- README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/README.md b/README.md index e45b4a3..f111d6b 100644 --- a/README.md +++ b/README.md @@ -180,3 +180,8 @@ project. ### References - [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md) + +## Appreciate my work? + +Buy Me A Coffee + From d9e3860123742499b05de2a804ccdcd602626084 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Thu, 11 Jul 2024 19:37:03 +0000 Subject: [PATCH 61/69] [maven-release-plugin] prepare release v1.0.75 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index cef549d..2660b74 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.75-SNAPSHOT + 1.0.75 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.75 From 8ef6fac28effc28935cb224db7f1575b36637673 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Thu, 11 Jul 2024 19:37:04 +0000 Subject: [PATCH 62/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 2660b74..800ba57 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.75 + 1.0.76-SNAPSHOT Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.75 + v1.0.16 From 91ee6cb4c1c807e681d21be774040c6020181d08 Mon Sep 17 00:00:00 2001 From: Amith Koujalgi Date: Fri, 12 Jul 2024 17:06:41 +0530 Subject: [PATCH 63/69] Added support for tools/function calling - specifically for Mistral's latest model. 
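
A minimal usage sketch of the new tools API, assuming the `MistralTools` builders and the `generateWithTools(model, prompt, raw, options)` call shape shown in the documentation added below; the class name, the `current-weather` tool, and the generic type parameters are illustrative only:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.tools.MistralTools;
import io.github.amithkoujalgi.ollama4j.core.tools.OllamaToolsResult;
import io.github.amithkoujalgi.ollama4j.core.tools.ToolDef;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

import java.util.Map;

public class ToolsQuickstart {

    // Invoked by the library when the model asks for the "current-weather" tool.
    public static String getCurrentWeather(Map<String, Object> arguments) {
        return "Currently " + arguments.get("city").toString() + "'s weather is nice.";
    }

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
        ollamaAPI.setRequestTimeoutSeconds(60);

        // Describe the tool (name, description, required properties), bind it to
        // the Java function above, and register it with the API.
        MistralTools.ToolSpecification weatherTool = MistralTools.ToolSpecification.builder()
                .functionName("current-weather")
                .functionDesc("Get current weather")
                .props(new MistralTools.PropsBuilder()
                        .withProperty("city", MistralTools.PromptFuncDefinition.Property.builder()
                                .type("string")
                                .description("The city, e.g. New Delhi, India")
                                .required(true)
                                .build())
                        .build())
                .toolDefinition(ToolsQuickstart::getCurrentWeather)
                .build();
        ollamaAPI.registerTool(weatherTool);

        // Build a raw prompt that embeds the tool specification and let the API
        // route the model's function call back to the registered tool.
        String prompt = new MistralTools.PromptBuilder()
                .withToolSpecification(weatherTool)
                .withPrompt("What is the current weather in Bengaluru?")
                .build();
        OllamaToolsResult toolsResult =
                ollamaAPI.generateWithTools("mistral", prompt, false, new OptionsBuilder().build());
        for (Map.Entry<ToolDef, Object> result : toolsResult.getToolResults().entrySet()) {
            System.out.printf("[Response from tool '%s']: %s%n", result.getKey().getName(), result.getValue());
        }
    }
}
```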
--- docs/docs/apis-generate/generate-async.md | 2 +- .../generate-with-image-files.md | 2 +- .../apis-generate/generate-with-image-urls.md | 2 +- .../docs/apis-generate/generate-with-tools.md | 271 ++++++++ docs/docs/apis-generate/prompt-builder.md | 2 +- pom.xml | 3 +- .../ollama4j/core/OllamaAPI.java | 63 +- .../request/OllamaGenerateEndpointCaller.java | 42 +- .../ollama4j/core/tools/DynamicFunction.java | 8 + .../ollama4j/core/tools/MistralTools.java | 139 ++++ .../core/tools/OllamaToolsResult.java | 16 + .../ollama4j/core/tools/ToolDef.java | 18 + .../ollama4j/core/tools/ToolRegistry.java | 17 + .../integrationtests/TestRealAPIs.java | 652 +++++++++--------- .../ollama4j/unittests/TestMockedAPIs.java | 259 +++---- 15 files changed, 1006 insertions(+), 490 deletions(-) create mode 100644 docs/docs/apis-generate/generate-with-tools.md create mode 100644 src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/DynamicFunction.java create mode 100644 src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/MistralTools.java create mode 100644 src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/OllamaToolsResult.java create mode 100644 src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/ToolDef.java create mode 100644 src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/ToolRegistry.java diff --git a/docs/docs/apis-generate/generate-async.md b/docs/docs/apis-generate/generate-async.md index 7d8cc54..49f556f 100644 --- a/docs/docs/apis-generate/generate-async.md +++ b/docs/docs/apis-generate/generate-async.md @@ -1,5 +1,5 @@ --- -sidebar_position: 2 +sidebar_position: 3 --- # Generate - Async diff --git a/docs/docs/apis-generate/generate-with-image-files.md b/docs/docs/apis-generate/generate-with-image-files.md index 37f4f03..4406981 100644 --- a/docs/docs/apis-generate/generate-with-image-files.md +++ b/docs/docs/apis-generate/generate-with-image-files.md @@ -1,5 +1,5 @@ --- -sidebar_position: 3 +sidebar_position: 4 --- # Generate - With Image Files diff --git a/docs/docs/apis-generate/generate-with-image-urls.md b/docs/docs/apis-generate/generate-with-image-urls.md index 19d6cf1..587e8f0 100644 --- a/docs/docs/apis-generate/generate-with-image-urls.md +++ b/docs/docs/apis-generate/generate-with-image-urls.md @@ -1,5 +1,5 @@ --- -sidebar_position: 4 +sidebar_position: 5 --- # Generate - With Image URLs diff --git a/docs/docs/apis-generate/generate-with-tools.md b/docs/docs/apis-generate/generate-with-tools.md new file mode 100644 index 0000000..0ca142a --- /dev/null +++ b/docs/docs/apis-generate/generate-with-tools.md @@ -0,0 +1,271 @@ +--- +sidebar_position: 2 +--- + +# Generate - With Tools + +This API lets you perform [function calling](https://docs.mistral.ai/capabilities/function_calling/) using LLMs in a +synchronous way. +This API correlates to +the [generate](https://github.com/ollama/ollama/blob/main/docs/api.md#request-raw-mode) API with `raw` mode. + +:::note + +This is an only an experimental implementation and has a very basic design. + +Currently, built and tested for [Mistral's latest model](https://ollama.com/library/mistral) only. We could redesign +this +in the future if tooling is supported for more models with a generic interaction standard from Ollama. + +::: + +### Function Calling/Tools + +Assume you want to call a method in your code based on the response generated from the model. 
+For instance, let's say that based on a user's question, you'd want to identify a transaction and get the details of the +transaction from your database and respond to the user with the transaction details. + +You could do that with ease with the `function calling` capabilities of the models by registering your `tools`. + +### Create Functions + +This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns a +value. + +```java +public static String getCurrentFuelPrice(Map arguments) { + String location = arguments.get("location").toString(); + String fuelType = arguments.get("fuelType").toString(); + return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; +} +``` + +This function takes the argument `city` and performs an operation with the argument and returns a +value. + +```java +public static String getCurrentWeather(Map arguments) { + String location = arguments.get("city").toString(); + return "Currently " + location + "'s weather is nice."; +} +``` + +### Define Tool Specifications + +Lets define a sample tool specification called **Fuel Price Tool** for getting the current fuel price. + +- Specify the function `name`, `description`, and `required` properties (`location` and `fuelType`). +- Associate the `getCurrentFuelPrice` function you defined earlier with `SampleTools::getCurrentFuelPrice`. + +```java +MistralTools.ToolSpecification fuelPriceToolSpecification = MistralTools.ToolSpecification.builder() + .functionName("current-fuel-price") + .functionDesc("Get current fuel price") + .props( + new MistralTools.PropsBuilder() + .withProperty("location", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) + .withProperty("fuelType", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build()) + .build() + ) + .toolDefinition(SampleTools::getCurrentFuelPrice) + .build(); +``` + +Lets also define a sample tool specification called **Weather Tool** for getting the current weather. + +- Specify the function `name`, `description`, and `required` property (`city`). +- Associate the `getCurrentWeather` function you defined earlier with `SampleTools::getCurrentWeather`. + +```java +MistralTools.ToolSpecification weatherToolSpecification = MistralTools.ToolSpecification.builder() + .functionName("current-weather") + .functionDesc("Get current weather") + .props( + new MistralTools.PropsBuilder() + .withProperty("city", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) + .build() + ) + .toolDefinition(SampleTools::getCurrentWeather) + .build(); +``` + +### Register the Tools + +Register the defined tools (`fuel price` and `weather`) with the OllamaAPI. + +```shell +ollamaAPI.registerTool(fuelPriceToolSpecification); +ollamaAPI.registerTool(weatherToolSpecification); +``` + +### Create prompt with Tools + +`Prompt 1`: Create a prompt asking for the petrol price in Bengaluru using the defined fuel price and weather tools. 
+ +```shell +String prompt1 = new MistralTools.PromptBuilder() + .withToolSpecification(fuelPriceToolSpecification) + .withToolSpecification(weatherToolSpecification) + .withPrompt("What is the petrol price in Bengaluru?") + .build(); +OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, false, new OptionsBuilder().build()); +for (Map.Entry r : toolsResult.getToolResults().entrySet()) { + System.out.printf("[Response from tool '%s']: %s%n", r.getKey().getName(), r.getValue().toString()); +} +``` + +Now, fire away your question to the model. + +You will get a response similar to: + +::::tip[LLM Response] + +[Response from tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L + +:::: + +`Prompt 2`: Create a prompt asking for the current weather in Bengaluru using the same tools. + +```shell +String prompt2 = new MistralTools.PromptBuilder() + .withToolSpecification(fuelPriceToolSpecification) + .withToolSpecification(weatherToolSpecification) + .withPrompt("What is the current weather in Bengaluru?") + .build(); +OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, false, new OptionsBuilder().build()); +for (Map.Entry r : toolsResult.getToolResults().entrySet()) { + System.out.printf("[Response from tool '%s']: %s%n", r.getKey().getName(), r.getValue().toString()); +} +``` + +Again, fire away your question to the model. + +You will get a response similar to: + +::::tip[LLM Response] + +[Response from tool 'current-weather']: Currently Bengaluru's weather is nice +:::: + +### Full Example + +```java + +import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; +import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; +import io.github.amithkoujalgi.ollama4j.core.tools.ToolDef; +import io.github.amithkoujalgi.ollama4j.core.tools.MistralTools; +import io.github.amithkoujalgi.ollama4j.core.tools.OllamaToolsResult; +import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Map; + +public class FunctionCallingWithMistral { + public static void main(String[] args) throws Exception { + String host = "http://localhost:11434/"; + OllamaAPI ollamaAPI = new OllamaAPI(host); + ollamaAPI.setRequestTimeoutSeconds(60); + + String model = "mistral"; + + + MistralTools.ToolSpecification fuelPriceToolSpecification = MistralTools.ToolSpecification.builder() + .functionName("current-fuel-price") + .functionDesc("Get current fuel price") + .props( + new MistralTools.PropsBuilder() + .withProperty("location", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. New Delhi, India").required(true).build()) + .withProperty("fuelType", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The fuel type.").enumValues(Arrays.asList("petrol", "diesel")).required(true).build()) + .build() + ) + .toolDefinition(SampleTools::getCurrentFuelPrice) + .build(); + + MistralTools.ToolSpecification weatherToolSpecification = MistralTools.ToolSpecification.builder() + .functionName("current-weather") + .functionDesc("Get current weather") + .props( + new MistralTools.PropsBuilder() + .withProperty("city", MistralTools.PromptFuncDefinition.Property.builder().type("string").description("The city, e.g. 
New Delhi, India").required(true).build()) + .build() + ) + .toolDefinition(SampleTools::getCurrentWeather) + .build(); + + ollamaAPI.registerTool(fuelPriceToolSpecification); + ollamaAPI.registerTool(weatherToolSpecification); + + String prompt1 = new MistralTools.PromptBuilder() + .withToolSpecification(fuelPriceToolSpecification) + .withToolSpecification(weatherToolSpecification) + .withPrompt("What is the petrol price in Bengaluru?") + .build(); + String prompt2 = new MistralTools.PromptBuilder() + .withToolSpecification(fuelPriceToolSpecification) + .withToolSpecification(weatherToolSpecification) + .withPrompt("What is the current weather in Bengaluru?") + .build(); + + ask(ollamaAPI, model, prompt1); + ask(ollamaAPI, model, prompt2); + } + + public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException { + OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, false, new OptionsBuilder().build()); + for (Map.Entry r : toolsResult.getToolResults().entrySet()) { + System.out.printf("[Response from tool '%s']: %s%n", r.getKey().getName(), r.getValue().toString()); + } + } +} + +class SampleTools { + public static String getCurrentFuelPrice(Map arguments) { + String location = arguments.get("location").toString(); + String fuelType = arguments.get("fuelType").toString(); + return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; + } + + public static String getCurrentWeather(Map arguments) { + String location = arguments.get("city").toString(); + return "Currently " + location + "'s weather is nice."; + } +} + +``` + +Run this full example and you will get a response similar to: + +::::tip[LLM Response] + +[Response from tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L + +[Response from tool 'current-weather']: Currently Bengaluru's weather is nice +:::: + +### Room for improvement + +Instead of explicitly registering `ollamaAPI.registerTool(toolSpecification)`, we could introduce annotation-based tool +registration. For example: + +```java + +@ToolSpec(name = "current-fuel-price", desc = "Get current fuel price") +public String getCurrentFuelPrice(Map arguments) { + String location = arguments.get("location").toString(); + String fuelType = arguments.get("fuelType").toString(); + return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; +} +``` + +Instead of passing a map of args `Map arguments` to the tool functions, we could support passing +specific args separately with their data types. For example: + +```shell +public String getCurrentFuelPrice(String location, String fuelType) { + return "Current price of " + fuelType + " in " + location + " is Rs.103/L"; +} +``` + +Updating async/chat APIs with support for tool-based generation. 
\ No newline at end of file diff --git a/docs/docs/apis-generate/prompt-builder.md b/docs/docs/apis-generate/prompt-builder.md index a798808..ffe57d7 100644 --- a/docs/docs/apis-generate/prompt-builder.md +++ b/docs/docs/apis-generate/prompt-builder.md @@ -1,5 +1,5 @@ --- -sidebar_position: 5 +sidebar_position: 6 --- # Prompt Builder diff --git a/pom.xml b/pom.xml index 800ba57..c2f3754 100644 --- a/pom.xml +++ b/pom.xml @@ -1,5 +1,6 @@ - + 4.0.0 io.github.amithkoujalgi diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java index 1f22210..80654ae 100644 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/OllamaAPI.java @@ -10,6 +10,7 @@ import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingRe import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel; import io.github.amithkoujalgi.ollama4j.core.models.request.*; +import io.github.amithkoujalgi.ollama4j.core.tools.*; import io.github.amithkoujalgi.ollama4j.core.utils.Options; import io.github.amithkoujalgi.ollama4j.core.utils.Utils; import org.slf4j.Logger; @@ -25,9 +26,7 @@ import java.net.http.HttpResponse; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.time.Duration; -import java.util.ArrayList; -import java.util.Base64; -import java.util.List; +import java.util.*; /** * The base Ollama API class. @@ -339,6 +338,7 @@ public class OllamaAPI { } } + /** * Generate response for a question to a model running on Ollama server. This is a sync/blocking * call. @@ -351,9 +351,10 @@ public class OllamaAPI { * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false. * @return OllamaResult that includes response text and time taken for response */ - public OllamaResult generate(String model, String prompt, Options options, OllamaStreamHandler streamHandler) + public OllamaResult generate(String model, String prompt, boolean raw, Options options, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException { OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); + ollamaRequestModel.setRaw(raw); ollamaRequestModel.setOptions(options.getOptionsMap()); return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler); } @@ -361,13 +362,37 @@ public class OllamaAPI { /** * Convenience method to call Ollama API without streaming responses. *

- * Uses {@link #generate(String, String, Options, OllamaStreamHandler)} + * Uses {@link #generate(String, String, boolean, Options, OllamaStreamHandler)} + * + * @param model Model to use + * @param prompt Prompt text + * @param raw In some cases, you may wish to bypass the templating system and provide a full prompt. In this case, you can use the raw parameter to disable templating. Also note that raw mode will not return a context. + * @param options Additional Options + * @return OllamaResult */ - public OllamaResult generate(String model, String prompt, Options options) + public OllamaResult generate(String model, String prompt, boolean raw, Options options) throws OllamaBaseException, IOException, InterruptedException { - return generate(model, prompt, options, null); + return generate(model, prompt, raw, options, null); } + + public OllamaToolsResult generateWithTools(String model, String prompt, boolean raw, Options options) + throws OllamaBaseException, IOException, InterruptedException { + OllamaToolsResult toolResult = new OllamaToolsResult(); + Map toolResults = new HashMap<>(); + + OllamaResult result = generate(model, prompt, raw, options, null); + toolResult.setModelResult(result); + + List toolDefs = Utils.getObjectMapper().readValue(result.getResponse(), Utils.getObjectMapper().getTypeFactory().constructCollectionType(List.class, ToolDef.class)); + for (ToolDef toolDef : toolDefs) { + toolResults.put(toolDef, invokeTool(toolDef)); + } + toolResult.setToolResults(toolResults); + return toolResult; + } + + /** * Generate response for a question to a model running on Ollama server and get a callback handle * that can be used to check for status and get the response from the model later. This would be @@ -377,9 +402,9 @@ public class OllamaAPI { * @param prompt the prompt/question text * @return the ollama async result callback handle */ - public OllamaAsyncResultCallback generateAsync(String model, String prompt) { + public OllamaAsyncResultCallback generateAsync(String model, String prompt, boolean raw) { OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt); - + ollamaRequestModel.setRaw(raw); URI uri = URI.create(this.host + "/api/generate"); OllamaAsyncResultCallback ollamaAsyncResultCallback = new OllamaAsyncResultCallback( @@ -576,4 +601,24 @@ public class OllamaAPI { private boolean isBasicAuthCredentialsSet() { return basicAuth != null; } + + + public void registerTool(MistralTools.ToolSpecification toolSpecification) { + ToolRegistry.addFunction(toolSpecification.getFunctionName(), toolSpecification.getToolDefinition()); + } + + private Object invokeTool(ToolDef toolDef) { + try { + String methodName = toolDef.getName(); + Map arguments = toolDef.getArguments(); + DynamicFunction function = ToolRegistry.getFunction(methodName); + if (function == null) { + throw new IllegalArgumentException("No such tool: " + methodName); + } + return function.apply(arguments); + } catch (Exception e) { + e.printStackTrace(); + return "Error calling tool: " + e.getMessage(); + } + } } diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/OllamaGenerateEndpointCaller.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/OllamaGenerateEndpointCaller.java index fe7fbec..d3d71e4 100644 --- a/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/OllamaGenerateEndpointCaller.java +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/models/request/OllamaGenerateEndpointCaller.java @@ -1,9 
+1,5 @@ package io.github.amithkoujalgi.ollama4j.core.models.request; -import java.io.IOException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.fasterxml.jackson.core.JsonProcessingException; import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler; import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; @@ -13,15 +9,19 @@ import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRespo import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver; import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody; import io.github.amithkoujalgi.ollama4j.core.utils.Utils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{ +import java.io.IOException; + +public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller { private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class); private OllamaGenerateStreamObserver streamObserver; public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) { - super(host, basicAuth, requestTimeoutSeconds, verbose); + super(host, basicAuth, requestTimeoutSeconds, verbose); } @Override @@ -31,24 +31,22 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{ @Override protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) { - try { - OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); - responseBuffer.append(ollamaResponseModel.getResponse()); - if(streamObserver != null) { - streamObserver.notify(ollamaResponseModel); - } - return ollamaResponseModel.isDone(); - } catch (JsonProcessingException e) { - LOG.error("Error parsing the Ollama chat response!",e); - return true; - } + try { + OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class); + responseBuffer.append(ollamaResponseModel.getResponse()); + if (streamObserver != null) { + streamObserver.notify(ollamaResponseModel); + } + return ollamaResponseModel.isDone(); + } catch (JsonProcessingException e) { + LOG.error("Error parsing the Ollama chat response!", e); + return true; + } } public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler) - throws OllamaBaseException, IOException, InterruptedException { - streamObserver = new OllamaGenerateStreamObserver(streamHandler); - return super.callSync(body); + throws OllamaBaseException, IOException, InterruptedException { + streamObserver = new OllamaGenerateStreamObserver(streamHandler); + return super.callSync(body); } - - } diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/DynamicFunction.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/DynamicFunction.java new file mode 100644 index 0000000..5b8f5e6 --- /dev/null +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/DynamicFunction.java @@ -0,0 +1,8 @@ +package io.github.amithkoujalgi.ollama4j.core.tools; + +import java.util.Map; + +@FunctionalInterface +public interface DynamicFunction { + Object apply(Map arguments); +} diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/MistralTools.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/MistralTools.java new file mode 100644 index 0000000..fff8071 --- /dev/null +++ 
b/src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/MistralTools.java @@ -0,0 +1,139 @@ +package io.github.amithkoujalgi.ollama4j.core.tools; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonProcessingException; +import io.github.amithkoujalgi.ollama4j.core.utils.Utils; +import lombok.Builder; +import lombok.Data; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class MistralTools { + @Data + @Builder + public static class ToolSpecification { + private String functionName; + private String functionDesc; + private Map props; + private DynamicFunction toolDefinition; + } + + @Data + @JsonIgnoreProperties(ignoreUnknown = true) + public static class PromptFuncDefinition { + private String type; + private PromptFuncSpec function; + + @Data + public static class PromptFuncSpec { + private String name; + private String description; + private Parameters parameters; + } + + @Data + public static class Parameters { + private String type; + private Map properties; + private List required; + } + + @Data + @Builder + public static class Property { + private String type; + private String description; + @JsonProperty("enum") + @JsonInclude(JsonInclude.Include.NON_NULL) + private List enumValues; + @JsonIgnore + private boolean required; + } + } + + public static class PropsBuilder { + private final Map props = new HashMap<>(); + + public PropsBuilder withProperty(String key, PromptFuncDefinition.Property property) { + props.put(key, property); + return this; + } + + public Map build() { + return props; + } + } + + public static class PromptBuilder { + private final List tools = new ArrayList<>(); + + private String promptText; + + public String build() throws JsonProcessingException { + return "[AVAILABLE_TOOLS] " + Utils.getObjectMapper().writeValueAsString(tools) + "[/AVAILABLE_TOOLS][INST] " + promptText + " [/INST]"; + } + + public PromptBuilder withPrompt(String prompt) throws JsonProcessingException { + promptText = prompt; + return this; + } + + public PromptBuilder withToolSpecification(ToolSpecification spec) { + PromptFuncDefinition def = new PromptFuncDefinition(); + def.setType("function"); + + PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec(); + functionDetail.setName(spec.getFunctionName()); + functionDetail.setDescription(spec.getFunctionDesc()); + + PromptFuncDefinition.Parameters parameters = new PromptFuncDefinition.Parameters(); + parameters.setType("object"); + parameters.setProperties(spec.getProps()); + + List requiredValues = new ArrayList<>(); + for (Map.Entry p : spec.getProps().entrySet()) { + if (p.getValue().isRequired()) { + requiredValues.add(p.getKey()); + } + } + parameters.setRequired(requiredValues); + functionDetail.setParameters(parameters); + def.setFunction(functionDetail); + + tools.add(def); + return this; + } +// +// public PromptBuilder withToolSpecification(String functionName, String functionDesc, Map props) { +// PromptFuncDefinition def = new PromptFuncDefinition(); +// def.setType("function"); +// +// PromptFuncDefinition.PromptFuncSpec functionDetail = new PromptFuncDefinition.PromptFuncSpec(); +// functionDetail.setName(functionName); +// functionDetail.setDescription(functionDesc); +// +// PromptFuncDefinition.Parameters 
parameters = new PromptFuncDefinition.Parameters(); +// parameters.setType("object"); +// parameters.setProperties(props); +// +// List requiredValues = new ArrayList<>(); +// for (Map.Entry p : props.entrySet()) { +// if (p.getValue().isRequired()) { +// requiredValues.add(p.getKey()); +// } +// } +// parameters.setRequired(requiredValues); +// functionDetail.setParameters(parameters); +// def.setFunction(functionDetail); +// +// tools.add(def); +// return this; +// } + } +} diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/OllamaToolsResult.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/OllamaToolsResult.java new file mode 100644 index 0000000..65ef3ac --- /dev/null +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/OllamaToolsResult.java @@ -0,0 +1,16 @@ +package io.github.amithkoujalgi.ollama4j.core.tools; + +import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.util.Map; + +@Data +@NoArgsConstructor +@AllArgsConstructor +public class OllamaToolsResult { + private OllamaResult modelResult; + private Map toolResults; +} diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/ToolDef.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/ToolDef.java new file mode 100644 index 0000000..751d186 --- /dev/null +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/ToolDef.java @@ -0,0 +1,18 @@ +package io.github.amithkoujalgi.ollama4j.core.tools; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.util.Map; + +@Data +@AllArgsConstructor +@NoArgsConstructor +public class ToolDef { + + private String name; + private Map arguments; + +} + diff --git a/src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/ToolRegistry.java b/src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/ToolRegistry.java new file mode 100644 index 0000000..0004c7f --- /dev/null +++ b/src/main/java/io/github/amithkoujalgi/ollama4j/core/tools/ToolRegistry.java @@ -0,0 +1,17 @@ +package io.github.amithkoujalgi.ollama4j.core.tools; + +import java.util.HashMap; +import java.util.Map; + +public class ToolRegistry { + private static final Map functionMap = new HashMap<>(); + + + public static DynamicFunction getFunction(String name) { + return functionMap.get(name); + } + + public static void addFunction(String name, DynamicFunction function) { + functionMap.put(name, function); + } +} diff --git a/src/test/java/io/github/amithkoujalgi/ollama4j/integrationtests/TestRealAPIs.java b/src/test/java/io/github/amithkoujalgi/ollama4j/integrationtests/TestRealAPIs.java index d822077..58e55a1 100644 --- a/src/test/java/io/github/amithkoujalgi/ollama4j/integrationtests/TestRealAPIs.java +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/integrationtests/TestRealAPIs.java @@ -1,7 +1,5 @@ package io.github.amithkoujalgi.ollama4j.integrationtests; -import static org.junit.jupiter.api.Assertions.*; - import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; @@ -10,9 +8,16 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole; import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder; import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel; import 
io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult; -import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder; +import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel; import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; +import lombok.Data; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.File; import java.io.IOException; import java.io.InputStream; @@ -22,372 +27,369 @@ import java.net.http.HttpConnectTimeoutException; import java.util.List; import java.util.Objects; import java.util.Properties; -import lombok.Data; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Order; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; + +import static org.junit.jupiter.api.Assertions.*; class TestRealAPIs { - private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class); - OllamaAPI ollamaAPI; - Config config; + OllamaAPI ollamaAPI; + Config config; - private File getImageFileFromClasspath(String fileName) { - ClassLoader classLoader = getClass().getClassLoader(); - return new File(Objects.requireNonNull(classLoader.getResource(fileName)).getFile()); - } - - @BeforeEach - void setUp() { - config = new Config(); - ollamaAPI = new OllamaAPI(config.getOllamaURL()); - ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds()); - } - - @Test - @Order(1) - void testWrongEndpoint() { - OllamaAPI ollamaAPI = new OllamaAPI("http://wrong-host:11434"); - assertThrows(ConnectException.class, ollamaAPI::listModels); - } - - @Test - @Order(1) - void testEndpointReachability() { - try { - assertNotNull(ollamaAPI.listModels()); - } catch (HttpConnectTimeoutException e) { - fail(e.getMessage()); - } catch (Exception e) { - fail(e); + private File getImageFileFromClasspath(String fileName) { + ClassLoader classLoader = getClass().getClassLoader(); + return new File(Objects.requireNonNull(classLoader.getResource(fileName)).getFile()); } - } - @Test - @Order(2) - void testListModels() { - testEndpointReachability(); - try { - assertNotNull(ollamaAPI.listModels()); - ollamaAPI.listModels().forEach(System.out::println); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { - fail(e); + @BeforeEach + void setUp() { + config = new Config(); + ollamaAPI = new OllamaAPI(config.getOllamaURL()); + ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds()); } - } - @Test - @Order(2) - void testPullModel() { - testEndpointReachability(); - try { - ollamaAPI.pullModel(config.getModel()); - boolean found = - ollamaAPI.listModels().stream() - .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel())); - assertTrue(found); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { - fail(e); + @Test + @Order(1) + void testWrongEndpoint() { + OllamaAPI ollamaAPI = new OllamaAPI("http://wrong-host:11434"); + assertThrows(ConnectException.class, ollamaAPI::listModels); } - } - @Test - @Order(3) - void testListDtails() { - testEndpointReachability(); - try { - ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel()); - 
assertNotNull(modelDetails); - System.out.println(modelDetails); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { - fail(e); + @Test + @Order(1) + void testEndpointReachability() { + try { + assertNotNull(ollamaAPI.listModels()); + } catch (HttpConnectTimeoutException e) { + fail(e.getMessage()); + } catch (Exception e) { + fail(e); + } } - } - @Test - @Order(3) - void testAskModelWithDefaultOptions() { - testEndpointReachability(); - try { - OllamaResult result = - ollamaAPI.generate( - config.getModel(), - "What is the capital of France? And what's France's connection with Mona Lisa?", - new OptionsBuilder().build()); - assertNotNull(result); - assertNotNull(result.getResponse()); - assertFalse(result.getResponse().isEmpty()); - } catch (IOException | OllamaBaseException | InterruptedException e) { - fail(e); + @Test + @Order(2) + void testListModels() { + testEndpointReachability(); + try { + assertNotNull(ollamaAPI.listModels()); + ollamaAPI.listModels().forEach(System.out::println); + } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + fail(e); + } } - } - @Test - @Order(3) - void testAskModelWithDefaultOptionsStreamed() { - testEndpointReachability(); - try { - - StringBuffer sb = new StringBuffer(""); - - OllamaResult result = ollamaAPI.generate(config.getModel(), - "What is the capital of France? And what's France's connection with Mona Lisa?", - new OptionsBuilder().build(), (s) -> { - LOG.info(s); - String substring = s.substring(sb.toString().length(), s.length()); - LOG.info(substring); - sb.append(substring); - }); - - assertNotNull(result); - assertNotNull(result.getResponse()); - assertFalse(result.getResponse().isEmpty()); - assertEquals(sb.toString().trim(), result.getResponse().trim()); - } catch (IOException | OllamaBaseException | InterruptedException e) { - fail(e); + @Test + @Order(2) + void testPullModel() { + testEndpointReachability(); + try { + ollamaAPI.pullModel(config.getModel()); + boolean found = + ollamaAPI.listModels().stream() + .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel())); + assertTrue(found); + } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + fail(e); + } } - } - @Test - @Order(3) - void testAskModelWithOptions() { - testEndpointReachability(); - try { - OllamaResult result = - ollamaAPI.generate( - config.getModel(), - "What is the capital of France? 
And what's France's connection with Mona Lisa?", - new OptionsBuilder().setTemperature(0.9f).build()); - assertNotNull(result); - assertNotNull(result.getResponse()); - assertFalse(result.getResponse().isEmpty()); - } catch (IOException | OllamaBaseException | InterruptedException e) { - fail(e); + @Test + @Order(3) + void testListDtails() { + testEndpointReachability(); + try { + ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel()); + assertNotNull(modelDetails); + System.out.println(modelDetails); + } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + fail(e); + } } - } - @Test - @Order(3) - void testChat() { - testEndpointReachability(); - try { - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); - OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") - .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!") - .withMessage(OllamaChatMessageRole.USER,"And what is the second larges city?") - .build(); - - OllamaChatResult chatResult = ollamaAPI.chat(requestModel); - assertNotNull(chatResult); - assertFalse(chatResult.getResponse().isBlank()); - assertEquals(4,chatResult.getChatHistory().size()); - } catch (IOException | OllamaBaseException | InterruptedException e) { - fail(e); + @Test + @Order(3) + void testAskModelWithDefaultOptions() { + testEndpointReachability(); + try { + OllamaResult result = + ollamaAPI.generate( + config.getModel(), + "What is the capital of France? And what's France's connection with Mona Lisa?", + false, + new OptionsBuilder().build()); + assertNotNull(result); + assertNotNull(result.getResponse()); + assertFalse(result.getResponse().isEmpty()); + } catch (IOException | OllamaBaseException | InterruptedException e) { + fail(e); + } } - } - @Test - @Order(3) - void testChatWithSystemPrompt() { - testEndpointReachability(); - try { - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); - OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, - "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") - .withMessage(OllamaChatMessageRole.USER, - "What is the capital of France? And what's France's connection with Mona Lisa?") - .build(); + @Test + @Order(3) + void testAskModelWithDefaultOptionsStreamed() { + testEndpointReachability(); + try { + StringBuffer sb = new StringBuffer(""); + OllamaResult result = ollamaAPI.generate(config.getModel(), + "What is the capital of France? 
And what's France's connection with Mona Lisa?", + false, + new OptionsBuilder().build(), (s) -> { + LOG.info(s); + String substring = s.substring(sb.toString().length(), s.length()); + LOG.info(substring); + sb.append(substring); + }); - OllamaChatResult chatResult = ollamaAPI.chat(requestModel); - assertNotNull(chatResult); - assertFalse(chatResult.getResponse().isBlank()); - assertTrue(chatResult.getResponse().startsWith("NI")); - assertEquals(3, chatResult.getChatHistory().size()); - } catch (IOException | OllamaBaseException | InterruptedException e) { - fail(e); + assertNotNull(result); + assertNotNull(result.getResponse()); + assertFalse(result.getResponse().isEmpty()); + assertEquals(sb.toString().trim(), result.getResponse().trim()); + } catch (IOException | OllamaBaseException | InterruptedException e) { + fail(e); + } } - } - @Test - @Order(3) - void testChatWithStream() { - testEndpointReachability(); - try { - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); - OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, - "What is the capital of France? And what's France's connection with Mona Lisa?") - .build(); - - StringBuffer sb = new StringBuffer(""); - - OllamaChatResult chatResult = ollamaAPI.chat(requestModel,(s) -> { - LOG.info(s); - String substring = s.substring(sb.toString().length(), s.length()); - LOG.info(substring); - sb.append(substring); - }); - assertNotNull(chatResult); - assertEquals(sb.toString().trim(), chatResult.getResponse().trim()); - } catch (IOException | OllamaBaseException | InterruptedException e) { - fail(e); + @Test + @Order(3) + void testAskModelWithOptions() { + testEndpointReachability(); + try { + OllamaResult result = + ollamaAPI.generate( + config.getModel(), + "What is the capital of France? 
And what's France's connection with Mona Lisa?", + true, + new OptionsBuilder().setTemperature(0.9f).build()); + assertNotNull(result); + assertNotNull(result.getResponse()); + assertFalse(result.getResponse().isEmpty()); + } catch (IOException | OllamaBaseException | InterruptedException e) { + fail(e); + } } - } - @Test - @Order(3) - void testChatWithImageFromFileWithHistoryRecognition() { - testEndpointReachability(); - try { - OllamaChatRequestBuilder builder = - OllamaChatRequestBuilder.getInstance(config.getImageModel()); - OllamaChatRequestModel requestModel = - builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", - List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); + @Test + @Order(3) + void testChat() { + testEndpointReachability(); + try { + OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); + OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?") + .withMessage(OllamaChatMessageRole.ASSISTANT, "Should be Paris!") + .withMessage(OllamaChatMessageRole.USER, "And what is the second larges city?") + .build(); - OllamaChatResult chatResult = ollamaAPI.chat(requestModel); - assertNotNull(chatResult); - assertNotNull(chatResult.getResponse()); - - builder.reset(); - - requestModel = - builder.withMessages(chatResult.getChatHistory()) - .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build(); - - chatResult = ollamaAPI.chat(requestModel); - assertNotNull(chatResult); - assertNotNull(chatResult.getResponse()); - - - } catch (IOException | OllamaBaseException | InterruptedException e) { - fail(e); + OllamaChatResult chatResult = ollamaAPI.chat(requestModel); + assertNotNull(chatResult); + assertFalse(chatResult.getResponse().isBlank()); + assertEquals(4, chatResult.getChatHistory().size()); + } catch (IOException | OllamaBaseException | InterruptedException e) { + fail(e); + } } - } - @Test - @Order(3) - void testChatWithImageFromURL() { - testEndpointReachability(); - try { - OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel()); - OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", - "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg") - .build(); + @Test + @Order(3) + void testChatWithSystemPrompt() { + testEndpointReachability(); + try { + OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); + OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, + "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!") + .withMessage(OllamaChatMessageRole.USER, + "What is the capital of France? 
And what's France's connection with Mona Lisa?") + .build(); - OllamaChatResult chatResult = ollamaAPI.chat(requestModel); - assertNotNull(chatResult); - } catch (IOException | OllamaBaseException | InterruptedException e) { - fail(e); + OllamaChatResult chatResult = ollamaAPI.chat(requestModel); + assertNotNull(chatResult); + assertFalse(chatResult.getResponse().isBlank()); + assertTrue(chatResult.getResponse().startsWith("NI")); + assertEquals(3, chatResult.getChatHistory().size()); + } catch (IOException | OllamaBaseException | InterruptedException e) { + fail(e); + } } - } - @Test - @Order(3) - void testAskModelWithOptionsAndImageFiles() { - testEndpointReachability(); - File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg"); - try { - OllamaResult result = - ollamaAPI.generateWithImageFiles( - config.getImageModel(), - "What is in this image?", - List.of(imageFile), - new OptionsBuilder().build()); - assertNotNull(result); - assertNotNull(result.getResponse()); - assertFalse(result.getResponse().isEmpty()); - } catch (IOException | OllamaBaseException | InterruptedException e) { - fail(e); + @Test + @Order(3) + void testChatWithStream() { + testEndpointReachability(); + try { + OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel()); + OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, + "What is the capital of France? And what's France's connection with Mona Lisa?") + .build(); + + StringBuffer sb = new StringBuffer(""); + + OllamaChatResult chatResult = ollamaAPI.chat(requestModel, (s) -> { + LOG.info(s); + String substring = s.substring(sb.toString().length(), s.length()); + LOG.info(substring); + sb.append(substring); + }); + assertNotNull(chatResult); + assertEquals(sb.toString().trim(), chatResult.getResponse().trim()); + } catch (IOException | OllamaBaseException | InterruptedException e) { + fail(e); + } } - } - @Test - @Order(3) - void testAskModelWithOptionsAndImageFilesStreamed() { - testEndpointReachability(); - File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg"); - try { - StringBuffer sb = new StringBuffer(""); + @Test + @Order(3) + void testChatWithImageFromFileWithHistoryRecognition() { + testEndpointReachability(); + try { + OllamaChatRequestBuilder builder = + OllamaChatRequestBuilder.getInstance(config.getImageModel()); + OllamaChatRequestModel requestModel = + builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", + List.of(getImageFileFromClasspath("dog-on-a-boat.jpg"))).build(); - OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(), - "What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> { - LOG.info(s); - String substring = s.substring(sb.toString().length(), s.length()); - LOG.info(substring); - sb.append(substring); - }); - assertNotNull(result); - assertNotNull(result.getResponse()); - assertFalse(result.getResponse().isEmpty()); - assertEquals(sb.toString().trim(), result.getResponse().trim()); - } catch (IOException | OllamaBaseException | InterruptedException e) { - fail(e); + OllamaChatResult chatResult = ollamaAPI.chat(requestModel); + assertNotNull(chatResult); + assertNotNull(chatResult.getResponse()); + + builder.reset(); + + requestModel = + builder.withMessages(chatResult.getChatHistory()) + .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build(); + + chatResult = ollamaAPI.chat(requestModel); + assertNotNull(chatResult); + assertNotNull(chatResult.getResponse()); + + 
+ } catch (IOException | OllamaBaseException | InterruptedException e) { + fail(e); + } } - } - @Test - @Order(3) - void testAskModelWithOptionsAndImageURLs() { - testEndpointReachability(); - try { - OllamaResult result = - ollamaAPI.generateWithImageURLs( - config.getImageModel(), - "What is in this image?", - List.of( - "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"), - new OptionsBuilder().build()); - assertNotNull(result); - assertNotNull(result.getResponse()); - assertFalse(result.getResponse().isEmpty()); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { - fail(e); + @Test + @Order(3) + void testChatWithImageFromURL() { + testEndpointReachability(); + try { + OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getImageModel()); + OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?", + "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg") + .build(); + + OllamaChatResult chatResult = ollamaAPI.chat(requestModel); + assertNotNull(chatResult); + } catch (IOException | OllamaBaseException | InterruptedException e) { + fail(e); + } } - } - @Test - @Order(3) - public void testEmbedding() { - testEndpointReachability(); - try { - OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder - .getInstance(config.getModel(), "What is the capital of France?").build(); - - List embeddings = ollamaAPI.generateEmbeddings(request); - - assertNotNull(embeddings); - assertFalse(embeddings.isEmpty()); - } catch (IOException | OllamaBaseException | InterruptedException e) { - fail(e); + @Test + @Order(3) + void testAskModelWithOptionsAndImageFiles() { + testEndpointReachability(); + File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg"); + try { + OllamaResult result = + ollamaAPI.generateWithImageFiles( + config.getImageModel(), + "What is in this image?", + List.of(imageFile), + new OptionsBuilder().build()); + assertNotNull(result); + assertNotNull(result.getResponse()); + assertFalse(result.getResponse().isEmpty()); + } catch (IOException | OllamaBaseException | InterruptedException e) { + fail(e); + } + } + + @Test + @Order(3) + void testAskModelWithOptionsAndImageFilesStreamed() { + testEndpointReachability(); + File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg"); + try { + StringBuffer sb = new StringBuffer(""); + + OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(), + "What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> { + LOG.info(s); + String substring = s.substring(sb.toString().length(), s.length()); + LOG.info(substring); + sb.append(substring); + }); + assertNotNull(result); + assertNotNull(result.getResponse()); + assertFalse(result.getResponse().isEmpty()); + assertEquals(sb.toString().trim(), result.getResponse().trim()); + } catch (IOException | OllamaBaseException | InterruptedException e) { + fail(e); + } + } + + @Test + @Order(3) + void testAskModelWithOptionsAndImageURLs() { + testEndpointReachability(); + try { + OllamaResult result = + ollamaAPI.generateWithImageURLs( + config.getImageModel(), + "What is in this image?", + List.of( + "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"), + new OptionsBuilder().build()); + assertNotNull(result); + assertNotNull(result.getResponse()); + assertFalse(result.getResponse().isEmpty()); + } catch 
(IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + fail(e); + } + } + + @Test + @Order(3) + public void testEmbedding() { + testEndpointReachability(); + try { + OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder + .getInstance(config.getModel(), "What is the capital of France?").build(); + + List embeddings = ollamaAPI.generateEmbeddings(request); + + assertNotNull(embeddings); + assertFalse(embeddings.isEmpty()); + } catch (IOException | OllamaBaseException | InterruptedException e) { + fail(e); + } } - } } @Data class Config { - private String ollamaURL; - private String model; - private String imageModel; - private int requestTimeoutSeconds; + private String ollamaURL; + private String model; + private String imageModel; + private int requestTimeoutSeconds; - public Config() { - Properties properties = new Properties(); - try (InputStream input = - getClass().getClassLoader().getResourceAsStream("test-config.properties")) { - if (input == null) { - throw new RuntimeException("Sorry, unable to find test-config.properties"); - } - properties.load(input); - this.ollamaURL = properties.getProperty("ollama.url"); - this.model = properties.getProperty("ollama.model"); - this.imageModel = properties.getProperty("ollama.model.image"); - this.requestTimeoutSeconds = - Integer.parseInt(properties.getProperty("ollama.request-timeout-seconds")); - } catch (IOException e) { - throw new RuntimeException("Error loading properties", e); + public Config() { + Properties properties = new Properties(); + try (InputStream input = + getClass().getClassLoader().getResourceAsStream("test-config.properties")) { + if (input == null) { + throw new RuntimeException("Sorry, unable to find test-config.properties"); + } + properties.load(input); + this.ollamaURL = properties.getProperty("ollama.url"); + this.model = properties.getProperty("ollama.model"); + this.imageModel = properties.getProperty("ollama.model.image"); + this.requestTimeoutSeconds = + Integer.parseInt(properties.getProperty("ollama.request-timeout-seconds")); + } catch (IOException e) { + throw new RuntimeException("Error loading properties", e); + } } - } } diff --git a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/TestMockedAPIs.java b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/TestMockedAPIs.java index 879c67c..c5d60e1 100644 --- a/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/TestMockedAPIs.java +++ b/src/test/java/io/github/amithkoujalgi/ollama4j/unittests/TestMockedAPIs.java @@ -1,7 +1,5 @@ package io.github.amithkoujalgi.ollama4j.unittests; -import static org.mockito.Mockito.*; - import io.github.amithkoujalgi.ollama4j.core.OllamaAPI; import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException; import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail; @@ -9,155 +7,158 @@ import io.github.amithkoujalgi.ollama4j.core.models.OllamaAsyncResultCallback; import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult; import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType; import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + import java.io.IOException; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collections; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; + +import static org.mockito.Mockito.*; class TestMockedAPIs { - @Test - void testPullModel() { - OllamaAPI ollamaAPI = 
Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; - try { - doNothing().when(ollamaAPI).pullModel(model); - ollamaAPI.pullModel(model); - verify(ollamaAPI, times(1)).pullModel(model); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { - throw new RuntimeException(e); + @Test + void testPullModel() { + OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); + String model = OllamaModelType.LLAMA2; + try { + doNothing().when(ollamaAPI).pullModel(model); + ollamaAPI.pullModel(model); + verify(ollamaAPI, times(1)).pullModel(model); + } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + throw new RuntimeException(e); + } } - } - @Test - void testListModels() { - OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - try { - when(ollamaAPI.listModels()).thenReturn(new ArrayList<>()); - ollamaAPI.listModels(); - verify(ollamaAPI, times(1)).listModels(); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { - throw new RuntimeException(e); + @Test + void testListModels() { + OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); + try { + when(ollamaAPI.listModels()).thenReturn(new ArrayList<>()); + ollamaAPI.listModels(); + verify(ollamaAPI, times(1)).listModels(); + } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + throw new RuntimeException(e); + } } - } - @Test - void testCreateModel() { - OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; - String modelFilePath = "FROM llama2\nSYSTEM You are mario from Super Mario Bros."; - try { - doNothing().when(ollamaAPI).createModelWithModelFileContents(model, modelFilePath); - ollamaAPI.createModelWithModelFileContents(model, modelFilePath); - verify(ollamaAPI, times(1)).createModelWithModelFileContents(model, modelFilePath); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { - throw new RuntimeException(e); + @Test + void testCreateModel() { + OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); + String model = OllamaModelType.LLAMA2; + String modelFilePath = "FROM llama2\nSYSTEM You are mario from Super Mario Bros."; + try { + doNothing().when(ollamaAPI).createModelWithModelFileContents(model, modelFilePath); + ollamaAPI.createModelWithModelFileContents(model, modelFilePath); + verify(ollamaAPI, times(1)).createModelWithModelFileContents(model, modelFilePath); + } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + throw new RuntimeException(e); + } } - } - @Test - void testDeleteModel() { - OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; - try { - doNothing().when(ollamaAPI).deleteModel(model, true); - ollamaAPI.deleteModel(model, true); - verify(ollamaAPI, times(1)).deleteModel(model, true); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { - throw new RuntimeException(e); + @Test + void testDeleteModel() { + OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); + String model = OllamaModelType.LLAMA2; + try { + doNothing().when(ollamaAPI).deleteModel(model, true); + ollamaAPI.deleteModel(model, true); + verify(ollamaAPI, times(1)).deleteModel(model, true); + } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + throw new RuntimeException(e); + } } - } - @Test - void testGetModelDetails() { - OllamaAPI ollamaAPI = 
Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; - try { - when(ollamaAPI.getModelDetails(model)).thenReturn(new ModelDetail()); - ollamaAPI.getModelDetails(model); - verify(ollamaAPI, times(1)).getModelDetails(model); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { - throw new RuntimeException(e); + @Test + void testGetModelDetails() { + OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); + String model = OllamaModelType.LLAMA2; + try { + when(ollamaAPI.getModelDetails(model)).thenReturn(new ModelDetail()); + ollamaAPI.getModelDetails(model); + verify(ollamaAPI, times(1)).getModelDetails(model); + } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + throw new RuntimeException(e); + } } - } - @Test - void testGenerateEmbeddings() { - OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; - String prompt = "some prompt text"; - try { - when(ollamaAPI.generateEmbeddings(model, prompt)).thenReturn(new ArrayList<>()); - ollamaAPI.generateEmbeddings(model, prompt); - verify(ollamaAPI, times(1)).generateEmbeddings(model, prompt); - } catch (IOException | OllamaBaseException | InterruptedException e) { - throw new RuntimeException(e); + @Test + void testGenerateEmbeddings() { + OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); + String model = OllamaModelType.LLAMA2; + String prompt = "some prompt text"; + try { + when(ollamaAPI.generateEmbeddings(model, prompt)).thenReturn(new ArrayList<>()); + ollamaAPI.generateEmbeddings(model, prompt); + verify(ollamaAPI, times(1)).generateEmbeddings(model, prompt); + } catch (IOException | OllamaBaseException | InterruptedException e) { + throw new RuntimeException(e); + } } - } - @Test - void testAsk() { - OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; - String prompt = "some prompt text"; - OptionsBuilder optionsBuilder = new OptionsBuilder(); - try { - when(ollamaAPI.generate(model, prompt, optionsBuilder.build())) - .thenReturn(new OllamaResult("", 0, 200)); - ollamaAPI.generate(model, prompt, optionsBuilder.build()); - verify(ollamaAPI, times(1)).generate(model, prompt, optionsBuilder.build()); - } catch (IOException | OllamaBaseException | InterruptedException e) { - throw new RuntimeException(e); + @Test + void testAsk() { + OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); + String model = OllamaModelType.LLAMA2; + String prompt = "some prompt text"; + OptionsBuilder optionsBuilder = new OptionsBuilder(); + try { + when(ollamaAPI.generate(model, prompt, false, optionsBuilder.build())) + .thenReturn(new OllamaResult("", 0, 200)); + ollamaAPI.generate(model, prompt, false, optionsBuilder.build()); + verify(ollamaAPI, times(1)).generate(model, prompt, false, optionsBuilder.build()); + } catch (IOException | OllamaBaseException | InterruptedException e) { + throw new RuntimeException(e); + } } - } - @Test - void testAskWithImageFiles() { - OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; - String prompt = "some prompt text"; - try { - when(ollamaAPI.generateWithImageFiles( - model, prompt, Collections.emptyList(), new OptionsBuilder().build())) - .thenReturn(new OllamaResult("", 0, 200)); - ollamaAPI.generateWithImageFiles( - model, prompt, Collections.emptyList(), new OptionsBuilder().build()); - verify(ollamaAPI, times(1)) - .generateWithImageFiles( - model, prompt, Collections.emptyList(), new 
OptionsBuilder().build()); - } catch (IOException | OllamaBaseException | InterruptedException e) { - throw new RuntimeException(e); + @Test + void testAskWithImageFiles() { + OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); + String model = OllamaModelType.LLAMA2; + String prompt = "some prompt text"; + try { + when(ollamaAPI.generateWithImageFiles( + model, prompt, Collections.emptyList(), new OptionsBuilder().build())) + .thenReturn(new OllamaResult("", 0, 200)); + ollamaAPI.generateWithImageFiles( + model, prompt, Collections.emptyList(), new OptionsBuilder().build()); + verify(ollamaAPI, times(1)) + .generateWithImageFiles( + model, prompt, Collections.emptyList(), new OptionsBuilder().build()); + } catch (IOException | OllamaBaseException | InterruptedException e) { + throw new RuntimeException(e); + } } - } - @Test - void testAskWithImageURLs() { - OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; - String prompt = "some prompt text"; - try { - when(ollamaAPI.generateWithImageURLs( - model, prompt, Collections.emptyList(), new OptionsBuilder().build())) - .thenReturn(new OllamaResult("", 0, 200)); - ollamaAPI.generateWithImageURLs( - model, prompt, Collections.emptyList(), new OptionsBuilder().build()); - verify(ollamaAPI, times(1)) - .generateWithImageURLs( - model, prompt, Collections.emptyList(), new OptionsBuilder().build()); - } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { - throw new RuntimeException(e); + @Test + void testAskWithImageURLs() { + OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); + String model = OllamaModelType.LLAMA2; + String prompt = "some prompt text"; + try { + when(ollamaAPI.generateWithImageURLs( + model, prompt, Collections.emptyList(), new OptionsBuilder().build())) + .thenReturn(new OllamaResult("", 0, 200)); + ollamaAPI.generateWithImageURLs( + model, prompt, Collections.emptyList(), new OptionsBuilder().build()); + verify(ollamaAPI, times(1)) + .generateWithImageURLs( + model, prompt, Collections.emptyList(), new OptionsBuilder().build()); + } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) { + throw new RuntimeException(e); + } } - } - @Test - void testAskAsync() { - OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); - String model = OllamaModelType.LLAMA2; - String prompt = "some prompt text"; - when(ollamaAPI.generateAsync(model, prompt)) - .thenReturn(new OllamaAsyncResultCallback(null, null, 3)); - ollamaAPI.generateAsync(model, prompt); - verify(ollamaAPI, times(1)).generateAsync(model, prompt); - } + @Test + void testAskAsync() { + OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class); + String model = OllamaModelType.LLAMA2; + String prompt = "some prompt text"; + when(ollamaAPI.generateAsync(model, prompt, false)) + .thenReturn(new OllamaAsyncResultCallback(null, null, 3)); + ollamaAPI.generateAsync(model, prompt, false); + verify(ollamaAPI, times(1)).generateAsync(model, prompt, false); + } } From 30bfdd9c6df10297841c8ebf5d0e0563f341ed0d Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Fri, 12 Jul 2024 11:38:02 +0000 Subject: [PATCH 64/69] [maven-release-plugin] prepare release v1.0.76 --- pom.xml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index c2f3754..2edc7fd 100644 --- a/pom.xml +++ b/pom.xml @@ -1,11 +1,10 @@ - + 4.0.0 io.github.amithkoujalgi ollama4j - 1.0.76-SNAPSHOT + 1.0.76 Ollama4j Java library for interacting with Ollama API. 
@@ -40,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.76 From 953605fa7394f72c8e864202cb05b2cd050c8556 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Fri, 12 Jul 2024 11:38:03 +0000 Subject: [PATCH 65/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 2edc7fd..391f89e 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.76 + 1.0.77-SNAPSHOT Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.76 + v1.0.16 From 51fbedad69f869c97ea4ffc89087eb1b13622cd2 Mon Sep 17 00:00:00 2001 From: Amith Koujalgi Date: Fri, 12 Jul 2024 17:35:41 +0530 Subject: [PATCH 66/69] Updated README.md --- README.md | 38 ++++++++++++++++++++++++++++++++------ 1 file changed, 32 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index f111d6b..46c77da 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +
+ ### Ollama4j ollama4j-icon @@ -9,16 +11,28 @@ Find more details on the [website](https://amithkoujalgi.github.io/ollama4j/). ![GitHub stars](https://img.shields.io/github/stars/amithkoujalgi/ollama4j) ![GitHub forks](https://img.shields.io/github/forks/amithkoujalgi/ollama4j) ![GitHub watchers](https://img.shields.io/github/watchers/amithkoujalgi/ollama4j) +![Contributors](https://img.shields.io/github/contributors/amithkoujalgi/ollama4j) +![GitHub License](https://img.shields.io/github/license/amithkoujalgi/ollama4j) + ![GitHub repo size](https://img.shields.io/github/repo-size/amithkoujalgi/ollama4j) -![GitHub language count](https://img.shields.io/github/languages/count/amithkoujalgi/ollama4j) ![GitHub top language](https://img.shields.io/github/languages/top/amithkoujalgi/ollama4j) ![GitHub last commit](https://img.shields.io/github/last-commit/amithkoujalgi/ollama4j?color=green) -![Hits](https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Famithkoujalgi%2Follama4j&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false) - [![codecov](https://codecov.io/gh/amithkoujalgi/ollama4j/graph/badge.svg?token=U0TE7BGP8L)](https://codecov.io/gh/amithkoujalgi/ollama4j) +![GitHub Issues or Pull Requests](https://img.shields.io/github/issues-raw/amithkoujalgi/ollama4j) +![GitHub Issues or Pull Requests](https://img.shields.io/github/issues-closed-raw/amithkoujalgi/ollama4j) +![GitHub Issues or Pull Requests](https://img.shields.io/github/issues-pr-raw/amithkoujalgi/ollama4j) +![GitHub Issues or Pull Requests](https://img.shields.io/github/issues-pr-closed-raw/amithkoujalgi/ollama4j) +![GitHub Discussions](https://img.shields.io/github/discussions/amithkoujalgi/ollama4j) + ![Build Status](https://github.com/amithkoujalgi/ollama4j/actions/workflows/maven-publish.yml/badge.svg) +
+ +[//]: # (![Hits](https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Famithkoujalgi%2Follama4j&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false)) + +[//]: # (![GitHub language count](https://img.shields.io/github/languages/count/amithkoujalgi/ollama4j)) + ## Table of Contents - [How does it work?](#how-does-it-work) @@ -172,16 +186,28 @@ Contributions are most welcome! Whether it's reporting a bug, proposing an enhan with code - any sort of contribution is much appreciated. +### References + +- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md) + ### Credits The nomenclature and the icon have been adopted from the incredible [Ollama](https://ollama.ai/) project. -### References -- [Ollama REST APIs](https://github.com/jmorganca/ollama/blob/main/docs/api.md) +
-## Appreciate my work? +**Thanks to the amazing contributors** + + + + + +### Appreciate my work? Buy Me A Coffee + +
+ From e8d709e99a5accb3e119c664d6bc81e624d94696 Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Fri, 12 Jul 2024 12:07:00 +0000 Subject: [PATCH 67/69] [maven-release-plugin] prepare release v1.0.77 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 391f89e..818ed5f 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.77-SNAPSHOT + 1.0.77 Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.16 + v1.0.77 From cf52c9610c2cc6538b112add5cbb825aa5deaa5d Mon Sep 17 00:00:00 2001 From: amithkoujalgi Date: Fri, 12 Jul 2024 12:07:01 +0000 Subject: [PATCH 68/69] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 818ed5f..2892ed2 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ io.github.amithkoujalgi ollama4j - 1.0.77 + 1.0.78-SNAPSHOT Ollama4j Java library for interacting with Ollama API. @@ -39,7 +39,7 @@ scm:git:git@github.com:amithkoujalgi/ollama4j.git scm:git:https://github.com/amithkoujalgi/ollama4j.git https://github.com/amithkoujalgi/ollama4j - v1.0.77 + v1.0.16 From dd9ba7c937d014da9c832cc8ac1ad5a577e921a5 Mon Sep 17 00:00:00 2001 From: Amith Koujalgi Date: Sat, 13 Jul 2024 11:50:52 +0530 Subject: [PATCH 69/69] discarded outdated GH workflows Signed-off-by: Amith Koujalgi --- .github/workflows/maven-publish.yml | 134 +++++++++++++------------- .github/workflows/publish-javadoc.yml | 104 ++++++++++---------- 2 files changed, 119 insertions(+), 119 deletions(-) diff --git a/.github/workflows/maven-publish.yml b/.github/workflows/maven-publish.yml index e1ac127..430c375 100644 --- a/.github/workflows/maven-publish.yml +++ b/.github/workflows/maven-publish.yml @@ -1,68 +1,68 @@ -# This workflow will build a package using Maven and then publish it to GitHub packages when a release is created -# For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path - -name: Test and Publish Package - +## This workflow will build a package using Maven and then publish it to GitHub packages when a release is created +## For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path +# +#name: Test and Publish Package +# +##on: +## release: +## types: [ "created" ] +# #on: -# release: -# types: [ "created" ] - -on: - push: - branches: [ "main" ] - workflow_dispatch: - -jobs: - build: - runs-on: ubuntu-latest - permissions: - contents: write - packages: write - steps: - - uses: actions/checkout@v3 - - name: Set up JDK 11 - uses: actions/setup-java@v3 - with: - java-version: '11' - distribution: 'adopt-hotspot' - server-id: github # Value of the distributionManagement/repository/id field of the pom.xml - settings-path: ${{ github.workspace }} # location for the settings.xml file - - name: Build with Maven - run: mvn --file pom.xml -U clean package -Punit-tests - - name: Set up Apache Maven Central (Overwrite settings.xml) - uses: actions/setup-java@v3 - with: # running setup-java again overwrites the settings.xml - java-version: '11' - distribution: 'adopt-hotspot' - cache: 'maven' - server-id: ossrh - server-username: MAVEN_USERNAME - server-password: MAVEN_PASSWORD - gpg-private-key: ${{ 
secrets.GPG_PRIVATE_KEY }} - gpg-passphrase: MAVEN_GPG_PASSPHRASE - - name: Set up Maven cache - uses: actions/cache@v3 - with: - path: ~/.m2/repository - key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} - restore-keys: | - ${{ runner.os }}-maven- - - name: Build - run: mvn -B -ntp clean install - - name: Upload coverage reports to Codecov - uses: codecov/codecov-action@v3 - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - - name: Publish to GitHub Packages Apache Maven - # if: > - # github.event_name != 'pull_request' && - # github.ref_name == 'main' && - # contains(github.event.head_commit.message, 'release') - run: | - git config --global user.email "koujalgi.amith@gmail.com" - git config --global user.name "amithkoujalgi" - mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform - env: - MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }} - MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }} - MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} \ No newline at end of file +# push: +# branches: [ "main" ] +# workflow_dispatch: +# +#jobs: +# build: +# runs-on: ubuntu-latest +# permissions: +# contents: write +# packages: write +# steps: +# - uses: actions/checkout@v3 +# - name: Set up JDK 11 +# uses: actions/setup-java@v3 +# with: +# java-version: '11' +# distribution: 'adopt-hotspot' +# server-id: github # Value of the distributionManagement/repository/id field of the pom.xml +# settings-path: ${{ github.workspace }} # location for the settings.xml file +# - name: Build with Maven +# run: mvn --file pom.xml -U clean package -Punit-tests +# - name: Set up Apache Maven Central (Overwrite settings.xml) +# uses: actions/setup-java@v3 +# with: # running setup-java again overwrites the settings.xml +# java-version: '11' +# distribution: 'adopt-hotspot' +# cache: 'maven' +# server-id: ossrh +# server-username: MAVEN_USERNAME +# server-password: MAVEN_PASSWORD +# gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }} +# gpg-passphrase: MAVEN_GPG_PASSPHRASE +# - name: Set up Maven cache +# uses: actions/cache@v3 +# with: +# path: ~/.m2/repository +# key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} +# restore-keys: | +# ${{ runner.os }}-maven- +# - name: Build +# run: mvn -B -ntp clean install +# - name: Upload coverage reports to Codecov +# uses: codecov/codecov-action@v3 +# env: +# CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} +# - name: Publish to GitHub Packages Apache Maven +# # if: > +# # github.event_name != 'pull_request' && +# # github.ref_name == 'main' && +# # contains(github.event.head_commit.message, 'release') +# run: | +# git config --global user.email "koujalgi.amith@gmail.com" +# git config --global user.name "amithkoujalgi" +# mvn -B -ntp -DskipTests -Pci-cd -Darguments="-DskipTests -Pci-cd" release:clean release:prepare release:perform +# env: +# MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }} +# MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }} +# MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} \ No newline at end of file diff --git a/.github/workflows/publish-javadoc.yml b/.github/workflows/publish-javadoc.yml index a346dbf..2eba051 100644 --- a/.github/workflows/publish-javadoc.yml +++ b/.github/workflows/publish-javadoc.yml @@ -1,52 +1,52 @@ -# Simple workflow for deploying static content to GitHub Pages -name: Deploy Javadoc content to Pages - -on: - # Runs on pushes targeting the default branch - push: - branches: [ "none" ] - - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - 
-# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages -permissions: - contents: read - pages: write - id-token: write - packages: write -# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. -# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. -concurrency: - group: "pages" - cancel-in-progress: false - -jobs: - # Single deploy job since we're just deploying - deploy: - runs-on: ubuntu-latest - - environment: - name: github-pages - url: ${{ steps.deployment.outputs.page_url }} - steps: - - uses: actions/checkout@v3 - - name: Set up JDK 11 - uses: actions/setup-java@v3 - with: - java-version: '11' - distribution: 'adopt-hotspot' - server-id: github # Value of the distributionManagement/repository/id field of the pom.xml - settings-path: ${{ github.workspace }} # location for the settings.xml file - - name: Build with Maven - run: mvn --file pom.xml -U clean package - - name: Setup Pages - uses: actions/configure-pages@v3 - - name: Upload artifact - uses: actions/upload-pages-artifact@v2 - with: - # Upload entire repository - path: './target/apidocs/.' - - name: Deploy to GitHub Pages - id: deployment - uses: actions/deploy-pages@v2 +## Simple workflow for deploying static content to GitHub Pages +#name: Deploy Javadoc content to Pages +# +#on: +# # Runs on pushes targeting the default branch +# push: +# branches: [ "none" ] +# +# # Allows you to run this workflow manually from the Actions tab +# workflow_dispatch: +# +## Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages +#permissions: +# contents: read +# pages: write +# id-token: write +# packages: write +## Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. +## However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. +#concurrency: +# group: "pages" +# cancel-in-progress: false +# +#jobs: +# # Single deploy job since we're just deploying +# deploy: +# runs-on: ubuntu-latest +# +# environment: +# name: github-pages +# url: ${{ steps.deployment.outputs.page_url }} +# steps: +# - uses: actions/checkout@v3 +# - name: Set up JDK 11 +# uses: actions/setup-java@v3 +# with: +# java-version: '11' +# distribution: 'adopt-hotspot' +# server-id: github # Value of the distributionManagement/repository/id field of the pom.xml +# settings-path: ${{ github.workspace }} # location for the settings.xml file +# - name: Build with Maven +# run: mvn --file pom.xml -U clean package +# - name: Setup Pages +# uses: actions/configure-pages@v3 +# - name: Upload artifact +# uses: actions/upload-pages-artifact@v2 +# with: +# # Upload entire repository +# path: './target/apidocs/.' +# - name: Deploy to GitHub Pages +# id: deployment +# uses: actions/deploy-pages@v2
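Editorial note, not part of the patch series above: for readers tracking the API surface exercised by the mocked unit tests earlier in this series — `generate(model, prompt, <boolean flag>, options)`, `generateAsync(model, prompt, <boolean flag>)`, and `generateEmbeddings(model, prompt)` — the sketch below shows what a minimal direct usage against a running server might look like. It is an assumption-laden illustration only: the endpoint URL, the choice of `LLAMA2`, the import locations, the element type of the embeddings list, and the `getResponse()` accessor on `OllamaResult` follow the library's usual conventions but are not confirmed by these patches.

```java
// Illustrative usage sketch only — not part of the patch series above.
// Assumptions: a local Ollama server at http://localhost:11434, the LLAMA2
// model already pulled, and import locations / OllamaResult#getResponse()
// matching the library's usual package layout.
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

import java.util.List;

public class UsageSketch {

    public static void main(String[] args) throws Exception {
        // Assumed host; adjust to wherever the Ollama server is running.
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");

        // Synchronous generation, mirroring the mocked call
        // generate(model, prompt, false, new OptionsBuilder().build());
        // the boolean flag is passed as false here, exactly as in the tests.
        OllamaResult result = ollamaAPI.generate(
                OllamaModelType.LLAMA2,
                "Why is the sky blue?",
                false,
                new OptionsBuilder().build());
        System.out.println(result.getResponse());

        // Embeddings, mirroring generateEmbeddings(model, prompt);
        // List<Double> is the assumed element type of the returned list.
        List<Double> embedding = ollamaAPI.generateEmbeddings(
                OllamaModelType.LLAMA2, "some prompt text");
        System.out.println("Embedding dimensions: " + embedding.size());
    }
}
```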