diff --git a/docs/docs/apis-extras/ps.md b/docs/docs/apis-extras/ps.md index d8641a0..43b0af5 100644 --- a/docs/docs/apis-extras/ps.md +++ b/docs/docs/apis-extras/ps.md @@ -12,14 +12,14 @@ This API corresponds to the [PS](https://github.com/ollama/ollama/blob/main/docs package io.github.ollama4j.localtests; import io.github.ollama4j.OllamaAPI; -import io.github.ollama4j.models.ps.ModelProcessesResponse; +import io.github.ollama4j.models.ps.ModelProcessesResult; public class Main { public static void main(String[] args) { OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434"); - ModelProcessesResponse response = ollamaAPI.ps(); + ModelProcessesResult response = ollamaAPI.ps(); System.out.println(response); } diff --git a/src/main/java/io/github/ollama4j/OllamaAPI.java b/src/main/java/io/github/ollama4j/OllamaAPI.java index 68931e1..7e095d2 100644 --- a/src/main/java/io/github/ollama4j/OllamaAPI.java +++ b/src/main/java/io/github/ollama4j/OllamaAPI.java @@ -16,11 +16,11 @@ import io.github.ollama4j.metrics.MetricsRecorder; import io.github.ollama4j.models.chat.*; import io.github.ollama4j.models.chat.OllamaChatTokenHandler; import io.github.ollama4j.models.embed.OllamaEmbedRequest; -import io.github.ollama4j.models.embed.OllamaEmbedResponse; +import io.github.ollama4j.models.embed.OllamaEmbedResult; import io.github.ollama4j.models.generate.OllamaGenerateRequest; import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver; import io.github.ollama4j.models.generate.OllamaGenerateTokenHandler; -import io.github.ollama4j.models.ps.ModelProcessesResponse; +import io.github.ollama4j.models.ps.ModelProcessesResult; import io.github.ollama4j.models.request.*; import io.github.ollama4j.models.response.*; import io.github.ollama4j.tools.*; @@ -186,10 +186,10 @@ public class OllamaAPI { /** * Provides a list of running models and details about each model currently loaded into memory. 
 * - * @return ModelsProcessResponse containing details about the running models + * @return ModelProcessesResult containing details about the running models * @throws OllamaException if the response indicates an error status */ - public ModelProcessesResponse ps() throws OllamaException { + public ModelProcessesResult ps() throws OllamaException { long startTime = System.currentTimeMillis(); String url = "/api/ps"; int statusCode = -1; @@ -217,7 +217,7 @@ public class OllamaAPI { String responseString = response.body(); if (statusCode == 200) { return Utils.getObjectMapper() - .readValue(responseString, ModelProcessesResponse.class); + .readValue(responseString, ModelProcessesResult.class); } else { throw new OllamaException(statusCode + " - " + responseString); } @@ -719,7 +719,7 @@ public class OllamaAPI { * @return embeddings * @throws OllamaException if the response indicates an error status */ - public OllamaEmbedResponse embed(OllamaEmbedRequest modelRequest) throws OllamaException { + public OllamaEmbedResult embed(OllamaEmbedRequest modelRequest) throws OllamaException { long startTime = System.currentTimeMillis(); String url = "/api/embed"; int statusCode = -1; @@ -739,7 +739,7 @@ public class OllamaAPI { statusCode = response.statusCode(); String responseBody = response.body(); if (statusCode == 200) { - return Utils.getObjectMapper().readValue(responseBody, OllamaEmbedResponse.class); + return Utils.getObjectMapper().readValue(responseBody, OllamaEmbedResult.class); } else { throw new OllamaException(statusCode + " - " + responseBody); } diff --git a/src/main/java/io/github/ollama4j/models/embed/OllamaEmbedResponse.java b/src/main/java/io/github/ollama4j/models/embed/OllamaEmbedResult.java similarity index 95% rename from src/main/java/io/github/ollama4j/models/embed/OllamaEmbedResponse.java rename to src/main/java/io/github/ollama4j/models/embed/OllamaEmbedResult.java index 060b4c6..512872d 100644 ---
a/src/main/java/io/github/ollama4j/models/embed/OllamaEmbedResponse.java +++ b/src/main/java/io/github/ollama4j/models/embed/OllamaEmbedResult.java @@ -14,7 +14,7 @@ import lombok.Data; @SuppressWarnings("unused") @Data -public class OllamaEmbedResponse { +public class OllamaEmbedResult { @JsonProperty("model") private String model; diff --git a/src/main/java/io/github/ollama4j/models/ps/ModelProcessesResponse.java b/src/main/java/io/github/ollama4j/models/ps/ModelProcessesResult.java similarity index 97% rename from src/main/java/io/github/ollama4j/models/ps/ModelProcessesResponse.java rename to src/main/java/io/github/ollama4j/models/ps/ModelProcessesResult.java index 518205e..257d019 100644 --- a/src/main/java/io/github/ollama4j/models/ps/ModelProcessesResponse.java +++ b/src/main/java/io/github/ollama4j/models/ps/ModelProcessesResult.java @@ -17,7 +17,7 @@ import lombok.NoArgsConstructor; @Data @NoArgsConstructor @JsonIgnoreProperties(ignoreUnknown = true) -public class ModelProcessesResponse { +public class ModelProcessesResult { @JsonProperty("models") private List models; diff --git a/src/test/java/io/github/ollama4j/integrationtests/OllamaAPIIntegrationTest.java b/src/test/java/io/github/ollama4j/integrationtests/OllamaAPIIntegrationTest.java index 638f32c..c86856e 100644 --- a/src/test/java/io/github/ollama4j/integrationtests/OllamaAPIIntegrationTest.java +++ b/src/test/java/io/github/ollama4j/integrationtests/OllamaAPIIntegrationTest.java @@ -16,7 +16,7 @@ import io.github.ollama4j.impl.ConsoleOutputChatTokenHandler; import io.github.ollama4j.impl.ConsoleOutputGenerateTokenHandler; import io.github.ollama4j.models.chat.*; import io.github.ollama4j.models.embed.OllamaEmbedRequest; -import io.github.ollama4j.models.embed.OllamaEmbedResponse; +import io.github.ollama4j.models.embed.OllamaEmbedResult; import io.github.ollama4j.models.generate.OllamaGenerateRequest; import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder; import 
io.github.ollama4j.models.generate.OllamaGenerateStreamObserver; @@ -234,7 +234,7 @@ class OllamaAPIIntegrationTest { OllamaEmbedRequest m = new OllamaEmbedRequest(); m.setModel(EMBEDDING_MODEL); m.setInput(Arrays.asList("Why is the sky blue?", "Why is the grass green?")); - OllamaEmbedResponse embeddings = api.embed(m); + OllamaEmbedResult embeddings = api.embed(m); assertNotNull(embeddings, "Embeddings should not be null"); assertFalse(embeddings.getEmbeddings().isEmpty(), "Embeddings should not be empty"); } @@ -1333,7 +1333,7 @@ class OllamaAPIIntegrationTest { requestModel.setInput( Collections.singletonList("This is a single test sentence for embedding.")); - OllamaEmbedResponse embeddings = api.embed(requestModel); + OllamaEmbedResult embeddings = api.embed(requestModel); assertNotNull(embeddings); assertFalse(embeddings.getEmbeddings().isEmpty()); diff --git a/src/test/java/io/github/ollama4j/unittests/TestMockedAPIs.java b/src/test/java/io/github/ollama4j/unittests/TestMockedAPIs.java index 7140146..eaeb30b 100644 --- a/src/test/java/io/github/ollama4j/unittests/TestMockedAPIs.java +++ b/src/test/java/io/github/ollama4j/unittests/TestMockedAPIs.java @@ -17,7 +17,7 @@ import io.github.ollama4j.exceptions.OllamaException; import io.github.ollama4j.exceptions.RoleNotFoundException; import io.github.ollama4j.models.chat.OllamaChatMessageRole; import io.github.ollama4j.models.embed.OllamaEmbedRequest; -import io.github.ollama4j.models.embed.OllamaEmbedResponse; +import io.github.ollama4j.models.embed.OllamaEmbedResult; import io.github.ollama4j.models.generate.OllamaGenerateRequest; import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder; import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver; @@ -112,7 +112,7 @@ class TestMockedAPIs { OllamaEmbedRequest m = new OllamaEmbedRequest(); m.setModel(model); m.setInput(List.of(prompt)); - when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResponse()); + 
when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResult()); ollamaAPI.embed(m); verify(ollamaAPI, times(1)).embed(m); } catch (OllamaException e) { @@ -127,7 +127,7 @@ class TestMockedAPIs { List inputs = List.of("some prompt text"); try { OllamaEmbedRequest m = new OllamaEmbedRequest(model, inputs); - when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResponse()); + when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResult()); ollamaAPI.embed(m); verify(ollamaAPI, times(1)).embed(m); } catch (OllamaException e) { @@ -142,7 +142,7 @@ class TestMockedAPIs { List inputs = List.of("some prompt text"); try { when(ollamaAPI.embed(new OllamaEmbedRequest(model, inputs))) - .thenReturn(new OllamaEmbedResponse()); + .thenReturn(new OllamaEmbedResult()); ollamaAPI.embed(new OllamaEmbedRequest(model, inputs)); verify(ollamaAPI, times(1)).embed(new OllamaEmbedRequest(model, inputs)); } catch (OllamaException e) {