Refactor model response classes and update API methods

- Renamed `ModelProcessesResponse` to `ModelProcessesResult` and updated all related references in the codebase.
- Introduced `OllamaEmbedResult` class to replace `OllamaEmbedResponse`, ensuring consistency across the API.
- Updated method signatures in `OllamaAPI` to reflect the new class names and adjusted integration tests accordingly.
This commit is contained in:
amithkoujalgi 2025-09-28 22:52:24 +05:30
parent dd1022a990
commit 61fe8b2b56
No known key found for this signature in database
GPG Key ID: E29A37746AF94B70
6 changed files with 18 additions and 18 deletions

View File

@@ -12,14 +12,14 @@ This API corresponds to the [PS](https://github.com/ollama/ollama/blob/main/docs
package io.github.ollama4j.localtests;
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.ps.ModelProcessesResponse;
import io.github.ollama4j.models.ps.ModelProcessesResult;
public class Main {
public static void main(String[] args) {
OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
ModelProcessesResponse response = ollamaAPI.ps();
ModelProcessesResult response = ollamaAPI.ps();
System.out.println(response);
}

View File

@@ -16,11 +16,11 @@ import io.github.ollama4j.metrics.MetricsRecorder;
import io.github.ollama4j.models.chat.*;
import io.github.ollama4j.models.chat.OllamaChatTokenHandler;
import io.github.ollama4j.models.embed.OllamaEmbedRequest;
import io.github.ollama4j.models.embed.OllamaEmbedResponse;
import io.github.ollama4j.models.embed.OllamaEmbedResult;
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
import io.github.ollama4j.models.generate.OllamaGenerateTokenHandler;
import io.github.ollama4j.models.ps.ModelProcessesResponse;
import io.github.ollama4j.models.ps.ModelProcessesResult;
import io.github.ollama4j.models.request.*;
import io.github.ollama4j.models.response.*;
import io.github.ollama4j.tools.*;
@@ -186,10 +186,10 @@ public class OllamaAPI {
/**
* Provides a list of running models and details about each model currently loaded into memory.
*
* @return ModelsProcessResponse containing details about the running models
* @return ModelProcessesResult containing details about the running models
* @throws OllamaException if the response indicates an error status
*/
public ModelProcessesResponse ps() throws OllamaException {
public ModelProcessesResult ps() throws OllamaException {
long startTime = System.currentTimeMillis();
String url = "/api/ps";
int statusCode = -1;
@@ -217,7 +217,7 @@
String responseString = response.body();
if (statusCode == 200) {
return Utils.getObjectMapper()
.readValue(responseString, ModelProcessesResponse.class);
.readValue(responseString, ModelProcessesResult.class);
} else {
throw new OllamaException(statusCode + " - " + responseString);
}
@@ -719,7 +719,7 @@
* @return embeddings
* @throws OllamaException if the response indicates an error status
*/
public OllamaEmbedResponse embed(OllamaEmbedRequest modelRequest) throws OllamaException {
public OllamaEmbedResult embed(OllamaEmbedRequest modelRequest) throws OllamaException {
long startTime = System.currentTimeMillis();
String url = "/api/embed";
int statusCode = -1;
@@ -739,7 +739,7 @@
statusCode = response.statusCode();
String responseBody = response.body();
if (statusCode == 200) {
return Utils.getObjectMapper().readValue(responseBody, OllamaEmbedResponse.class);
return Utils.getObjectMapper().readValue(responseBody, OllamaEmbedResult.class);
} else {
throw new OllamaException(statusCode + " - " + responseBody);
}

View File

@@ -14,7 +14,7 @@ import lombok.Data;
@SuppressWarnings("unused")
@Data
public class OllamaEmbedResponse {
public class OllamaEmbedResult {
@JsonProperty("model")
private String model;

View File

@@ -17,7 +17,7 @@ import lombok.NoArgsConstructor;
@Data
@NoArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public class ModelProcessesResponse {
public class ModelProcessesResult {
@JsonProperty("models")
private List<ModelProcess> models;

View File

@@ -16,7 +16,7 @@ import io.github.ollama4j.impl.ConsoleOutputChatTokenHandler;
import io.github.ollama4j.impl.ConsoleOutputGenerateTokenHandler;
import io.github.ollama4j.models.chat.*;
import io.github.ollama4j.models.embed.OllamaEmbedRequest;
import io.github.ollama4j.models.embed.OllamaEmbedResponse;
import io.github.ollama4j.models.embed.OllamaEmbedResult;
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
@@ -234,7 +234,7 @@ class OllamaAPIIntegrationTest {
OllamaEmbedRequest m = new OllamaEmbedRequest();
m.setModel(EMBEDDING_MODEL);
m.setInput(Arrays.asList("Why is the sky blue?", "Why is the grass green?"));
OllamaEmbedResponse embeddings = api.embed(m);
OllamaEmbedResult embeddings = api.embed(m);
assertNotNull(embeddings, "Embeddings should not be null");
assertFalse(embeddings.getEmbeddings().isEmpty(), "Embeddings should not be empty");
}
@@ -1333,7 +1333,7 @@ class OllamaAPIIntegrationTest {
requestModel.setInput(
Collections.singletonList("This is a single test sentence for embedding."));
OllamaEmbedResponse embeddings = api.embed(requestModel);
OllamaEmbedResult embeddings = api.embed(requestModel);
assertNotNull(embeddings);
assertFalse(embeddings.getEmbeddings().isEmpty());

View File

@@ -17,7 +17,7 @@ import io.github.ollama4j.exceptions.OllamaException;
import io.github.ollama4j.exceptions.RoleNotFoundException;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.embed.OllamaEmbedRequest;
import io.github.ollama4j.models.embed.OllamaEmbedResponse;
import io.github.ollama4j.models.embed.OllamaEmbedResult;
import io.github.ollama4j.models.generate.OllamaGenerateRequest;
import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
@@ -112,7 +112,7 @@ class TestMockedAPIs {
OllamaEmbedRequest m = new OllamaEmbedRequest();
m.setModel(model);
m.setInput(List.of(prompt));
when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResponse());
when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResult());
ollamaAPI.embed(m);
verify(ollamaAPI, times(1)).embed(m);
} catch (OllamaException e) {
@@ -127,7 +127,7 @@
List<String> inputs = List.of("some prompt text");
try {
OllamaEmbedRequest m = new OllamaEmbedRequest(model, inputs);
when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResponse());
when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResult());
ollamaAPI.embed(m);
verify(ollamaAPI, times(1)).embed(m);
} catch (OllamaException e) {
@@ -142,7 +142,7 @@
List<String> inputs = List.of("some prompt text");
try {
when(ollamaAPI.embed(new OllamaEmbedRequest(model, inputs)))
.thenReturn(new OllamaEmbedResponse());
.thenReturn(new OllamaEmbedResult());
ollamaAPI.embed(new OllamaEmbedRequest(model, inputs));
verify(ollamaAPI, times(1)).embed(new OllamaEmbedRequest(model, inputs));
} catch (OllamaException e) {