Mirror of https://github.com/amithkoujalgi/ollama4j.git (synced 2025-10-14 09:28:58 +02:00)
Add Javadoc generation to Makefile and refactor model classes
- Introduced a new `javadoc` target in the Makefile to generate Javadocs.
- Refactored model classes: renamed `ModelsProcessResponse` to `ModelProcessesResponse` and updated related references.
- Updated `OllamaEmbedRequestModel` and `OllamaEmbedResponseModel` to `OllamaEmbedRequest` and `OllamaEmbedResponse`, respectively, across the codebase.
- Added new classes for `OllamaEmbedRequest` and `OllamaEmbedResponse` to improve clarity and maintainability.
Parent: 36f7d14c68
Commit: dd1022a990
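For quick orientation, here is a minimal usage sketch of how calling code reads after these renames, pieced together from the changed files below. It is not part of the commit; the server URL matches the example in the diff, while the embedding model name "all-minilm" is an assumption for illustration.

// Minimal sketch of the renamed API surface after this commit (not part of the diff).
// Assumes a local Ollama server at http://localhost:11434 and that the placeholder
// embedding model "all-minilm" is available locally.
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.embed.OllamaEmbedRequest;   // was OllamaEmbedRequestModel
import io.github.ollama4j.models.embed.OllamaEmbedResponse;  // was OllamaEmbedResponseModel
import io.github.ollama4j.models.ps.ModelProcessesResponse;  // was ModelsProcessResponse

import java.util.Arrays;

public class RenamedApiSketch {
    public static void main(String[] args) throws Exception {
        OllamaAPI api = new OllamaAPI("http://localhost:11434");

        // Embeddings via the renamed request/response classes.
        OllamaEmbedRequest request = new OllamaEmbedRequest();
        request.setModel("all-minilm"); // placeholder model name
        request.setInput(Arrays.asList("Why is the sky blue?"));
        OllamaEmbedResponse embeddings = api.embed(request);
        System.out.println(embeddings.getEmbeddings().size() + " embedding vectors returned");

        // Running-model listing via the renamed response class.
        ModelProcessesResponse running = api.ps();
        System.out.println(running);
    }
}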
Makefile (10 additions)

@@ -38,6 +38,16 @@ doxygen:
 	@echo "\033[0;34mGenerating documentation with Doxygen...\033[0m"
 	@doxygen Doxyfile
 
+javadoc:
+	@echo "\033[0;34mGenerating Javadocs into '$(javadocfolder)'...\033[0m"
+	@mvn clean javadoc:javadoc
+	@if [ -f "target/reports/apidocs/index.html" ]; then \
+		echo "\033[0;32mJavadocs generated in target/reports/apidocs/index.html\033[0m"; \
+	else \
+		echo "\033[0;31mFailed to generate Javadocs in target/reports/apidocs\033[0m"; \
+		exit 1; \
+	fi
+
 list-releases:
 	@echo "\033[0;34mListing latest releases...\033[0m"
 	@curl 'https://central.sonatype.com/api/internal/browse/component/versions?sortField=normalizedVersion&sortDirection=desc&page=0&size=20&filter=namespace%3Aio.github.ollama4j%2Cname%3Aollama4j' \
@@ -12,14 +12,14 @@ This API corresponds to the [PS](https://github.com/ollama/ollama/blob/main/docs
 package io.github.ollama4j.localtests;
 
 import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.models.ps.ModelsProcessResponse;
+import io.github.ollama4j.models.ps.ModelProcessesResponse;
 
 public class Main {
     public static void main(String[] args) {
 
         OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
 
-        ModelsProcessResponse response = ollamaAPI.ps();
+        ModelProcessesResponse response = ollamaAPI.ps();
 
         System.out.println(response);
     }
@@ -15,12 +15,12 @@ import io.github.ollama4j.exceptions.ToolInvocationException;
 import io.github.ollama4j.metrics.MetricsRecorder;
 import io.github.ollama4j.models.chat.*;
 import io.github.ollama4j.models.chat.OllamaChatTokenHandler;
-import io.github.ollama4j.models.embed.OllamaEmbedRequestModel;
-import io.github.ollama4j.models.embed.OllamaEmbedResponseModel;
+import io.github.ollama4j.models.embed.OllamaEmbedRequest;
+import io.github.ollama4j.models.embed.OllamaEmbedResponse;
 import io.github.ollama4j.models.generate.OllamaGenerateRequest;
 import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
 import io.github.ollama4j.models.generate.OllamaGenerateTokenHandler;
-import io.github.ollama4j.models.ps.ModelsProcessResponse;
+import io.github.ollama4j.models.ps.ModelProcessesResponse;
 import io.github.ollama4j.models.request.*;
 import io.github.ollama4j.models.response.*;
 import io.github.ollama4j.tools.*;
@@ -189,7 +189,7 @@ public class OllamaAPI {
      * @return ModelsProcessResponse containing details about the running models
      * @throws OllamaException if the response indicates an error status
      */
-    public ModelsProcessResponse ps() throws OllamaException {
+    public ModelProcessesResponse ps() throws OllamaException {
         long startTime = System.currentTimeMillis();
         String url = "/api/ps";
         int statusCode = -1;
@@ -217,7 +217,7 @@ public class OllamaAPI {
             String responseString = response.body();
             if (statusCode == 200) {
                 return Utils.getObjectMapper()
-                        .readValue(responseString, ModelsProcessResponse.class);
+                        .readValue(responseString, ModelProcessesResponse.class);
             } else {
                 throw new OllamaException(statusCode + " - " + responseString);
             }
@@ -713,14 +713,13 @@ public class OllamaAPI {
     }
 
     /**
-     * Generate embeddings using a {@link OllamaEmbedRequestModel}.
+     * Generate embeddings using a {@link OllamaEmbedRequest}.
      *
     * @param modelRequest request for '/api/embed' endpoint
     * @return embeddings
     * @throws OllamaException if the response indicates an error status
     */
-    public OllamaEmbedResponseModel embed(OllamaEmbedRequestModel modelRequest)
-            throws OllamaException {
+    public OllamaEmbedResponse embed(OllamaEmbedRequest modelRequest) throws OllamaException {
         long startTime = System.currentTimeMillis();
         String url = "/api/embed";
         int statusCode = -1;
@@ -740,8 +739,7 @@ public class OllamaAPI {
             statusCode = response.statusCode();
             String responseBody = response.body();
             if (statusCode == 200) {
-                return Utils.getObjectMapper()
-                        .readValue(responseBody, OllamaEmbedResponseModel.class);
+                return Utils.getObjectMapper().readValue(responseBody, OllamaEmbedResponse.class);
             } else {
                 throw new OllamaException(statusCode + " - " + responseBody);
             }
@@ -20,7 +20,7 @@ import lombok.*;
 @Data
 @RequiredArgsConstructor
 @NoArgsConstructor
-public class OllamaEmbedRequestModel {
+public class OllamaEmbedRequest {
     @NonNull private String model;
 
     @NonNull private List<String> input;
@@ -16,10 +16,10 @@ import java.util.List;
  */
 public class OllamaEmbedRequestBuilder {
 
-    private final OllamaEmbedRequestModel request;
+    private final OllamaEmbedRequest request;
 
     private OllamaEmbedRequestBuilder(String model, List<String> input) {
-        this.request = new OllamaEmbedRequestModel(model, input);
+        this.request = new OllamaEmbedRequest(model, input);
     }
 
     public static OllamaEmbedRequestBuilder getInstance(String model, String... input) {
@@ -41,7 +41,7 @@ public class OllamaEmbedRequestBuilder {
         return this;
     }
 
-    public OllamaEmbedRequestModel build() {
+    public OllamaEmbedRequest build() {
         return this.request;
     }
 }
@@ -14,7 +14,7 @@ import lombok.Data;
 
 @SuppressWarnings("unused")
 @Data
-public class OllamaEmbedResponseModel {
+public class OllamaEmbedResponse {
     @JsonProperty("model")
     private String model;
 
@@ -17,7 +17,7 @@ import lombok.NoArgsConstructor;
 @Data
 @NoArgsConstructor
 @JsonIgnoreProperties(ignoreUnknown = true)
-public class ModelsProcessResponse {
+public class ModelProcessesResponse {
     @JsonProperty("models")
     private List<ModelProcess> models;
 
@@ -15,8 +15,8 @@ import io.github.ollama4j.exceptions.OllamaException;
 import io.github.ollama4j.impl.ConsoleOutputChatTokenHandler;
 import io.github.ollama4j.impl.ConsoleOutputGenerateTokenHandler;
 import io.github.ollama4j.models.chat.*;
-import io.github.ollama4j.models.embed.OllamaEmbedRequestModel;
-import io.github.ollama4j.models.embed.OllamaEmbedResponseModel;
+import io.github.ollama4j.models.embed.OllamaEmbedRequest;
+import io.github.ollama4j.models.embed.OllamaEmbedResponse;
 import io.github.ollama4j.models.generate.OllamaGenerateRequest;
 import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
 import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
@@ -231,10 +231,10 @@ class OllamaAPIIntegrationTest {
     @Order(5)
     void shouldReturnEmbeddings() throws Exception {
         api.pullModel(EMBEDDING_MODEL);
-        OllamaEmbedRequestModel m = new OllamaEmbedRequestModel();
+        OllamaEmbedRequest m = new OllamaEmbedRequest();
         m.setModel(EMBEDDING_MODEL);
         m.setInput(Arrays.asList("Why is the sky blue?", "Why is the grass green?"));
-        OllamaEmbedResponseModel embeddings = api.embed(m);
+        OllamaEmbedResponse embeddings = api.embed(m);
         assertNotNull(embeddings, "Embeddings should not be null");
         assertFalse(embeddings.getEmbeddings().isEmpty(), "Embeddings should not be empty");
     }
@@ -1328,12 +1328,12 @@ class OllamaAPIIntegrationTest {
     void shouldReturnEmbeddingsForSingleInput() throws Exception {
         api.pullModel(EMBEDDING_MODEL);
 
-        OllamaEmbedRequestModel requestModel = new OllamaEmbedRequestModel();
+        OllamaEmbedRequest requestModel = new OllamaEmbedRequest();
         requestModel.setModel(EMBEDDING_MODEL);
         requestModel.setInput(
                 Collections.singletonList("This is a single test sentence for embedding."));
 
-        OllamaEmbedResponseModel embeddings = api.embed(requestModel);
+        OllamaEmbedResponse embeddings = api.embed(requestModel);
 
         assertNotNull(embeddings);
         assertFalse(embeddings.getEmbeddings().isEmpty());
@@ -16,8 +16,8 @@ import io.github.ollama4j.OllamaAPI;
 import io.github.ollama4j.exceptions.OllamaException;
 import io.github.ollama4j.exceptions.RoleNotFoundException;
 import io.github.ollama4j.models.chat.OllamaChatMessageRole;
-import io.github.ollama4j.models.embed.OllamaEmbedRequestModel;
-import io.github.ollama4j.models.embed.OllamaEmbedResponseModel;
+import io.github.ollama4j.models.embed.OllamaEmbedRequest;
+import io.github.ollama4j.models.embed.OllamaEmbedResponse;
 import io.github.ollama4j.models.generate.OllamaGenerateRequest;
 import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
 import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
@@ -109,10 +109,10 @@ class TestMockedAPIs {
         String model = "llama2";
         String prompt = "some prompt text";
         try {
-            OllamaEmbedRequestModel m = new OllamaEmbedRequestModel();
+            OllamaEmbedRequest m = new OllamaEmbedRequest();
             m.setModel(model);
             m.setInput(List.of(prompt));
-            when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResponseModel());
+            when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResponse());
             ollamaAPI.embed(m);
             verify(ollamaAPI, times(1)).embed(m);
         } catch (OllamaException e) {
@@ -126,8 +126,8 @@ class TestMockedAPIs {
         String model = "llama2";
         List<String> inputs = List.of("some prompt text");
         try {
-            OllamaEmbedRequestModel m = new OllamaEmbedRequestModel(model, inputs);
-            when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResponseModel());
+            OllamaEmbedRequest m = new OllamaEmbedRequest(model, inputs);
+            when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResponse());
             ollamaAPI.embed(m);
             verify(ollamaAPI, times(1)).embed(m);
         } catch (OllamaException e) {
@@ -141,10 +141,10 @@ class TestMockedAPIs {
         String model = "llama2";
         List<String> inputs = List.of("some prompt text");
         try {
-            when(ollamaAPI.embed(new OllamaEmbedRequestModel(model, inputs)))
-                    .thenReturn(new OllamaEmbedResponseModel());
-            ollamaAPI.embed(new OllamaEmbedRequestModel(model, inputs));
-            verify(ollamaAPI, times(1)).embed(new OllamaEmbedRequestModel(model, inputs));
+            when(ollamaAPI.embed(new OllamaEmbedRequest(model, inputs)))
+                    .thenReturn(new OllamaEmbedResponse());
+            ollamaAPI.embed(new OllamaEmbedRequest(model, inputs));
+            verify(ollamaAPI, times(1)).embed(new OllamaEmbedRequest(model, inputs));
         } catch (OllamaException e) {
             throw new RuntimeException(e);
         }
@@ -69,7 +69,10 @@ class TestOptionsAndUtils {
     void testOptionsBuilderRejectsUnsupportedCustomType() {
         assertThrows(
                 IllegalArgumentException.class,
-                () -> new OptionsBuilder().setCustomOption("bad", new Object()));
+                () -> {
+                    OptionsBuilder builder = new OptionsBuilder();
+                    builder.setCustomOption("bad", new Object());
+                });
     }
 
     @Test
@@ -10,13 +10,13 @@ package io.github.ollama4j.unittests.jackson;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
+import io.github.ollama4j.models.embed.OllamaEmbedRequest;
 import io.github.ollama4j.models.embed.OllamaEmbedRequestBuilder;
-import io.github.ollama4j.models.embed.OllamaEmbedRequestModel;
 import io.github.ollama4j.utils.OptionsBuilder;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
-class TestEmbedRequestSerialization extends AbstractSerializationTest<OllamaEmbedRequestModel> {
+class TestEmbedRequestSerialization extends AbstractSerializationTest<OllamaEmbedRequest> {
 
     private OllamaEmbedRequestBuilder builder;
 
@@ -27,20 +27,18 @@ class TestEmbedRequestSerialization extends AbstractSerializationTest<OllamaEmbe
 
     @Test
     public void testRequestOnlyMandatoryFields() {
-        OllamaEmbedRequestModel req = builder.build();
+        OllamaEmbedRequest req = builder.build();
         String jsonRequest = serialize(req);
-        assertEqualsAfterUnmarshalling(
-                deserialize(jsonRequest, OllamaEmbedRequestModel.class), req);
+        assertEqualsAfterUnmarshalling(deserialize(jsonRequest, OllamaEmbedRequest.class), req);
     }
 
     @Test
     public void testRequestWithOptions() {
         OptionsBuilder b = new OptionsBuilder();
-        OllamaEmbedRequestModel req = builder.withOptions(b.setMirostat(1).build()).build();
+        OllamaEmbedRequest req = builder.withOptions(b.setMirostat(1).build()).build();
 
         String jsonRequest = serialize(req);
-        OllamaEmbedRequestModel deserializeRequest =
-                deserialize(jsonRequest, OllamaEmbedRequestModel.class);
+        OllamaEmbedRequest deserializeRequest = deserialize(jsonRequest, OllamaEmbedRequest.class);
         assertEqualsAfterUnmarshalling(deserializeRequest, req);
         assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
     }