forked from Mirror/ollama4j

Merge remote-tracking branch 'upstream/main' into feature/chat-request-model

Commit f10c7ac725

Changed files: pom.xml, TestRealAPIs.java, test-config.properties

pom.xml
@@ -4,7 +4,7 @@
     <groupId>io.github.amithkoujalgi</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.50-SNAPSHOT</version>
+    <version>1.0.51-SNAPSHOT</version>
 
     <name>Ollama4j</name>
     <description>Java library for interacting with Ollama API.</description>

TestRealAPIs.java
@@ -9,7 +9,6 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
-import io.github.amithkoujalgi.ollama4j.core.types.OllamaModelType;
 import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
 import java.io.File;
 import java.io.IOException;
@@ -20,26 +19,14 @@ import java.net.http.HttpConnectTimeoutException;
 import java.util.List;
 import java.util.Objects;
 import java.util.Properties;
+import lombok.Data;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Order;
 import org.junit.jupiter.api.Test;
 
 class TestRealAPIs {
   OllamaAPI ollamaAPI;
-
-  private Properties loadProperties() {
-    Properties properties = new Properties();
-    try (InputStream input =
-        getClass().getClassLoader().getResourceAsStream("test-config.properties")) {
-      if (input == null) {
-        throw new RuntimeException("Sorry, unable to find test-config.properties");
-      }
-      properties.load(input);
-      return properties;
-    } catch (IOException e) {
-      throw new RuntimeException("Error loading properties", e);
-    }
-  }
+  Config config;
 
   private File getImageFileFromClasspath(String fileName) {
     ClassLoader classLoader = getClass().getClassLoader();
@@ -48,9 +35,9 @@ class TestRealAPIs {
 
   @BeforeEach
   void setUp() {
-    Properties properties = loadProperties();
-    ollamaAPI = new OllamaAPI(properties.getProperty("ollama.api.url"));
-    ollamaAPI.setRequestTimeoutSeconds(20);
+    config = new Config();
+    ollamaAPI = new OllamaAPI(config.getOllamaURL());
+    ollamaAPI.setRequestTimeoutSeconds(config.getRequestTimeoutSeconds());
   }
 
   @Test
@@ -89,10 +76,10 @@ class TestRealAPIs {
   void testPullModel() {
     testEndpointReachability();
     try {
-      ollamaAPI.pullModel(OllamaModelType.LLAMA2);
+      ollamaAPI.pullModel(config.getModel());
       boolean found =
           ollamaAPI.listModels().stream()
-              .anyMatch(model -> model.getModelName().equals(OllamaModelType.LLAMA2));
+              .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel()));
       assertTrue(found);
     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
       throw new RuntimeException(e);
@@ -106,7 +93,7 @@ class TestRealAPIs {
     try {
       OllamaResult result =
           ollamaAPI.generate(
-              OllamaModelType.LLAMA2,
+              config.getModel(),
               "What is the capital of France? And what's France's connection with Mona Lisa?",
               new OptionsBuilder().build());
       assertNotNull(result);
@@ -124,7 +111,7 @@ class TestRealAPIs {
     try {
       OllamaResult result =
           ollamaAPI.generate(
-              OllamaModelType.LLAMA2,
+              config.getModel(),
               "What is the capital of France? And what's France's connection with Mona Lisa?",
               new OptionsBuilder().setTemperature(0.9f).build());
       assertNotNull(result);
@@ -140,7 +127,7 @@ class TestRealAPIs {
   void testChat() {
     testEndpointReachability();
     try {
-      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
+      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
       OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER, "Say hello to my little friend")
           .withMessage(OllamaChatMessageRole.ASSISTANT, "Seems to be a Tony Montana montage!")
           .withMessage(OllamaChatMessageRole.USER,"We need a montage!")
@@ -160,7 +147,7 @@ class TestRealAPIs {
   void testChatWithSystemPrompt() {
     testEndpointReachability();
     try {
-      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
+      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
       OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
           .withMessage(OllamaChatMessageRole.USER,"We need a montage!")
           .build();
@@ -183,7 +170,7 @@ class TestRealAPIs {
     try {
       OllamaResult result =
           ollamaAPI.generateWithImageFiles(
-              OllamaModelType.LLAVA,
+              config.getImageModel(),
               "What is in this image?",
               List.of(imageFile),
               new OptionsBuilder().build());
@@ -202,7 +189,7 @@ class TestRealAPIs {
     try {
       OllamaResult result =
           ollamaAPI.generateWithImageURLs(
-              OllamaModelType.LLAVA,
+              config.getImageModel(),
               "What is in this image?",
               List.of(
                   "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
@@ -215,3 +202,29 @@ class TestRealAPIs {
     }
   }
 }
+
+@Data
+class Config {
+  private String ollamaURL;
+  private String model;
+  private String imageModel;
+  private int requestTimeoutSeconds;
+
+  public Config() {
+    Properties properties = new Properties();
+    try (InputStream input =
+        getClass().getClassLoader().getResourceAsStream("test-config.properties")) {
+      if (input == null) {
+        throw new RuntimeException("Sorry, unable to find test-config.properties");
+      }
+      properties.load(input);
+      this.ollamaURL = properties.getProperty("ollama.url");
+      this.model = properties.getProperty("ollama.model");
+      this.imageModel = properties.getProperty("ollama.model.image");
+      this.requestTimeoutSeconds =
+          Integer.parseInt(properties.getProperty("ollama.request-timeout-seconds"));
+    } catch (IOException e) {
+      throw new RuntimeException("Error loading properties", e);
+    }
+  }
+}

test-config.properties
@@ -1,2 +1,4 @@
-ollama.api.url=http://192.168.29.223:11434
-ollama.model=llava
+ollama.url=http://localhost:11434
+ollama.model=qwen:0.5b
+ollama.model.image=llava
+ollama.request-timeout-seconds=120
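
For readers unfamiliar with Lombok: the @Data annotation on the new Config class generates the accessors that setUp() and the tests call (getOllamaURL(), getModel(), getImageModel(), getRequestTimeoutSeconds()), along with setters, equals(), hashCode(), and toString(). Below is a minimal hand-written sketch of an equivalent class, not part of this commit, assuming the same property keys shown above; the class name ConfigSketch and its main() are illustrative only.

// Sketch only: plain-Java equivalent of the Lombok-annotated Config class,
// spelled out to show the getters that @Data generates for the tests.
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

class ConfigSketch {
  private String ollamaURL;
  private String model;
  private String imageModel;
  private int requestTimeoutSeconds;

  ConfigSketch() {
    Properties properties = new Properties();
    try (InputStream input =
        getClass().getClassLoader().getResourceAsStream("test-config.properties")) {
      if (input == null) {
        throw new RuntimeException("Sorry, unable to find test-config.properties");
      }
      properties.load(input);
      // Keys introduced by this commit in test-config.properties:
      this.ollamaURL = properties.getProperty("ollama.url");
      this.model = properties.getProperty("ollama.model");
      this.imageModel = properties.getProperty("ollama.model.image");
      this.requestTimeoutSeconds =
          Integer.parseInt(properties.getProperty("ollama.request-timeout-seconds"));
    } catch (IOException e) {
      throw new RuntimeException("Error loading properties", e);
    }
  }

  // Hand-written versions of the accessors Lombok's @Data would generate on Config.
  public String getOllamaURL() { return ollamaURL; }
  public String getModel() { return model; }
  public String getImageModel() { return imageModel; }
  public int getRequestTimeoutSeconds() { return requestTimeoutSeconds; }

  // Quick smoke check: prints the loaded values when test-config.properties is on the classpath.
  public static void main(String[] args) {
    ConfigSketch cfg = new ConfigSketch();
    System.out.println(cfg.getOllamaURL() + " / " + cfg.getModel() + " / "
        + cfg.getImageModel() + " / " + cfg.getRequestTimeoutSeconds() + "s");
  }
}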