Mirror of https://github.com/amithkoujalgi/ollama4j.git (synced 2025-09-16 03:39:05 +02:00)
Add WeatherTool and integration test with auth proxy
Introduces WeatherTool for fetching weather data via the OpenWeatherMap API, along with its tool specification. Adds an integration test (WithAuth) that uses Testcontainers to verify OllamaAPI connectivity through an NGINX proxy with bearer-token authentication. Also updates pom.xml to include the Testcontainers nginx dependency and makes minor improvements to OllamaAPI request headers and Javadoc formatting. TypewriterTextarea now supports text alignment, and the homepage header uses center alignment.
Parent: 339f788832 · Commit: 54d8cf4cd9
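For orientation before the diff, here is a minimal, hypothetical sketch of the client-side behavior the new integration test exercises: an OllamaAPI instance pointed at a bearer-token-protected endpoint. The host, port, and token are placeholders; setBearerAuth, setRequestTimeoutSeconds, and ping are the OllamaAPI methods used by the WithAuth test below.

import io.github.ollama4j.OllamaAPI;

public class BearerAuthSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder URL of a proxy that requires "Authorization: Bearer <token>"
        OllamaAPI api = new OllamaAPI("http://localhost:8080");
        api.setRequestTimeoutSeconds(120);

        // Send the bearer token with every request
        api.setBearerAuth("secret-token");

        // ping() succeeds only if the proxied Ollama server accepts the request
        System.out.println("Reachable: " + api.ping());
    }
}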
TypewriterTextarea component (adds an align prop):

@@ -1,6 +1,6 @@
 import React, { useEffect, useState, useRef } from 'react';

-const TypewriterTextarea = ({ textContent, typingSpeed = 50, pauseBetweenSentences = 1000, height = '200px', width = '100%' }) => {
+const TypewriterTextarea = ({ textContent, typingSpeed = 50, pauseBetweenSentences = 1000, height = '200px', width = '100%', align = 'left' }) => {
   const [text, setText] = useState('');
   const [sentenceIndex, setSentenceIndex] = useState(0);
   const [charIndex, setCharIndex] = useState(0);
@@ -56,11 +56,13 @@ const TypewriterTextarea = ({ textContent, typingSpeed = 50, pauseBetweenSentenc
         fontSize: '1rem',
         backgroundColor: '#f4f4f4',
         border: '1px solid #ccc',
+        textAlign: align,
         resize: 'none',
         whiteSpace: 'pre-wrap',
         color: 'black',
       }}
     />
   );
 };
-export default TypewriterTextarea;
+
+export default TypewriterTextarea;

Homepage header (uses center alignment):

@@ -32,6 +32,7 @@ function HomepageHeader() {
           pauseBetweenSentences={1200}
           height='130px'
           width='100%'
+          align='center'
         />
       </div>
       <div className={styles.buttons} >
pom.xml (6 changed lines):
@@ -223,6 +223,12 @@
       <version>1.20.2</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.testcontainers</groupId>
+      <artifactId>nginx</artifactId>
+      <version>1.20.0</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>

   <distributionManagement>
OllamaAPI.java (several hunks only re-align Javadoc continuation lines and wrapped method signatures; those whitespace-only changes appear below as matching -/+ pairs):

@@ -51,7 +51,7 @@ import java.util.stream.Collectors;
 /**
  * The base Ollama API class.
  */
-@SuppressWarnings({ "DuplicatedCode", "resource" })
+@SuppressWarnings({"DuplicatedCode", "resource"})
 public class OllamaAPI {

     private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class);
@@ -215,7 +215,7 @@ public class OllamaAPI {
      * tags, tag count, and the time when model was updated.
      *
      * @return A list of {@link LibraryModel} objects representing the models
-     *         available in the Ollama library.
+     *         available in the Ollama library.
      * @throws OllamaBaseException If the HTTP request fails or the response is not
      *                             successful (non-200 status code).
      * @throws IOException If an I/O error occurs during the HTTP request
@@ -281,7 +281,7 @@ public class OllamaAPI {
      *                  of the library model
      *                  for which the tags need to be fetched.
      * @return a list of {@link LibraryModelTag} objects containing the extracted
-     *         tags and their associated metadata.
+     *         tags and their associated metadata.
      * @throws OllamaBaseException if the HTTP response status code indicates an
      *                             error (i.e., not 200 OK),
      *                             or if there is any other issue during the
@@ -348,7 +348,7 @@ public class OllamaAPI {
      * @param modelName The name of the model to search for in the library.
      * @param tag The tag name to search for within the specified model.
      * @return The {@link LibraryModelTag} associated with the specified model and
-     *         tag.
+     *         tag.
      * @throws OllamaBaseException If there is a problem with the Ollama library
      *                             operations.
      * @throws IOException If an I/O error occurs during the operation.
@@ -755,7 +755,7 @@ public class OllamaAPI {
      * @throws InterruptedException if the operation is interrupted
      */
     public OllamaResult generate(String model, String prompt, boolean raw, Options options,
-            OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
+            OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
         OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt);
         ollamaRequestModel.setRaw(raw);
         ollamaRequestModel.setOptions(options.getOptionsMap());
@@ -771,7 +771,7 @@ public class OllamaAPI {
      * @param format A map containing the format specification for the structured
      *               output.
      * @return An instance of {@link OllamaResult} containing the structured
-     *         response.
+     *         response.
      * @throws OllamaBaseException if the response indicates an error status.
      * @throws IOException if an I/O error occurs during the HTTP request.
      * @throws InterruptedException if the operation is interrupted.
@@ -789,8 +789,9 @@ public class OllamaAPI {
         String jsonData = Utils.getObjectMapper().writeValueAsString(requestBody);
         HttpClient httpClient = HttpClient.newHttpClient();

-        HttpRequest request = HttpRequest.newBuilder(uri)
-                .header("Content-Type", "application/json")
+        HttpRequest request = getRequestBuilderDefault(uri)
+                .header("Accept", "application/json")
+                .header("Content-type", "application/json")
                 .POST(HttpRequest.BodyPublishers.ofString(jsonData))
                 .build();

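The change above routes the structured-output request through getRequestBuilderDefault(uri), so it picks up the same default headers and authentication as the other endpoints. Below is a minimal, hypothetical sketch of what such a shared builder could look like; the auth field, the getAuthHeaderValue() accessor, and the timeout handling are illustrative assumptions, not the library's confirmed internals.

    // Hypothetical sketch: a shared builder that applies default headers and, when
    // configured, the Authorization header (e.g. "Bearer secret-token", which is what
    // the NGINX proxy in the WithAuth test checks for).
    private HttpRequest.Builder getRequestBuilderDefault(URI uri) {
        HttpRequest.Builder requestBuilder = HttpRequest.newBuilder(uri)
                .header("Content-Type", "application/json")
                .timeout(Duration.ofSeconds(requestTimeoutSeconds)); // assumed field
        if (auth != null) { // assumed field holding basic/bearer credentials
            requestBuilder.header("Authorization", auth.getAuthHeaderValue()); // assumed accessor
        }
        return requestBuilder;
    }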
@@ -845,8 +846,8 @@ public class OllamaAPI {
      * @param options Additional options or configurations to use when generating
      *                the response.
      * @return {@link OllamaToolsResult} An OllamaToolsResult object containing the
-     *         response from the AI model and the results of invoking the tools on
-     *         that output.
+     *         response from the AI model and the results of invoking the tools on
+     *         that output.
      * @throws OllamaBaseException if the response indicates an error status
      * @throws IOException if an I/O error occurs during the HTTP request
      * @throws InterruptedException if the operation is interrupted
@@ -938,7 +939,7 @@ public class OllamaAPI {
      * @throws InterruptedException if the operation is interrupted
      */
     public OllamaResult generateWithImageFiles(String model, String prompt, List<File> imageFiles, Options options,
-            OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
+            OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
         List<String> images = new ArrayList<>();
         for (File imageFile : imageFiles) {
             images.add(encodeFileToBase64(imageFile));
@@ -985,7 +986,7 @@ public class OllamaAPI {
      * @throws URISyntaxException if the URI for the request is malformed
      */
     public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs, Options options,
-            OllamaStreamHandler streamHandler)
+            OllamaStreamHandler streamHandler)
             throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
         List<String> images = new ArrayList<>();
         for (String imageURL : imageURLs) {
@@ -1058,14 +1059,14 @@ public class OllamaAPI {
      * @param model    the ollama model to ask the question to
      * @param messages chat history / message stack to send to the model
      * @return {@link OllamaChatResult} containing the api response and the message
-     *         history including the newly acquired assistant response.
-     * @throws OllamaBaseException  any response code than 200 has been returned
-     * @throws IOException          in case the responseStream can not be read
-     * @throws InterruptedException in case the server is not reachable or network
-     *                              issues happen
-     * @throws OllamaBaseException  if the response indicates an error status
-     * @throws IOException          if an I/O error occurs during the HTTP request
-     * @throws InterruptedException if the operation is interrupted
+     *         history including the newly acquired assistant response.
+     * @throws OllamaBaseException  any response code than 200 has been returned
+     * @throws IOException          in case the responseStream can not be read
+     * @throws InterruptedException in case the server is not reachable or network
+     *                              issues happen
+     * @throws OllamaBaseException  if the response indicates an error status
+     * @throws IOException          if an I/O error occurs during the HTTP request
+     * @throws InterruptedException if the operation is interrupted
+     * @throws ToolInvocationException if the tool invocation fails
      */
     public OllamaChatResult chat(String model, List<OllamaChatMessage> messages)
@@ -1082,13 +1083,13 @@ public class OllamaAPI {
      *
      * @param request request object to be sent to the server
      * @return {@link OllamaChatResult}
-     * @throws OllamaBaseException  any response code than 200 has been returned
-     * @throws IOException          in case the responseStream can not be read
-     * @throws InterruptedException in case the server is not reachable or network
-     *                              issues happen
-     * @throws OllamaBaseException  if the response indicates an error status
-     * @throws IOException          if an I/O error occurs during the HTTP request
-     * @throws InterruptedException if the operation is interrupted
+     * @throws OllamaBaseException  any response code than 200 has been returned
+     * @throws IOException          in case the responseStream can not be read
+     * @throws InterruptedException in case the server is not reachable or network
+     *                              issues happen
+     * @throws OllamaBaseException  if the response indicates an error status
+     * @throws IOException          if an I/O error occurs during the HTTP request
+     * @throws InterruptedException if the operation is interrupted
+     * @throws ToolInvocationException if the tool invocation fails
      */
     public OllamaChatResult chat(OllamaChatRequest request)
@@ -1107,13 +1108,13 @@ public class OllamaAPI {
      *                      (caution: all previous tokens from stream will be
      *                      concatenated)
      * @return {@link OllamaChatResult}
-     * @throws OllamaBaseException  any response code than 200 has been returned
-     * @throws IOException          in case the responseStream can not be read
-     * @throws InterruptedException in case the server is not reachable or network
-     *                              issues happen
-     * @throws OllamaBaseException  if the response indicates an error status
-     * @throws IOException          if an I/O error occurs during the HTTP request
-     * @throws InterruptedException if the operation is interrupted
+     * @throws OllamaBaseException  any response code than 200 has been returned
+     * @throws IOException          in case the responseStream can not be read
+     * @throws InterruptedException in case the server is not reachable or network
+     *                              issues happen
+     * @throws OllamaBaseException  if the response indicates an error status
+     * @throws IOException          if an I/O error occurs during the HTTP request
+     * @throws InterruptedException if the operation is interrupted
+     * @throws ToolInvocationException if the tool invocation fails
      */
     public OllamaChatResult chat(OllamaChatRequest request, OllamaStreamHandler streamHandler)
@@ -1247,7 +1248,7 @@ public class OllamaAPI {
                 registerAnnotatedTools(provider.getDeclaredConstructor().newInstance());
             }
         } catch (InstantiationException | NoSuchMethodException | IllegalAccessException
-                | InvocationTargetException e) {
+                | InvocationTargetException e) {
             throw new RuntimeException(e);
         }
     }
@@ -1384,7 +1385,7 @@ public class OllamaAPI {
      * @throws InterruptedException if the thread is interrupted during the request.
      */
    private OllamaResult generateSyncForOllamaRequestModel(OllamaGenerateRequest ollamaRequestModel,
-            OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
+            OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
         OllamaGenerateEndpointCaller requestCaller = new OllamaGenerateEndpointCaller(host, auth, requestTimeoutSeconds,
                 verbose);
         OllamaResult result;
WeatherTool.java (new file; all 88 lines added):

@@ -0,0 +1,88 @@
package io.github.ollama4j.tools.sampletools;

import java.io.IOException;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.Map;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import io.github.ollama4j.tools.Tools;

public class WeatherTool {
    private String openWeatherMapAPIKey = null;

    public WeatherTool(String openWeatherMapAPIKey) {
        this.openWeatherMapAPIKey = openWeatherMapAPIKey;
    }

    public String getCurrentWeather(Map<String, Object> arguments) {
        String city = (String) arguments.get("cityName");
        System.out.println("Finding weather for city: " + city);

        String url = String.format("https://api.openweathermap.org/data/2.5/weather?q=%s&appid=%s&units=metric",
                city,
                this.openWeatherMapAPIKey);

        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(url))
                .build();
        try {
            HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
            if (response.statusCode() == 200) {
                ObjectMapper mapper = new ObjectMapper();
                JsonNode root = mapper.readTree(response.body());
                JsonNode main = root.path("main");
                double temperature = main.path("temp").asDouble();
                String description = root.path("weather").get(0).path("description").asText();
                return String.format("Weather in %s: %.1f°C, %s", city, temperature, description);
            } else {
                return "Could not retrieve weather data for " + city + ". Status code: "
                        + response.statusCode();
            }
        } catch (IOException | InterruptedException e) {
            e.printStackTrace();
            return "Error retrieving weather data: " + e.getMessage();
        }
    }

    public Tools.ToolSpecification getSpecification() {
        return Tools.ToolSpecification.builder()
                .functionName("weather-reporter")
                .functionDescription(
                        "You are a tool who simply finds the city name from the user's message input/query about weather.")
                .toolFunction(this::getCurrentWeather)
                .toolPrompt(
                        Tools.PromptFuncDefinition.builder()
                                .type("prompt")
                                .function(
                                        Tools.PromptFuncDefinition.PromptFuncSpec
                                                .builder()
                                                .name("get-city-name")
                                                .description("Get the city name")
                                                .parameters(
                                                        Tools.PromptFuncDefinition.Parameters
                                                                .builder()
                                                                .type("object")
                                                                .properties(
                                                                        Map.of(
                                                                                "cityName",
                                                                                Tools.PromptFuncDefinition.Property
                                                                                        .builder()
                                                                                        .type("string")
                                                                                        .description(
                                                                                                "The name of the city. e.g. Bengaluru")
                                                                                        .required(true)
                                                                                        .build()))
                                                                .required(java.util.List
                                                                        .of("cityName"))
                                                                .build())
                                                .build())
                                .build())
                .build();
    }
}
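A short, hypothetical usage sketch of the new sample tool. registerTool and generateWithTools are existing OllamaAPI methods, but the model name, API key, and result accessors below are illustrative placeholders rather than code from this commit.

import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.tools.OllamaToolsResult;
import io.github.ollama4j.tools.sampletools.WeatherTool;
import io.github.ollama4j.utils.OptionsBuilder;

public class WeatherToolUsageSketch {
    public static void main(String[] args) throws Exception {
        OllamaAPI api = new OllamaAPI("http://localhost:11434");

        // Register the tool spec so the model can invoke getCurrentWeather via "weather-reporter"
        api.registerTool(new WeatherTool("<openweathermap-api-key>").getSpecification());

        // The model extracts the city name and the tool fetches the weather for it
        OllamaToolsResult result = api.generateWithTools(
                "mistral",
                "What is the weather in Bengaluru?",
                new OptionsBuilder().build());

        // Print whatever each invoked tool returned (accessor assumed for illustration)
        result.getToolResults().forEach((call, output) -> System.out.println(output));
    }
}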
src/test/java/io/github/ollama4j/integrationtests/WithAuth.java (new file; all 126 lines added):
@@ -0,0 +1,126 @@
package io.github.ollama4j.integrationtests;

import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.samples.AnnotatedTool;
import io.github.ollama4j.tools.annotations.OllamaToolService;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.MethodOrderer.OrderAnnotation;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.NginxContainer;
import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.ollama.OllamaContainer;
import org.testcontainers.utility.DockerImageName;
import org.testcontainers.utility.MountableFile;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.time.Duration;

import static org.junit.jupiter.api.Assertions.assertTrue;

@OllamaToolService(providers = {AnnotatedTool.class})
@TestMethodOrder(OrderAnnotation.class)
@SuppressWarnings({"HttpUrlsUsage", "SpellCheckingInspection", "resource", "ResultOfMethodCallIgnored"})
public class WithAuth {

    private static final Logger LOG = LoggerFactory.getLogger(WithAuth.class);
    private static final int NGINX_PORT = 80;
    private static final int OLLAMA_INTERNAL_PORT = 11434;
    private static final String OLLAMA_VERSION = "0.6.1";

    private static OllamaContainer ollama;
    private static GenericContainer<?> nginx;
    private static OllamaAPI api;

    @BeforeAll
    public static void setUp() {
        ollama = createOllamaContainer();
        ollama.start();

        nginx = createNginxContainer(ollama.getMappedPort(OLLAMA_INTERNAL_PORT));
        nginx.start();

        LOG.info("Using Testcontainer Ollama host...");

        api = new OllamaAPI("http://" + nginx.getHost() + ":" + nginx.getMappedPort(NGINX_PORT));
        api.setRequestTimeoutSeconds(120);
        api.setVerbose(true);
        api.setNumberOfRetriesForModelPull(3);
    }

    private static OllamaContainer createOllamaContainer() {
        OllamaContainer container = new OllamaContainer("ollama/ollama:" + OLLAMA_VERSION);
        container.addExposedPort(OLLAMA_INTERNAL_PORT);
        return container;
    }

    private static String generateNginxConfig(int ollamaPort) {
        return String.format("events {}\n" +
                "\n" +
                "http {\n" +
                "    server {\n" +
                "        listen 80;\n" +
                "\n" +
                "        location / {\n" +
                "            set $auth_header $http_authorization;\n" +
                "\n" +
                "            if ($auth_header != \"Bearer secret-token\") {\n" +
                "                return 401;\n" +
                "            }\n" +
                "\n" +
                "            proxy_pass http://host.docker.internal:%s/;\n" +
                "            proxy_set_header Host $host;\n" +
                "            proxy_set_header X-Real-IP $remote_addr;\n" +
                "            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;\n" +
                "            proxy_set_header X-Forwarded-Proto $scheme;\n" +
                "        }\n" +
                "    }\n" +
                "}\n", ollamaPort);
    }

    public static GenericContainer<?> createNginxContainer(int ollamaPort) {
        File nginxConf;

        try {
            File tempDir = new File(System.getProperty("java.io.tmpdir"), "nginx-auth");
            if (!tempDir.exists()) tempDir.mkdirs();

            nginxConf = new File(tempDir, "nginx.conf");
            try (FileWriter writer = new FileWriter(nginxConf)) {
                writer.write(generateNginxConfig(ollamaPort));
            }

            return new NginxContainer<>(DockerImageName.parse("nginx:1.23.4-alpine"))
                    .withExposedPorts(NGINX_PORT)
                    .withCopyFileToContainer(
                            MountableFile.forHostPath(nginxConf.getAbsolutePath()),
                            "/etc/nginx/nginx.conf"
                    )
                    .withExtraHost("host.docker.internal", "host-gateway")
                    .waitingFor(
                            Wait.forHttp("/")
                                    .forStatusCode(401)
                                    .withStartupTimeout(Duration.ofSeconds(30))
                    );
        } catch (IOException e) {
            throw new RuntimeException("Failed to create nginx.conf", e);
        }
    }

    @Test
    @Order(1)
    void testEndpoint() throws InterruptedException {
        String ollamaUrl = "http://" + ollama.getHost() + ":" + ollama.getMappedPort(OLLAMA_INTERNAL_PORT);
        String nginxUrl = "http://" + nginx.getHost() + ":" + nginx.getMappedPort(NGINX_PORT);
        System.out.printf("Ollama service at %s is now accessible through the Nginx proxy at %s%n", ollamaUrl, nginxUrl);
        api.setBearerAuth("secret-token");
        Thread.sleep(1000);
        assertTrue(api.ping(), "OllamaAPI failed to ping through NGINX with auth.");
    }
}