Mirror of https://github.com/amithkoujalgi/ollama4j.git (synced 2025-09-16 11:48:58 +02:00)
Add WeatherTool and integration test with auth proxy
Introduces WeatherTool for fetching weather data via the OpenWeatherMap API, together with its tool specification. Adds an integration test (WithAuth) that uses Testcontainers to verify OllamaAPI connectivity through an NGINX proxy enforcing bearer token authentication. Also updates pom.xml to include the testcontainers nginx dependency and makes minor improvements to OllamaAPI request headers and Javadoc formatting. TypewriterTextarea now supports text alignment, and the homepage header uses center alignment.
parent 339f788832
commit 54d8cf4cd9
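The WithAuth test below exercises the same flow a real client would use against an Ollama endpoint behind bearer-token auth: construct OllamaAPI against the proxy URL, set the token, then call as usual. A minimal sketch (the host URL and token here are placeholders, not values from this commit; the test itself uses "secret-token" against a throwaway NGINX container):

import io.github.ollama4j.OllamaAPI;

public class AuthenticatedPingExample {
    public static void main(String[] args) {
        // Placeholder proxy URL and token; the WithAuth test spins up an NGINX
        // Testcontainer that rejects anything without "Bearer secret-token".
        OllamaAPI api = new OllamaAPI("http://localhost:8080");
        api.setBearerAuth("secret-token");
        System.out.println("Reachable through the authenticated proxy: " + api.ping());
    }
}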
TypewriterTextarea component:
@@ -1,6 +1,6 @@
 import React, { useEffect, useState, useRef } from 'react';

-const TypewriterTextarea = ({ textContent, typingSpeed = 50, pauseBetweenSentences = 1000, height = '200px', width = '100%' }) => {
+const TypewriterTextarea = ({ textContent, typingSpeed = 50, pauseBetweenSentences = 1000, height = '200px', width = '100%', align = 'left' }) => {
   const [text, setText] = useState('');
   const [sentenceIndex, setSentenceIndex] = useState(0);
   const [charIndex, setCharIndex] = useState(0);
@@ -56,8 +56,10 @@ const TypewriterTextarea = ({ textContent, typingSpeed = 50, pauseBetweenSentenc
     fontSize: '1rem',
     backgroundColor: '#f4f4f4',
     border: '1px solid #ccc',
+    textAlign: align,
     resize: 'none',
     whiteSpace: 'pre-wrap',
+    color: 'black',
   }}
 />
 );
Homepage header:
@@ -32,6 +32,7 @@ function HomepageHeader() {
   pauseBetweenSentences={1200}
   height='130px'
   width='100%'
+  align='center'
 />
 </div>
 <div className={styles.buttons} >
pom.xml (6 lines added):
@@ -223,6 +223,12 @@
             <version>1.20.2</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>nginx</artifactId>
+            <version>1.20.0</version>
+            <scope>test</scope>
+        </dependency>
     </dependencies>

     <distributionManagement>
OllamaAPI.java (apart from the @SuppressWarnings spacing and the request-builder hunk, the remaining hunks adjust only Javadoc whitespace/alignment):
@@ -51,7 +51,7 @@ import java.util.stream.Collectors;
 /**
  * The base Ollama API class.
  */
-@SuppressWarnings({ "DuplicatedCode", "resource" })
+@SuppressWarnings({"DuplicatedCode", "resource"})
 public class OllamaAPI {

     private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class);
@@ -215,7 +215,7 @@
      * tags, tag count, and the time when model was updated.
      *
      * @return A list of {@link LibraryModel} objects representing the models
      *         available in the Ollama library.
      * @throws OllamaBaseException If the HTTP request fails or the response is not
      *                             successful (non-200 status code).
      * @throws IOException         If an I/O error occurs during the HTTP request
@@ -281,7 +281,7 @@
      *                        of the library model
      *                        for which the tags need to be fetched.
      * @return a list of {@link LibraryModelTag} objects containing the extracted
      *         tags and their associated metadata.
      * @throws OllamaBaseException if the HTTP response status code indicates an
      *                             error (i.e., not 200 OK),
      *                             or if there is any other issue during the
@@ -348,7 +348,7 @@
      * @param modelName The name of the model to search for in the library.
      * @param tag       The tag name to search for within the specified model.
      * @return The {@link LibraryModelTag} associated with the specified model and
      *         tag.
      * @throws OllamaBaseException If there is a problem with the Ollama library
      *                             operations.
      * @throws IOException         If an I/O error occurs during the operation.
@@ -755,7 +755,7 @@
      * @throws InterruptedException if the operation is interrupted
      */
     public OllamaResult generate(String model, String prompt, boolean raw, Options options,
             OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
         OllamaGenerateRequest ollamaRequestModel = new OllamaGenerateRequest(model, prompt);
         ollamaRequestModel.setRaw(raw);
         ollamaRequestModel.setOptions(options.getOptionsMap());
@@ -771,7 +771,7 @@
      * @param format A map containing the format specification for the structured
      *               output.
      * @return An instance of {@link OllamaResult} containing the structured
      *         response.
      * @throws OllamaBaseException  if the response indicates an error status.
      * @throws IOException          if an I/O error occurs during the HTTP request.
      * @throws InterruptedException if the operation is interrupted.
@@ -789,8 +789,9 @@
         String jsonData = Utils.getObjectMapper().writeValueAsString(requestBody);
         HttpClient httpClient = HttpClient.newHttpClient();

-        HttpRequest request = HttpRequest.newBuilder(uri)
-                .header("Content-Type", "application/json")
+        HttpRequest request = getRequestBuilderDefault(uri)
+                .header("Accept", "application/json")
+                .header("Content-type", "application/json")
                 .POST(HttpRequest.BodyPublishers.ofString(jsonData))
                 .build();

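The switch from HttpRequest.newBuilder(uri) to getRequestBuilderDefault(uri) routes the structured-output request through the library's shared builder, which is what lets it pick up the configured request timeout and the Authorization header set via setBearerAuth (the auth and requestTimeoutSeconds fields appear further down in this diff). The helper's body is not shown in this commit; the following is a hypothetical sketch of what such a default builder typically does, with assumed field names:

import java.net.URI;
import java.net.http.HttpRequest;
import java.time.Duration;

// Hypothetical sketch only -- not the verbatim ollama4j implementation.
// A shared "default" builder centralizes the timeout and bearer header so every
// endpoint, including the structured-output call patched above, sends them.
final class RequestBuilderSketch {
    private final String bearerToken;         // e.g. set via setBearerAuth("secret-token")
    private final long requestTimeoutSeconds; // e.g. set via setRequestTimeoutSeconds(120)

    RequestBuilderSketch(String bearerToken, long requestTimeoutSeconds) {
        this.bearerToken = bearerToken;
        this.requestTimeoutSeconds = requestTimeoutSeconds;
    }

    HttpRequest.Builder getRequestBuilderDefault(URI uri) {
        HttpRequest.Builder builder = HttpRequest.newBuilder(uri)
                .header("Content-Type", "application/json")
                .timeout(Duration.ofSeconds(requestTimeoutSeconds));
        if (bearerToken != null) {
            builder = builder.header("Authorization", "Bearer " + bearerToken);
        }
        return builder;
    }
}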
@@ -845,8 +846,8 @@
      * @param options Additional options or configurations to use when generating
      *                the response.
      * @return {@link OllamaToolsResult} An OllamaToolsResult object containing the
      *         response from the AI model and the results of invoking the tools on
      *         that output.
      * @throws OllamaBaseException  if the response indicates an error status
      * @throws IOException          if an I/O error occurs during the HTTP request
      * @throws InterruptedException if the operation is interrupted
@@ -938,7 +939,7 @@
      * @throws InterruptedException if the operation is interrupted
      */
     public OllamaResult generateWithImageFiles(String model, String prompt, List<File> imageFiles, Options options,
             OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
         List<String> images = new ArrayList<>();
         for (File imageFile : imageFiles) {
             images.add(encodeFileToBase64(imageFile));
@@ -985,7 +986,7 @@
      * @throws URISyntaxException if the URI for the request is malformed
      */
     public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs, Options options,
             OllamaStreamHandler streamHandler)
             throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
         List<String> images = new ArrayList<>();
         for (String imageURL : imageURLs) {
@@ -1058,14 +1059,14 @@
      * @param model    the ollama model to ask the question to
      * @param messages chat history / message stack to send to the model
      * @return {@link OllamaChatResult} containing the api response and the message
      *         history including the newly acquired assistant response.
      * @throws OllamaBaseException     any response code than 200 has been returned
      * @throws IOException             in case the responseStream can not be read
      * @throws InterruptedException    in case the server is not reachable or network
      *                                 issues happen
      * @throws OllamaBaseException     if the response indicates an error status
      * @throws IOException             if an I/O error occurs during the HTTP request
      * @throws InterruptedException    if the operation is interrupted
      * @throws ToolInvocationException if the tool invocation fails
      */
     public OllamaChatResult chat(String model, List<OllamaChatMessage> messages)
@@ -1082,13 +1083,13 @@
      *
      * @param request request object to be sent to the server
      * @return {@link OllamaChatResult}
      * @throws OllamaBaseException     any response code than 200 has been returned
      * @throws IOException             in case the responseStream can not be read
      * @throws InterruptedException    in case the server is not reachable or network
      *                                 issues happen
      * @throws OllamaBaseException     if the response indicates an error status
      * @throws IOException             if an I/O error occurs during the HTTP request
      * @throws InterruptedException    if the operation is interrupted
      * @throws ToolInvocationException if the tool invocation fails
      */
     public OllamaChatResult chat(OllamaChatRequest request)
@@ -1107,13 +1108,13 @@
      *                      (caution: all previous tokens from stream will be
      *                      concatenated)
      * @return {@link OllamaChatResult}
      * @throws OllamaBaseException     any response code than 200 has been returned
      * @throws IOException             in case the responseStream can not be read
      * @throws InterruptedException    in case the server is not reachable or network
      *                                 issues happen
      * @throws OllamaBaseException     if the response indicates an error status
      * @throws IOException             if an I/O error occurs during the HTTP request
      * @throws InterruptedException    if the operation is interrupted
      * @throws ToolInvocationException if the tool invocation fails
      */
     public OllamaChatResult chat(OllamaChatRequest request, OllamaStreamHandler streamHandler)
@@ -1247,7 +1248,7 @@
                 registerAnnotatedTools(provider.getDeclaredConstructor().newInstance());
             }
         } catch (InstantiationException | NoSuchMethodException | IllegalAccessException
                 | InvocationTargetException e) {
             throw new RuntimeException(e);
         }
     }
@@ -1384,7 +1385,7 @@
      * @throws InterruptedException if the thread is interrupted during the request.
      */
     private OllamaResult generateSyncForOllamaRequestModel(OllamaGenerateRequest ollamaRequestModel,
             OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
         OllamaGenerateEndpointCaller requestCaller = new OllamaGenerateEndpointCaller(host, auth, requestTimeoutSeconds,
                 verbose);
         OllamaResult result;
WeatherTool.java (new file, package io.github.ollama4j.tools.sampletools):
@@ -0,0 +1,88 @@
package io.github.ollama4j.tools.sampletools;

import java.io.IOException;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.Map;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import io.github.ollama4j.tools.Tools;

public class WeatherTool {
    private String openWeatherMapAPIKey = null;

    public WeatherTool(String openWeatherMapAPIKey) {
        this.openWeatherMapAPIKey = openWeatherMapAPIKey;
    }

    public String getCurrentWeather(Map<String, Object> arguments) {
        String city = (String) arguments.get("cityName");
        System.out.println("Finding weather for city: " + city);

        String url = String.format("https://api.openweathermap.org/data/2.5/weather?q=%s&appid=%s&units=metric",
                city,
                this.openWeatherMapAPIKey);

        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(url))
                .build();
        try {
            HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
            if (response.statusCode() == 200) {
                ObjectMapper mapper = new ObjectMapper();
                JsonNode root = mapper.readTree(response.body());
                JsonNode main = root.path("main");
                double temperature = main.path("temp").asDouble();
                String description = root.path("weather").get(0).path("description").asText();
                return String.format("Weather in %s: %.1f°C, %s", city, temperature, description);
            } else {
                return "Could not retrieve weather data for " + city + ". Status code: "
                        + response.statusCode();
            }
        } catch (IOException | InterruptedException e) {
            e.printStackTrace();
            return "Error retrieving weather data: " + e.getMessage();
        }
    }

    public Tools.ToolSpecification getSpecification() {
        return Tools.ToolSpecification.builder()
                .functionName("weather-reporter")
                .functionDescription(
                        "You are a tool who simply finds the city name from the user's message input/query about weather.")
                .toolFunction(this::getCurrentWeather)
                .toolPrompt(
                        Tools.PromptFuncDefinition.builder()
                                .type("prompt")
                                .function(
                                        Tools.PromptFuncDefinition.PromptFuncSpec
                                                .builder()
                                                .name("get-city-name")
                                                .description("Get the city name")
                                                .parameters(
                                                        Tools.PromptFuncDefinition.Parameters
                                                                .builder()
                                                                .type("object")
                                                                .properties(
                                                                        Map.of(
                                                                                "cityName",
                                                                                Tools.PromptFuncDefinition.Property
                                                                                        .builder()
                                                                                        .type("string")
                                                                                        .description(
                                                                                                "The name of the city. e.g. Bengaluru")
                                                                                        .required(true)
                                                                                        .build()))
                                                                .required(java.util.List
                                                                        .of("cityName"))
                                                                .build())
                                                .build())
                                .build())
                .build();
    }
}
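WeatherTool is intended to be plugged into OllamaAPI's tool-calling flow via getSpecification(). A hedged usage sketch follows; registerTool, generateWithTools, and OptionsBuilder are assumed ollama4j entry points (they are not part of this diff), and the model name and API key are placeholders:

// Hedged usage sketch, not part of this commit. registerTool, generateWithTools and
// OptionsBuilder are assumed ollama4j entry points; model name and API key are placeholders.
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.tools.OllamaToolsResult;
import io.github.ollama4j.tools.sampletools.WeatherTool;
import io.github.ollama4j.utils.OptionsBuilder;

public class WeatherToolExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI api = new OllamaAPI("http://localhost:11434");
        // Register the sample tool so the model can route weather questions to getCurrentWeather(...)
        api.registerTool(new WeatherTool("<openweathermap-api-key>").getSpecification());

        OllamaToolsResult result = api.generateWithTools(
                "mistral", // any tools-capable model
                "What is the weather in Bengaluru right now?",
                new OptionsBuilder().build());
        System.out.println(result.getToolResults());
    }
}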
src/test/java/io/github/ollama4j/integrationtests/WithAuth.java (new file, 126 lines):
@@ -0,0 +1,126 @@
package io.github.ollama4j.integrationtests;

import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.samples.AnnotatedTool;
import io.github.ollama4j.tools.annotations.OllamaToolService;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.MethodOrderer.OrderAnnotation;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.NginxContainer;
import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.ollama.OllamaContainer;
import org.testcontainers.utility.DockerImageName;
import org.testcontainers.utility.MountableFile;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.time.Duration;

import static org.junit.jupiter.api.Assertions.assertTrue;

@OllamaToolService(providers = {AnnotatedTool.class})
@TestMethodOrder(OrderAnnotation.class)
@SuppressWarnings({"HttpUrlsUsage", "SpellCheckingInspection", "resource", "ResultOfMethodCallIgnored"})
public class WithAuth {

    private static final Logger LOG = LoggerFactory.getLogger(WithAuth.class);
    private static final int NGINX_PORT = 80;
    private static final int OLLAMA_INTERNAL_PORT = 11434;
    private static final String OLLAMA_VERSION = "0.6.1";

    private static OllamaContainer ollama;
    private static GenericContainer<?> nginx;
    private static OllamaAPI api;

    @BeforeAll
    public static void setUp() {
        ollama = createOllamaContainer();
        ollama.start();

        nginx = createNginxContainer(ollama.getMappedPort(OLLAMA_INTERNAL_PORT));
        nginx.start();

        LOG.info("Using Testcontainer Ollama host...");

        api = new OllamaAPI("http://" + nginx.getHost() + ":" + nginx.getMappedPort(NGINX_PORT));
        api.setRequestTimeoutSeconds(120);
        api.setVerbose(true);
        api.setNumberOfRetriesForModelPull(3);
    }

    private static OllamaContainer createOllamaContainer() {
        OllamaContainer container = new OllamaContainer("ollama/ollama:" + OLLAMA_VERSION);
        container.addExposedPort(OLLAMA_INTERNAL_PORT);
        return container;
    }

    private static String generateNginxConfig(int ollamaPort) {
        return String.format("events {}\n" +
                "\n" +
                "http {\n" +
                "    server {\n" +
                "        listen 80;\n" +
                "\n" +
                "        location / {\n" +
                "            set $auth_header $http_authorization;\n" +
                "\n" +
                "            if ($auth_header != \"Bearer secret-token\") {\n" +
                "                return 401;\n" +
                "            }\n" +
                "\n" +
                "            proxy_pass http://host.docker.internal:%s/;\n" +
                "            proxy_set_header Host $host;\n" +
                "            proxy_set_header X-Real-IP $remote_addr;\n" +
                "            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;\n" +
                "            proxy_set_header X-Forwarded-Proto $scheme;\n" +
                "        }\n" +
                "    }\n" +
                "}\n", ollamaPort);
    }

    public static GenericContainer<?> createNginxContainer(int ollamaPort) {
        File nginxConf;

        try {
            File tempDir = new File(System.getProperty("java.io.tmpdir"), "nginx-auth");
            if (!tempDir.exists()) tempDir.mkdirs();

            nginxConf = new File(tempDir, "nginx.conf");
            try (FileWriter writer = new FileWriter(nginxConf)) {
                writer.write(generateNginxConfig(ollamaPort));
            }

            return new NginxContainer<>(DockerImageName.parse("nginx:1.23.4-alpine"))
                    .withExposedPorts(NGINX_PORT)
                    .withCopyFileToContainer(
                            MountableFile.forHostPath(nginxConf.getAbsolutePath()),
                            "/etc/nginx/nginx.conf"
                    )
                    .withExtraHost("host.docker.internal", "host-gateway")
                    .waitingFor(
                            Wait.forHttp("/")
                                    .forStatusCode(401)
                                    .withStartupTimeout(Duration.ofSeconds(30))
                    );
        } catch (IOException e) {
            throw new RuntimeException("Failed to create nginx.conf", e);
        }
    }

    @Test
    @Order(1)
    void testEndpoint() throws InterruptedException {
        String ollamaUrl = "http://" + ollama.getHost() + ":" + ollama.getMappedPort(OLLAMA_INTERNAL_PORT);
        String nginxUrl = "http://" + nginx.getHost() + ":" + nginx.getMappedPort(NGINX_PORT);
        System.out.printf("Ollama service at %s is now accessible through the Nginx proxy at %s%n", ollamaUrl, nginxUrl);
        api.setBearerAuth("secret-token");
        Thread.sleep(1000);
        assertTrue(api.ping(), "OllamaAPI failed to ping through NGINX with auth.");
    }
}