Mirror of https://github.com/amithkoujalgi/ollama4j.git (synced 2025-10-13 17:08:57 +02:00)
Refactor OllamaAPI and related classes for improved functionality and code clarity
This update removes the deprecated methods from the OllamaAPI class, improving the overall structure and readability. The OllamaGenerateRequest class now carries a list of tools, and the generate methods have been refactored to streamline request handling. The WeatherTool sample class has been removed, and a sample tool specification has been added to the unit tests for demonstration purposes. In pom.xml, the Logback Classic test dependency is now present as a commented-out block.
This commit is contained in:
parent 53e4b413ec
commit fe82550637

pom.xml (+9 lines)
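To illustrate the refactored call path described above: callers now build an OllamaGenerateRequest and pass it to generate together with a stream observer, instead of using the removed per-parameter overloads. A minimal sketch, assuming a local Ollama server, a placeholder model name ("llama3"), and that the request-based generate overload is public after this change:

    import io.github.ollama4j.OllamaAPI;
    import io.github.ollama4j.models.generate.OllamaGenerateRequest;
    import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
    import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
    import io.github.ollama4j.models.response.OllamaResult;

    public class GenerateExample {
        public static void main(String[] args) throws Exception {
            // Placeholder host; assumes OllamaAPI exposes a host-URL constructor.
            OllamaAPI api = new OllamaAPI("http://localhost:11434");

            // Build the request once; the deprecated per-parameter overloads are gone.
            OllamaGenerateRequest request =
                    OllamaGenerateRequestBuilder.builder()
                            .withModel("llama3") // placeholder model name
                            .withPrompt("Why is the sky blue?")
                            .withRaw(false)
                            .withThink(false)
                            .build();

            // A null response handler keeps the call synchronous.
            OllamaResult result =
                    api.generate(request, new OllamaGenerateStreamObserver(null, null));
            System.out.println(result.getResponse());
        }
    }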
pom.xml
@@ -275,6 +275,15 @@
             <artifactId>slf4j-api</artifactId>
             <version>2.0.17</version>
         </dependency>
+
+        <!-- Logger impl - Logback Classic (SLF4J binding) - Disabled on JAR build -->
+        <!-- <dependency>-->
+        <!--     <groupId>ch.qos.logback</groupId>-->
+        <!--     <artifactId>logback-classic</artifactId>-->
+        <!--     <version>1.4.12</version>-->
+        <!--     <scope>test</scope>-->
+        <!-- </dependency>-->
+
         <dependency>
             <groupId>org.junit.jupiter</groupId>
             <artifactId>junit-jupiter-api</artifactId>
src/main/java/io/github/ollama4j/OllamaAPI.java
@@ -8,7 +8,6 @@
  */
 package io.github.ollama4j;

-import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import io.github.ollama4j.exceptions.OllamaBaseException;
 import io.github.ollama4j.exceptions.RoleNotFoundException;
@@ -20,7 +19,6 @@ import io.github.ollama4j.models.chat.OllamaChatTokenHandler;
 import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel;
 import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;
 import io.github.ollama4j.models.generate.OllamaGenerateRequest;
 import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
 import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
 import io.github.ollama4j.models.generate.OllamaGenerateTokenHandler;
 import io.github.ollama4j.models.ps.ModelsProcessResponse;
@@ -31,7 +29,6 @@ import io.github.ollama4j.tools.annotations.OllamaToolService;
 import io.github.ollama4j.tools.annotations.ToolProperty;
 import io.github.ollama4j.tools.annotations.ToolSpec;
 import io.github.ollama4j.utils.Constants;
 import io.github.ollama4j.utils.Options;
 import io.github.ollama4j.utils.Utils;
 import java.io.*;
 import java.lang.reflect.InvocationTargetException;
@@ -730,39 +727,6 @@ public class OllamaAPI {
         }
     }

-    /**
-     * Generates a response from a model using the specified parameters and stream observer.
-     *
-     * @param model the model name
-     * @param prompt the prompt to send
-     * @param raw whether to return the raw response
-     * @param think whether to stream "thinking" tokens
-     * @param options additional options
-     * @param streamObserver the stream observer for handling streamed responses
-     * @return the OllamaResult containing the response
-     * @throws OllamaBaseException if the request fails
-     */
-    @Deprecated
-    private OllamaResult generate(
-            String model,
-            String prompt,
-            boolean raw,
-            boolean think,
-            Options options,
-            OllamaGenerateStreamObserver streamObserver)
-            throws OllamaBaseException {
-        OllamaGenerateRequest request =
-                OllamaGenerateRequestBuilder.builder()
-                        .withModel(model)
-                        .withPrompt(prompt)
-                        .withRaw(raw)
-                        .withThink(think)
-                        .withOptions(options)
-                        .withKeepAlive("0m")
-                        .build();
-        return generate(request, streamObserver);
-    }
-
     /**
      * Generates a response from a model using the specified parameters and stream observer. If
      * {@code streamObserver} is provided, streaming is enabled; otherwise, a synchronous call is
@@ -796,179 +760,34 @@ public class OllamaAPI {
     private OllamaResult generateWithToolsInternal(
             OllamaGenerateRequest request, OllamaGenerateStreamObserver streamObserver)
             throws OllamaBaseException {
-        try {
-            boolean raw = true;
-            OllamaToolsResult toolResult = new OllamaToolsResult();
-            Map<ToolFunctionCallSpec, Object> toolResults = new HashMap<>();
-
-            String prompt = request.getPrompt();
-            if (!prompt.startsWith("[AVAILABLE_TOOLS]")) {
-                final Tools.PromptBuilder promptBuilder = new Tools.PromptBuilder();
-                for (Tools.ToolSpecification spec : toolRegistry.getRegisteredSpecs()) {
-                    promptBuilder.withToolSpecification(spec);
-                }
-                promptBuilder.withPrompt(prompt);
-                prompt = promptBuilder.build();
-            }
-
-            request.setPrompt(prompt);
-            request.setRaw(raw);
-            request.setThink(false);
-
-            OllamaResult result =
-                    generate(
-                            request,
-                            new OllamaGenerateStreamObserver(
-                                    null,
-                                    streamObserver != null
-                                            ? streamObserver.getResponseStreamHandler()
-                                            : null));
-            toolResult.setModelResult(result);
-
-            String toolsResponse = result.getResponse();
-            if (toolsResponse.contains("[TOOL_CALLS]")) {
-                toolsResponse = toolsResponse.replace("[TOOL_CALLS]", "");
-            }
-
-            List<ToolFunctionCallSpec> toolFunctionCallSpecs = new ArrayList<>();
-            ObjectMapper objectMapper = Utils.getObjectMapper();
-
-            if (!toolsResponse.isEmpty()) {
-                try {
-                    objectMapper.readTree(toolsResponse);
-                } catch (JsonParseException e) {
-                    return result;
-                }
-                toolFunctionCallSpecs =
-                        objectMapper.readValue(
-                                toolsResponse,
-                                objectMapper
-                                        .getTypeFactory()
-                                        .constructCollectionType(
-                                                List.class, ToolFunctionCallSpec.class));
-            }
-            for (ToolFunctionCallSpec toolFunctionCallSpec : toolFunctionCallSpecs) {
-                toolResults.put(toolFunctionCallSpec, invokeTool(toolFunctionCallSpec));
-            }
-            toolResult.setToolResults(toolResults);
-            return result;
-        } catch (Exception e) {
-            throw new OllamaBaseException(e.getMessage(), e);
+        List<Tools.PromptFuncDefinition> tools = new ArrayList<>();
+        for (Tools.ToolSpecification spec : toolRegistry.getRegisteredSpecs()) {
+            tools.add(spec.getToolPrompt());
         }
-    }
-
-    /**
-     * Generates structured output from the specified AI model and prompt.
-     *
-     * <p>Note: When formatting is specified, the 'think' parameter is not allowed.
-     *
-     * @param model The name or identifier of the AI model to use for generating the response.
-     * @param prompt The input text or prompt to provide to the AI model.
-     * @param format A map containing the format specification for the structured output.
-     * @return An instance of {@link OllamaResult} containing the structured response.
-     * @throws OllamaBaseException if the response indicates an error status.
-     */
-    @Deprecated
-    @SuppressWarnings("LoggingSimilarMessage")
-    private OllamaResult generateWithFormat(String model, String prompt, Map<String, Object> format)
-            throws OllamaBaseException {
-        OllamaGenerateRequest request =
-                OllamaGenerateRequestBuilder.builder()
-                        .withModel(model)
-                        .withPrompt(prompt)
-                        .withFormat(format)
-                        .withThink(false)
-                        .build();
-        return generate(request, null);
-    }
-
-    /**
-     * Generates a response using the specified AI model and prompt, then automatically detects and
-     * invokes any tool calls present in the model's output.
-     *
-     * <p>This method operates in blocking mode. It first augments the prompt with all registered
-     * tool specifications (unless the prompt already begins with {@code [AVAILABLE_TOOLS]}), sends
-     * the prompt to the model, and parses the model's response for tool call instructions. If tool
-     * calls are found, each is invoked using the registered tool implementations, and their results
-     * are collected.
-     *
-     * <p>Typical usage:
-     *
-     * <pre>{@code
-     * OllamaToolsResult result = ollamaAPI.generateWithTools(
-     *     "my-model",
-     *     "What is the weather in Bengaluru?",
-     *     Options.defaultOptions(),
-     *     null // or a custom OllamaStreamHandler for streaming
-     * );
-     * String modelResponse = result.getModelResult().getResponse();
-     * Map<ToolFunctionCallSpec, Object> toolResults = result.getToolResults();
-     * }</pre>
-     *
-     * @param model the name or identifier of the AI model to use for generating the response
-     * @param prompt the input text or prompt to provide to the AI model
-     * @param options additional options or configurations to use when generating the response
-     * @param streamHandler handler for streaming responses; if {@code null}, streaming is disabled
-     * @return an {@link OllamaToolsResult} containing the model's response and the results of any
-     *     invoked tools. If the model does not request any tool calls, the tool results map will be
-     *     empty.
-     * @throws OllamaBaseException if the Ollama API returns an error status
-     */
-    @Deprecated
-    private OllamaToolsResult generateWithTools(
-            String model, String prompt, Options options, OllamaGenerateTokenHandler streamHandler)
-            throws OllamaBaseException {
-        OllamaGenerateRequest request =
-                OllamaGenerateRequestBuilder.builder()
-                        .withModel(model)
-                        .withPrompt(prompt)
-                        .withOptions(options)
-                        .withUseTools(true)
-                        .build();
-        // Execute unified path, but also return tools result by re-parsing
-        OllamaResult res = generate(request, new OllamaGenerateStreamObserver(null, streamHandler));
-        OllamaToolsResult tr = new OllamaToolsResult();
-        tr.setModelResult(res);
-        return tr;
-    }
-
-    /**
-     * Asynchronously generates a response for a prompt using a model running on the Ollama server.
-     *
-     * <p>This method returns an {@link OllamaAsyncResultStreamer} handle that can be used to poll
-     * for status and retrieve streamed "thinking" and response tokens from the model. The call is
-     * non-blocking.
-     *
-     * <p>Example usage:
-     *
-     * <pre>{@code
-     * OllamaAsyncResultStreamer resultStreamer = ollamaAPI.generate("gpt-oss:20b", "Who are you", false, true);
-     * int pollIntervalMilliseconds = 1000;
-     * while (true) {
-     *     String thinkingTokens = resultStreamer.getThinkingResponseStream().poll();
-     *     String responseTokens = resultStreamer.getResponseStream().poll();
-     *     System.out.print(thinkingTokens != null ? thinkingTokens.toUpperCase() : "");
-     *     System.out.print(responseTokens != null ? responseTokens.toLowerCase() : "");
-     *     Thread.sleep(pollIntervalMilliseconds);
-     *     if (!resultStreamer.isAlive())
-     *         break;
-     * }
-     * System.out.println("Complete thinking response: " + resultStreamer.getCompleteThinkingResponse());
-     * System.out.println("Complete response: " + resultStreamer.getCompleteResponse());
-     * }</pre>
-     *
-     * @param model the Ollama model to use for generating the response
-     * @param prompt the prompt or question text to send to the model
-     * @param raw if {@code true}, returns the raw response from the model
-     * @param think if {@code true}, streams "thinking" tokens as well as response tokens
-     * @return an {@link OllamaAsyncResultStreamer} handle for polling and retrieving streamed
-     *     results
-     * @throws OllamaBaseException if the request fails
-     */
-    @Deprecated
-    private OllamaAsyncResultStreamer generate(
-            String model, String prompt, boolean raw, boolean think) throws OllamaBaseException {
-        return generateAsync(model, prompt, raw, think);
+        ArrayList<OllamaChatMessage> msgs = new ArrayList<>();
+        OllamaChatRequest chatRequest = new OllamaChatRequest();
+        chatRequest.setModel(request.getModel());
+        OllamaChatMessage ocm = new OllamaChatMessage();
+        ocm.setRole(OllamaChatMessageRole.USER);
+        ocm.setResponse(request.getPrompt());
+        chatRequest.setMessages(msgs);
+        msgs.add(ocm);
+        OllamaChatTokenHandler hdlr = null;
+        chatRequest.setTools(tools);
+        if (streamObserver != null) {
+            chatRequest.setStream(true);
+            hdlr =
+                    chatResponseModel ->
+                            streamObserver
+                                    .getResponseStreamHandler()
+                                    .accept(chatResponseModel.getMessage().getResponse());
+        }
+        OllamaChatResult res = chat(chatRequest, hdlr);
+        return new OllamaResult(
+                res.getResponseModel().getMessage().getResponse(),
+                res.getResponseModel().getMessage().getThinking(),
+                res.getResponseModel().getTotalDuration(),
+                -1);
     }

     public OllamaAsyncResultStreamer generateAsync(
@@ -996,83 +815,6 @@ public class OllamaAPI {
         }
     }

-    /**
-     * Generates a response from a model running on the Ollama server using one or more images as
-     * input.
-     *
-     * <p>This method allows you to provide images (as {@link File}, {@code byte[]}, or image URL
-     * {@link String}) along with a prompt to the specified model. The images are automatically
-     * encoded as base64 before being sent. Additional model options can be specified via the {@link
-     * Options} parameter.
-     *
-     * <p>If a {@code streamHandler} is provided, the response will be streamed and the handler will
-     * be called for each streamed response chunk. If {@code streamHandler} is {@code null},
-     * streaming is disabled and the full response is returned synchronously.
-     *
-     * @param model the name of the Ollama model to use for generating the response
-     * @param prompt the prompt or question text to send to the model
-     * @param images a list of images to use for the question; each element must be a {@link File},
-     *     {@code byte[]}, or a URL {@link String}
-     * @param options the {@link Options} object containing model parameters; see <a
-     *     href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">Ollama
-     *     model options documentation</a>
-     * @param format a map specifying the output format, or null for default
-     * @param streamHandler an optional callback that is invoked for each streamed response chunk;
-     *     if {@code null}, disables streaming and returns the full response synchronously
-     * @return an {@link OllamaResult} containing the response text and time taken for the response
-     * @throws OllamaBaseException if the response indicates an error status or an invalid image
-     *     type is provided
-     */
-    @Deprecated
-    private OllamaResult generateWithImages(
-            String model,
-            String prompt,
-            List<Object> images,
-            Options options,
-            Map<String, Object> format,
-            OllamaGenerateTokenHandler streamHandler)
-            throws OllamaBaseException {
-        try {
-            List<String> encodedImages = new ArrayList<>();
-            for (Object image : images) {
-                if (image instanceof File) {
-                    LOG.debug("Using image file: {}", ((File) image).getAbsolutePath());
-                    encodedImages.add(encodeFileToBase64((File) image));
-                } else if (image instanceof byte[]) {
-                    LOG.debug("Using image bytes: {} bytes", ((byte[]) image).length);
-                    encodedImages.add(encodeByteArrayToBase64((byte[]) image));
-                } else if (image instanceof String) {
-                    LOG.debug("Using image URL: {}", image);
-                    encodedImages.add(
-                            encodeByteArrayToBase64(
-                                    Utils.loadImageBytesFromUrl(
-                                            (String) image,
-                                            imageURLConnectTimeoutSeconds,
-                                            imageURLReadTimeoutSeconds)));
-                } else {
-                    throw new OllamaBaseException(
-                            "Unsupported image type. Please provide a File, byte[], or a URL"
-                                    + " String.");
-                }
-            }
-            OllamaGenerateRequest ollamaRequestModel =
-                    OllamaGenerateRequestBuilder.builder()
-                            .withModel(model)
-                            .withPrompt(prompt)
-                            .withImagesBase64(encodedImages)
-                            .withOptions(options)
-                            .withFormat(format)
-                            .build();
-            OllamaResult result =
-                    generate(
-                            ollamaRequestModel,
-                            new OllamaGenerateStreamObserver(null, streamHandler));
-            return result;
-        } catch (Exception e) {
-            throw new OllamaBaseException(e.getMessage(), e);
-        }
-    }
-
     /**
      * Ask a question to a model using an {@link OllamaChatRequest} and set up streaming response.
      * This can be constructed using an {@link OllamaChatRequestBuilder}.
@@ -1121,6 +863,10 @@ public class OllamaAPI {
         if (toolFunction == null) {
             throw new ToolInvocationException("Tool function not found: " + toolName);
         }
+        LOG.debug(
+                "Invoking tool {} with arguments: {}",
+                toolCall.getFunction().getName(),
+                toolCall.getFunction().getArguments());
         Map<String, Object> arguments = toolCall.getFunction().getArguments();
         Object res = toolFunction.apply(arguments);
         String argumentKeys =
@@ -1139,7 +885,6 @@ public class OllamaAPI {
                                 + res
                                 + " [/TOOL_RESULTS]"));
         }
-
         if (tokenHandler != null) {
             result = requestCaller.call(request, tokenHandler);
         } else {
@@ -1396,7 +1141,7 @@ public class OllamaAPI {
             out = result;
             return result;
         } catch (Exception e) {
-            throw new OllamaBaseException("Ping failed", e);
+            throw new OllamaBaseException(e.getMessage(), e);
         } finally {
             MetricsRecorder.record(
                     OllamaGenerateEndpointCaller.endpoint,
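For context on the tools flow that replaces the removed generateWithTools overload: a compact sketch of registering a specification and routing a request through the tool-enabled path. The builder and registration methods mirror those visible in this diff; the ollamaAPI instance, the placeholder model name, and the assumption that ToolFunction is a functional interface (it appears in this diff only as an anonymous class) are not confirmed by the commit itself:

    // Sketch only: register a tool spec, then send a request with useTools enabled.
    Tools.ToolSpecification spec =
            Tools.ToolSpecification.builder()
                    .functionName("current-weather")
                    .functionDescription("Get current weather")
                    // Lambda assumes ToolFunction has a single abstract apply(Map) method.
                    .toolFunction(args -> "It is sunny in " + args.get("city"))
                    .build();
    ollamaAPI.registerTools(java.util.List.of(spec));

    OllamaGenerateRequest request =
            OllamaGenerateRequestBuilder.builder()
                    .withModel("llama3") // placeholder model name
                    .withPrompt("What is the weather in Bengaluru?")
                    .withUseTools(true) // routes through the chat-based tools path
                    .build();
    OllamaResult result =
            ollamaAPI.generate(request, new OllamaGenerateStreamObserver(null, null));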
src/main/java/io/github/ollama4j/models/generate/OllamaGenerateRequest.java
@@ -9,6 +9,7 @@
 package io.github.ollama4j.models.generate;

 import io.github.ollama4j.models.request.OllamaCommonRequest;
+import io.github.ollama4j.tools.Tools;
 import io.github.ollama4j.utils.OllamaRequestBody;
 import java.util.List;
 import lombok.Getter;
@@ -25,6 +26,7 @@ public class OllamaGenerateRequest extends OllamaCommonRequest implements OllamaRequestBody {
     private boolean raw;
     private boolean think;
     private boolean useTools;
+    private List<Tools.PromptFuncDefinition> tools;

     public OllamaGenerateRequest() {}
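A minimal sketch of how the new tools field can be populated from registered specifications, mirroring the loop in generateWithToolsInternal above. The toolRegistry variable and the setTools setter are assumptions here (the diff confirms an equivalent setTools only on OllamaChatRequest):

    // Sketch only: build the request's tool prompts from registered specs.
    // "toolRegistry" and "setTools" are assumed, not confirmed by this diff.
    List<Tools.PromptFuncDefinition> tools = new ArrayList<>();
    for (Tools.ToolSpecification spec : toolRegistry.getRegisteredSpecs()) {
        tools.add(spec.getToolPrompt()); // per-spec prompt definition, as in the diff
    }
    OllamaGenerateRequest request = new OllamaGenerateRequest();
    request.setTools(tools); // assumed Lombok-generated setter on the new field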
OllamaChatEndpointCaller.java
@@ -109,7 +109,6 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
         try (BufferedReader reader =
                 new BufferedReader(
                         new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
-
             String line;
             while ((line = reader.readLine()) != null) {
                 if (handleErrorStatus(statusCode, line, responseBuffer)) {
@@ -141,7 +140,7 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
                         statusCode,
                         responseBuffer);
         if (statusCode != 200) {
-            LOG.error("Status code " + statusCode);
+            LOG.error("Status code: " + statusCode);
             throw new OllamaBaseException(responseBuffer.toString());
         }
         if (wantedToolsForStream != null && ollamaChatResponseModel != null) {
OllamaGenerateEndpointCaller.java
@@ -107,21 +107,8 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {
                         new InputStreamReader(responseBodyStream, StandardCharsets.UTF_8))) {
             String line;
             while ((line = reader.readLine()) != null) {
-                if (statusCode == 404) {
-                    LOG.warn("Status code: 404 (Not Found)");
-                    OllamaErrorResponse ollamaResponseModel =
-                            Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
-                    responseBuffer.append(ollamaResponseModel.getError());
-                } else if (statusCode == 401) {
-                    LOG.warn("Status code: 401 (Unauthorized)");
-                    OllamaErrorResponse ollamaResponseModel =
-                            Utils.getObjectMapper()
-                                    .readValue(
-                                            "{\"error\":\"Unauthorized\"}",
-                                            OllamaErrorResponse.class);
-                    responseBuffer.append(ollamaResponseModel.getError());
-                } else if (statusCode == 400) {
-                    LOG.warn("Status code: 400 (Bad Request)");
+                if (statusCode >= 400) {
+                    LOG.warn("Error code: {}", statusCode);
                     OllamaErrorResponse ollamaResponseModel =
                             Utils.getObjectMapper().readValue(line, OllamaErrorResponse.class);
                     responseBuffer.append(ollamaResponseModel.getError());
@@ -140,6 +127,7 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {

         if (statusCode != 200) {
             LOG.error("Status code: {}", statusCode);
+            LOG.error("Response: {}", responseBuffer);
             throw new OllamaBaseException(responseBuffer.toString());
         } else {
             long endTime = System.currentTimeMillis();
@@ -149,7 +137,6 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {
                             thinkingBuffer.toString(),
                             endTime - startTime,
                             statusCode);
-
             ollamaResult.setModel(ollamaGenerateResponseModel.getModel());
             ollamaResult.setCreatedAt(ollamaGenerateResponseModel.getCreatedAt());
             ollamaResult.setDone(ollamaGenerateResponseModel.isDone());
src/main/java/io/github/ollama4j/models/response/OllamaResult.java
@@ -18,11 +18,13 @@ import java.util.List;
 import java.util.Map;
+import lombok.Data;
 import lombok.Getter;
 import lombok.Setter;

+/**
+ * The type Ollama result.
+ */
-@Getter
-@Setter
-@SuppressWarnings("unused")
+@Data
 @JsonIgnoreProperties(ignoreUnknown = true)
src/main/java/io/github/ollama4j/tools/sampletools/WeatherTool.java (file deleted)
@@ -1,76 +0,0 @@
-/*
- * Ollama4j - Java library for interacting with Ollama server.
- * Copyright (c) 2025 Amith Koujalgi and contributors.
- *
- * Licensed under the MIT License (the "License");
- * you may not use this file except in compliance with the License.
- *
- */
-package io.github.ollama4j.tools.sampletools;
-
-import io.github.ollama4j.tools.Tools;
-import java.util.Map;
-
-@SuppressWarnings("resource")
-public class WeatherTool {
-    private String paramCityName = "cityName";
-
-    /**
-     * Default constructor for WeatherTool.
-     * This constructor is intentionally left empty because no initialization is required
-     * for this sample tool. If future state or dependencies are needed, they can be added here.
-     */
-    public WeatherTool() {
-        // No initialization required
-    }
-
-    public String getCurrentWeather(Map<String, Object> arguments) {
-        String city = (String) arguments.get(paramCityName);
-        return "It is sunny in " + city;
-    }
-
-    public Tools.ToolSpecification getSpecification() {
-        return Tools.ToolSpecification.builder()
-                .functionName("weather-reporter")
-                .functionDescription(
-                        "You are a tool who simply finds the city name from the user's message"
-                                + " input/query about weather.")
-                .toolFunction(this::getCurrentWeather)
-                .toolPrompt(
-                        Tools.PromptFuncDefinition.builder()
-                                .type("prompt")
-                                .function(
-                                        Tools.PromptFuncDefinition.PromptFuncSpec.builder()
-                                                .name("get-city-name")
-                                                .description("Get the city name")
-                                                .parameters(
-                                                        Tools.PromptFuncDefinition.Parameters
-                                                                .builder()
-                                                                .type("object")
-                                                                .properties(
-                                                                        Map.of(
-                                                                                paramCityName,
-                                                                                Tools.PromptFuncDefinition.Property
-                                                                                        .builder()
-                                                                                        .type("string")
-                                                                                        .description(
-                                                                                                "The name of the city. e.g. Bengaluru")
-                                                                                        .required(true)
-                                                                                        .build()))
-                                                                .required(java.util.List.of(paramCityName))
-                                                                .build())
-                                                .build())
-                                .build())
-                .build();
-    }
-}
TestMockedAPIs.java
@@ -25,13 +25,14 @@ import io.github.ollama4j.models.request.CustomModelRequest;
 import io.github.ollama4j.models.response.ModelDetail;
 import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
 import io.github.ollama4j.models.response.OllamaResult;
+import io.github.ollama4j.tools.ToolFunction;
 import io.github.ollama4j.tools.Tools;
-import io.github.ollama4j.tools.sampletools.WeatherTool;
 import io.github.ollama4j.utils.OptionsBuilder;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
@@ -100,7 +101,7 @@ class TestMockedAPIs {
         verify(ollamaAPI, times(1)).registerTools(Collections.emptyList());

         List<Tools.ToolSpecification> toolSpecifications = new ArrayList<>();
-        toolSpecifications.add(new WeatherTool().getSpecification());
+        toolSpecifications.add(getSampleToolSpecification());
         doNothing().when(ollamaAPI).registerTools(toolSpecifications);
         ollamaAPI.registerTools(toolSpecifications);
         verify(ollamaAPI, times(1)).registerTools(toolSpecifications);
@@ -320,4 +321,51 @@ class TestMockedAPIs {
             throw new RuntimeException("Failed to run test: testGetRoleFound");
         }
     }
+
+    private static Tools.ToolSpecification getSampleToolSpecification() {
+        return Tools.ToolSpecification.builder()
+                .functionName("current-weather")
+                .functionDescription("Get current weather")
+                .toolFunction(
+                        new ToolFunction() {
+                            @Override
+                            public Object apply(Map<String, Object> arguments) {
+                                String location = arguments.get("city").toString();
+                                return "Currently " + location + "'s weather is beautiful.";
+                            }
+                        })
+                .toolPrompt(
+                        Tools.PromptFuncDefinition.builder()
+                                .type("prompt")
+                                .function(
+                                        Tools.PromptFuncDefinition.PromptFuncSpec.builder()
+                                                .name("get-location-weather-info")
+                                                .description("Get location details")
+                                                .parameters(
+                                                        Tools.PromptFuncDefinition.Parameters
+                                                                .builder()
+                                                                .type("object")
+                                                                .properties(
+                                                                        Map.of(
+                                                                                "city",
+                                                                                Tools.PromptFuncDefinition.Property
+                                                                                        .builder()
+                                                                                        .type("string")
+                                                                                        .description(
+                                                                                                "The city, e.g. New Delhi, India")
+                                                                                        .required(true)
+                                                                                        .build()))
+                                                                .required(java.util.List.of("city"))
+                                                                .build())
+                                                .build())
+                                .build())
+                .build();
+    }
 }
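A possible follow-up test, not part of this commit, that exercises the sample specification's ToolFunction directly. It assumes Lombok-style getters on Tools.ToolSpecification and a static import of org.junit.jupiter.api.Assertions.assertEquals:

    @Test
    void testSampleToolFunctionInvocation() {
        // Sketch: invoke the sample tool's function with a city argument
        // and verify the canned response from getSampleToolSpecification().
        Tools.ToolSpecification spec = getSampleToolSpecification();
        Object out = spec.getToolFunction().apply(Map.of("city", "Bengaluru"));
        assertEquals("Currently Bengaluru's weather is beautiful.", out);
    }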