forked from Mirror/ollama4j
		
	Merge remote-tracking branch 'origin/main'

Makefile (6 changes)
							@@ -1,3 +1,7 @@
# Default target
.PHONY: all
all: dev build

dev:
	@echo "Setting up dev environment..."
	@command -v pre-commit >/dev/null 2>&1 || { echo "Error: pre-commit is not installed. Please install it first."; exit 1; }
@@ -43,7 +47,7 @@ doxygen:
	@doxygen Doxyfile

javadoc:
	@echo "\033[0;34mGenerating Javadocs into '$(javadocfolder)'...\033[0m"
	@echo "\033[0;34mGenerating Javadocs...\033[0m"
	@mvn clean javadoc:javadoc
	@if [ -f "target/reports/apidocs/index.html" ]; then \
		echo "\033[0;32mJavadocs generated in target/reports/apidocs/index.html\033[0m"; \

docs/docs/agent.md (new file, 60 lines)
							@@ -0,0 +1,60 @@
---
sidebar_position: 4

title: Agents
---

import CodeEmbed from '@site/src/components/CodeEmbed';

# Agents

Build powerful, flexible agents—backed by LLMs and tools—in a few minutes.

Ollama4j’s agent system lets you bring together the best of LLM reasoning and external tool-use using a simple, declarative YAML configuration. No framework bloat, no complicated setup—just describe your agent, plug in your logic, and go.

---

**Why use agents in Ollama4j?**

- **Effortless Customization:** Instantly adjust your agent’s persona, reasoning strategies, or domain by tweaking YAML. No need to touch your compiled Java code.
- **Easy Extensibility:** Want new capabilities? Just add or change tools and logic classes—no framework glue or plumbing required.
- **Fast Experimentation:** Mix-and-match models, instructions, and tools—prototype sophisticated behaviors or orchestrators in minutes.
- **Clean Separation:** Keep business logic (Java) and agent personality/configuration (YAML) separate for maintainability and clarity.

---

## Define an Agent in YAML

Specify everything about your agent—what LLM it uses, its “personality,” and all callable tools—in a single YAML file.

**Agent YAML keys:**

| Field                   | Description                                                                                     |
|-------------------------|-------------------------------------------------------------------------------------------------|
| `name`                  | Name of your agent.                                                                             |
| `host`                  | The base URL for your Ollama server (e.g., `http://localhost:11434`).                           |
| `model`                 | The LLM backing your agent (e.g., `llama2`, `mistral`, `mixtral`, etc).                         |
| `customPrompt`          | _(optional)_ System prompt—instructions or persona for your agent.                              |
| `tools`                 | List of tools the agent can use. Each tool entry describes the name, function, and parameters.  |
| `toolFunctionFQCN`      | Fully qualified Java class name implementing the tool logic. Must be present on classpath.      |
| `requestTimeoutSeconds` | _(optional)_ How long (seconds) to wait for agent replies.                                      |

YAML makes it effortless to configure and tweak your agent’s powers and behavior—no code changes needed!

**Example agent YAML:**

<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/resources/agent.yaml" language='yaml'/>
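
For orientation, here is a minimal sketch of what such a file could look like. The keys follow the table above, while the host, model, tool name, class name, and parameter layout are purely illustrative; treat the embedded example above as the authoritative reference.

```yaml
name: WeatherAssistant
host: http://localhost:11434
model: mistral
customPrompt: You are a concise assistant that answers weather questions.
requestTimeoutSeconds: 120
tools:
  - name: get-weather
    description: Returns the current weather for a given city.
    toolFunctionFQCN: com.example.tools.WeatherToolFunction   # hypothetical class on your classpath
    parameters:
      city:
        type: string
        description: Name of the city to look up.
        required: true
```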

---

## Instantiating and Running Agents in Java

Once your agent is described in YAML, bringing it to life in Java takes only a couple of lines:

<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/AgentExample.java"/>

- **No boilerplate.** Just load and start chatting or calling tools.
- The API takes care of wiring up LLMs, tool invocation, and instruction handling.

Ready to build your own AI-powered assistant? Just write your YAML, implement the tool logic in Java, and go!
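
A minimal sketch of that flow, using only the `Agent` API introduced in this change (`Agent.load(...)` and `runInteractive()`); the YAML file name is illustrative:

```java
import io.github.ollama4j.agent.Agent;

public class AgentQuickstart {
    public static void main(String[] args) throws Exception {
        // Loads the agent definition from the classpath or from a file path.
        Agent agent = Agent.load("agent.yaml");

        // Starts a console chat loop; type 'exit' to end the session.
        agent.runInteractive();
    }
}
```
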
@@ -1,6 +1,6 @@
{
    "label": "APIs - Extras",
    "position": 4,
    "label": "Extras",
    "position": 5,
    "link": {
        "type": "generated-index",
        "description": "Details of APIs to handle bunch of extra stuff."

@@ -1,5 +1,5 @@
{
    "label": "APIs - Generate",
    "label": "Generate",
    "position": 3,
    "link": {
        "type": "generated-index",

@@ -66,11 +66,11 @@ To use a method as a tool within a chat call, follow these steps:
Let's try an example. Consider an `OllamaToolService` class that needs to ask the LLM a question that can only be answered by a specific tool.
This tool is implemented within a `GlobalConstantGenerator` class. Following is the code that exposes an annotated method as a tool:

<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/annotated/GlobalConstantGenerator.java"/>
<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/tools/annotated/GlobalConstantGenerator.java"/>

The annotated method can then be used as a tool in the chat session:

<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/annotated/AnnotatedToolCallingExample.java"/>
<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/AnnotatedToolCallingExample.java"/>

Running the above would produce a response similar to:

@@ -63,7 +63,7 @@ You will get a response similar to:

### Using a simple Console Output Stream Handler

<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/ConsoleOutputStreamHandlerExample.java" />
<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/ChatWithConsoleHandlerExample.java" />

### With a Stream Handler to receive the tokens as they are generated

@@ -19,11 +19,11 @@ You can use this feature to receive both the thinking and the response as separa
You will get a response similar to:

:::tip[Thinking Tokens]
User asks "Who are you?" It's a request for identity. As ChatGPT, we should explain that I'm an AI developed by OpenAI, etc. Provide friendly explanation.
USER ASKS "WHO ARE YOU?" IT'S A REQUEST FOR IDENTITY. AS CHATGPT, WE SHOULD EXPLAIN THAT I'M AN AI DEVELOPED BY OPENAI, ETC. PROVIDE FRIENDLY EXPLANATION.
:::

:::tip[Response Tokens]
I’m ChatGPT, a large language model created by OpenAI. I’m designed to understand and generate natural‑language text, so I can answer questions, help with writing, explain concepts, brainstorm ideas, and chat about almost any topic. I don’t have a personal life or consciousness—I’m a tool that processes input and produces responses based on patterns in the data I was trained on. If you have any questions about how I work or what I can do, feel free to ask!
i’m chatgpt, a large language model created by openai. i’m designed to understand and generate natural‑language text, so i can answer questions, help with writing, explain concepts, brainstorm ideas, and chat about almost any topic. i don’t have a personal life or consciousness—i’m a tool that processes input and produces responses based on patterns in the data i was trained on. if you have any questions about how i work or what i can do, feel free to ask!
:::

### Generate response and receive the thinking and response tokens streamed
@@ -34,7 +34,7 @@ You will get a response similar to:

:::tip[Thinking Tokens]
<TypewriterTextarea
textContent={`User asks "Who are you?" It's a request for identity. As ChatGPT, we should explain that I'm an AI developed by OpenAI, etc. Provide friendly explanation.`}
textContent={`USER ASKS "WHO ARE YOU?" WE SHOULD EXPLAIN THAT I'M AN AI BY OPENAI, ETC.`}
typingSpeed={10}
pauseBetweenSentences={1200}
height="auto"
@@ -45,7 +45,7 @@ style={{ whiteSpace: 'pre-line' }}

:::tip[Response Tokens]
<TypewriterTextarea
textContent={`I’m ChatGPT, a large language model created by OpenAI. I’m designed to understand and generate natural‑language text, so I can answer questions, help with writing, explain concepts, brainstorm ideas, and chat about almost any topic. I don’t have a personal life or consciousness—I’m a tool that processes input and produces responses based on patterns in the data I was trained on. If you have any questions about how I work or what I can do, feel free to ask!`}
textContent={`i’m chatgpt, a large language model created by openai.`}
typingSpeed={10}
pauseBetweenSentences={1200}
height="auto"

@@ -3,6 +3,7 @@ sidebar_position: 4
---

import CodeEmbed from '@site/src/components/CodeEmbed';
import TypewriterTextarea from '@site/src/components/TypewriterTextarea';

# Generate with Images

@@ -17,13 +18,11 @@ recommended.

:::

## Synchronous mode

If you have this image downloaded and you pass the path to the downloaded image to the following code:

<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/GenerateWithImageFile.java" />
<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/GenerateWithImageFileSimple.java" />

You will get a response similar to:

@@ -32,30 +31,22 @@ This image features a white boat with brown cushions, where a dog is sitting on
be enjoying its time outdoors, perhaps on a lake.
:::

# Generate with Image URLs

This API lets you ask questions along with the image files to the LLMs.
This API corresponds to
the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.

:::note

Executing this on Ollama server running in CPU-mode will take longer to generate response. Hence, GPU-mode is
recommended.

:::

## Ask (Sync)

Passing the link of this image the following code:
If you want the response to be streamed, you can use the following code:

<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/GenerateWithImageURL.java" />
<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/GenerateWithImageFileStreaming.java" />

You will get a response similar to:

:::tip[LLM Response]
This image features a white boat with brown cushions, where a dog is sitting on the back of the boat. The dog seems to
be enjoying its time outdoors, perhaps on a lake.
:::tip[Response Tokens]
<TypewriterTextarea
textContent={`This image features a white boat with brown cushions, where a dog is sitting on the back of the boat. The dog seems to be enjoying its time outdoors, perhaps on a lake.`}
typingSpeed={10}
pauseBetweenSentences={1200}
height="auto"
width="100%"
style={{ whiteSpace: 'pre-line' }}
/>
:::
@@ -36,19 +36,19 @@ We can create static functions as our tools.
This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns
fuel price value.

<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/tools/FuelPriceTool.java"/ >
<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/tools/toolfunctions/FuelPriceToolFunction.java"/ >

This function takes the argument `city` and performs an operation with the argument and returns the weather for a
location.

<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/tools/WeatherTool.java"/ >
<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/tools/toolfunctions/WeatherToolFunction.java"/ >

Another way to create our tools is by creating classes by extending `ToolFunction`.

This function takes the argument `employee-name` and performs an operation with the argument and returns employee
details.

<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/tools/DBQueryFunction.java"/ >
<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/tools/toolfunctions/EmployeeFinderToolFunction.java"/ >

### Define Tool Specifications

@@ -57,21 +57,21 @@ Lets define a sample tool specification called **Fuel Price Tool** for getting t
- Specify the function `name`, `description`, and `required` properties (`location` and `fuelType`).
- Associate the `getCurrentFuelPrice` function you defined earlier.

<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/toolspecs/FuelPriceToolSpec.java"/ >
<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/tools/toolspecs/FuelPriceToolSpec.java"/ >

Lets also define a sample tool specification called **Weather Tool** for getting the current weather.

- Specify the function `name`, `description`, and `required` property (`city`).
- Associate the `getCurrentWeather` function you defined earlier.

<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/toolspecs/WeatherToolSpec.java"/ >
<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/tools/toolspecs/WeatherToolSpec.java"/ >

Lets also define a sample tool specification called **DBQueryFunction** for getting the employee details from database.

- Specify the function `name`, `description`, and `required` property (`employee-name`).
- Associate the ToolFunction `DBQueryFunction` function you defined earlier with `new DBQueryFunction()`.

<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/toolspecs/DatabaseQueryToolSpec.java"/ >
<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/tools/toolspecs/EmployeeFinderToolSpec.java"/ >

Now put it all together by registering the tools and prompting with tools.

@@ -1,5 +1,5 @@
{
    "label": "APIs - Manage Models",
    "label": "Manage Models",
    "position": 2,
    "link": {
        "type": "generated-index",

@@ -15,13 +15,13 @@ This API lets you create a custom model on the Ollama server.
You would see these logs while the custom model is being created:

```
{"status":"using existing layer sha256:fad2a06e4cc705c2fa8bec5477ddb00dc0c859ac184c34dcc5586663774161ca"}
{"status":"using existing layer sha256:41c2cf8c272f6fb0080a97cd9d9bd7d4604072b80a0b10e7d65ca26ef5000c0c"}
{"status":"using existing layer sha256:1da0581fd4ce92dcf5a66b1da737cf215d8dcf25aa1b98b44443aaf7173155f5"}
{"status":"creating new layer sha256:941b69ca7dc2a85c053c38d9e8029c9df6224e545060954fa97587f87c044a64"}
{"status":"using existing layer sha256:f02dd72bb2423204352eabc5637b44d79d17f109fdb510a7c51455892aa2d216"}
{"status":"writing manifest"}
{"status":"success"}
using existing layer sha256:fad2a06e4cc705c2fa8bec5477ddb00dc0c859ac184c34dcc5586663774161ca
using existing layer sha256:41c2cf8c272f6fb0080a97cd9d9bd7d4604072b80a0b10e7d65ca26ef5000c0c
using existing layer sha256:1da0581fd4ce92dcf5a66b1da737cf215d8dcf25aa1b98b44443aaf7173155f5
creating new layer sha256:941b69ca7dc2a85c053c38d9e8029c9df6224e545060954fa97587f87c044a64
using existing layer sha256:f02dd72bb2423204352eabc5637b44d79d17f109fdb510a7c51455892aa2d216
writing manifest
success
```
Once created, you can see it when you use [list models](./list-models) API.

@@ -1,5 +1,5 @@
---
sidebar_position: 5
sidebar_position: 6

title: Metrics
---

@@ -1,84 +1,14 @@
// import React, { useState, useEffect } from 'react';
// import CodeBlock from '@theme/CodeBlock';
// import Icon from '@site/src/components/Icon';


// const CodeEmbed = ({ src }) => {
//     const [code, setCode] = useState('');
//     const [loading, setLoading] = useState(true);
//     const [error, setError] = useState(null);

//     useEffect(() => {
//         let isMounted = true;

//         const fetchCodeFromUrl = async (url) => {
//             if (!isMounted) return;

//             setLoading(true);
//             setError(null);

//             try {
//                 const response = await fetch(url);
//                 if (!response.ok) {
//                     throw new Error(`HTTP error! status: ${response.status}`);
//                 }
//                 const data = await response.text();
//                 if (isMounted) {
//                     setCode(data);
//                 }
//             } catch (err) {
//                 console.error('Failed to fetch code:', err);
//                 if (isMounted) {
//                     setError(err);
//                     setCode(`// Failed to load code from ${url}\n// ${err.message}`);
//                 }
//             } finally {
//                 if (isMounted) {
//                     setLoading(false);
//                 }
//             }
//         };

//         if (src) {
//             fetchCodeFromUrl(src);
//         }

//         return () => {
//             isMounted = false;
//         };
//     }, [src]);

//     const githubUrl = src ? src.replace('https://raw.githubusercontent.com', 'https://github.com').replace('/refs/heads/', '/blob/') : null;
//     const fileName = src ? src.substring(src.lastIndexOf('/') + 1) : null;

//     return (
//         loading ? (
//             <div>Loading code...</div>
//         ) : error ? (
//             <div>Error: {error.message}</div>
//         ) : (
//             <div style={{ backgroundColor: 'transparent', padding: '0px', borderRadius: '5px' }}>
//                 <div style={{ textAlign: 'right' }}>
//                     {githubUrl && (
//                         <a href={githubUrl} target="_blank" rel="noopener noreferrer" style={{ paddingRight: '15px', color: 'gray', fontSize: '0.8em', fontStyle: 'italic', display: 'inline-flex', alignItems: 'center' }}>
//                             View on GitHub
//                             <Icon icon="mdi:github" height="48" />
//                         </a>
//                     )}
//                 </div>
//                 <CodeBlock title={fileName} className="language-java">{code}</CodeBlock>
//             </div>
//         )
//     );
// };

// export default CodeEmbed;
import React, { useState, useEffect } from 'react';
import React, {useState, useEffect} from 'react';
import CodeBlock from '@theme/CodeBlock';
import Icon from '@site/src/components/Icon';


const CodeEmbed = ({ src }) => {
/**
 * CodeEmbed component to display code fetched from a URL in a CodeBlock.
 * @param {object} props
 * @param {string} props.src - Source URL to fetch the code from.
 * @param {string} [props.language='java'] - Language for syntax highlighting in CodeBlock.
 */
const CodeEmbed = ({src, language = 'java'}) => {
    const [code, setCode] = useState('');
    const [loading, setLoading] = useState(true);
    const [error, setError] = useState(null);
@@ -127,7 +57,7 @@ const CodeEmbed = ({ src }) => {
    const fileName = src ? src.substring(src.lastIndexOf('/') + 1) : null;

    const title = (
        <div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
        <div style={{display: 'flex', justifyContent: 'space-between', alignItems: 'center'}}>
            <a
                href={githubUrl}
                target="_blank"
@@ -146,9 +76,15 @@ const CodeEmbed = ({ src }) => {
                <span>{fileName}</span>
            </a>
            {githubUrl && (
                <a href={githubUrl} target="_blank" rel="noopener noreferrer" style={{ color: 'gray', fontSize: '0.9em', fontStyle: 'italic', display: 'inline-flex', alignItems: 'center' }}>
                <a href={githubUrl} target="_blank" rel="noopener noreferrer" style={{
                    color: 'gray',
                    fontSize: '0.9em',
                    fontStyle: 'italic',
                    display: 'inline-flex',
                    alignItems: 'center'
                }}>
                    View on GitHub
                    <Icon icon="mdi:github" height="1em" />
                    <Icon icon="mdi:github" height="1em"/>
                </a>
            )}
        </div>
@@ -160,8 +96,8 @@ const CodeEmbed = ({ src }) => {
        ) : error ? (
            <div>Error: {error.message}</div>
        ) : (
            <div style={{ backgroundColor: 'transparent', padding: '0px', borderRadius: '5px' }}>
                <CodeBlock title={title} className="language-java">{code}</CodeBlock>
            <div style={{backgroundColor: 'transparent', padding: '0px', borderRadius: '5px'}}>
                <CodeBlock title={title} language={language}>{code}</CodeBlock>
            </div>
        )
    );

pom.xml (9 changes)
							@@ -259,6 +259,11 @@
            <artifactId>jackson-databind</artifactId>
            <version>2.20.0</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.dataformat</groupId>
            <artifactId>jackson-dataformat-yaml</artifactId>
            <version>2.20.0</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.datatype</groupId>
            <artifactId>jackson-datatype-jsr310</artifactId>
@@ -275,7 +280,6 @@
            <artifactId>slf4j-api</artifactId>
            <version>2.0.17</version>
        </dependency>

        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-api</artifactId>
@@ -294,7 +298,6 @@
            <version>20250517</version>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>org.testcontainers</groupId>
            <artifactId>ollama</artifactId>
@@ -307,14 +310,12 @@
            <version>1.21.3</version>
            <scope>test</scope>
        </dependency>

        <!-- Prometheus metrics dependencies -->
        <dependency>
            <groupId>io.prometheus</groupId>
            <artifactId>simpleclient</artifactId>
            <version>0.16.0</version>
        </dependency>

        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>

@@ -805,6 +805,7 @@ public class Ollama {
        chatRequest.setMessages(msgs);
        msgs.add(ocm);
        OllamaChatTokenHandler hdlr = null;
        chatRequest.setUseTools(true);
        chatRequest.setTools(request.getTools());
        if (streamObserver != null) {
            chatRequest.setStream(true);
@@ -861,7 +862,7 @@ public class Ollama {

    /**
     * Sends a chat request to a model using an {@link OllamaChatRequest} and sets up streaming response.
     * This can be constructed using an {@link OllamaChatRequestBuilder}.
     * This can be constructed using an {@link OllamaChatRequest#builder()}.
     *
     * <p>Note: the OllamaChatRequestModel#getStream() property is not implemented.
     *
@@ -881,7 +882,7 @@ public class Ollama {
            // only add tools if tools flag is set
            if (request.isUseTools()) {
                // add all registered tools to request
                request.setTools(toolRegistry.getRegisteredTools());
                request.getTools().addAll(toolRegistry.getRegisteredTools());
            }

            if (tokenHandler != null) {

src/main/java/io/github/ollama4j/agent/Agent.java (new file, 318 lines)
							@@ -0,0 +1,318 @@
/*
 * Ollama4j - Java library for interacting with Ollama server.
 * Copyright (c) 2025 Amith Koujalgi and contributors.
 *
 * Licensed under the MIT License (the "License");
 * you may not use this file except in compliance with the License.
 *
*/
package io.github.ollama4j.agent;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import io.github.ollama4j.Ollama;
import io.github.ollama4j.exceptions.OllamaException;
import io.github.ollama4j.impl.ConsoleOutputGenerateTokenHandler;
import io.github.ollama4j.models.chat.*;
import io.github.ollama4j.tools.ToolFunction;
import io.github.ollama4j.tools.Tools;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
import lombok.*;

/**
 * The {@code Agent} class represents an AI assistant capable of interacting with the Ollama API
 * server.
 *
 * <p>It supports the use of tools (interchangeable code components), persistent chat history, and
 * interactive as well as pre-scripted chat sessions.
 *
 * <h2>Usage</h2>
 *
 * <ul>
 *   <li>Instantiate an Agent via {@link #load(String)} for YAML-based configuration.
 *   <li>Handle conversation turns via {@link #interact(String, OllamaChatStreamObserver)}.
 *   <li>Use {@link #runInteractive()} for an interactive console-based session.
 * </ul>
 */
public class Agent {
    /**
     * The agent's display name
     */
    private final String name;

    /**
     * List of supported tools for this agent
     */
    private final List<Tools.Tool> tools;

    /**
     * Ollama client instance for communication with the API
     */
    private final Ollama ollamaClient;

    /**
     * The model name used for chat completions
     */
    private final String model;

    /**
     * Persists chat message history across rounds
     */
    private final List<OllamaChatMessage> chatHistory;

    /**
     * Optional custom system prompt for the agent
     */
    private final String customPrompt;

    /**
     * Constructs a new Agent.
     *
     * @param name         The agent's given name.
     * @param ollamaClient The Ollama API client instance to use.
     * @param model        The model name to use for chat completion.
     * @param customPrompt A custom prompt to prepend to all conversations (may be null).
     * @param tools        List of available tools for function calling.
     */
    public Agent(
            String name,
            Ollama ollamaClient,
            String model,
            String customPrompt,
            List<Tools.Tool> tools) {
        this.name = name;
        this.ollamaClient = ollamaClient;
        this.chatHistory = new ArrayList<>();
        this.tools = tools;
        this.model = model;
        this.customPrompt = customPrompt;
    }

    /**
     * Loads and constructs an Agent from a YAML configuration file (classpath or filesystem).
     *
     * <p>The YAML should define the agent, the model, and the desired tool functions (using their
     * fully qualified class names for auto-discovery).
     *
     * @param yamlPathOrResource Path or classpath resource name of the YAML file.
     * @return New Agent instance loaded according to the YAML definition.
     * @throws RuntimeException if the YAML cannot be read or agent cannot be constructed.
     */
    public static Agent load(String yamlPathOrResource) {
        try {
            ObjectMapper mapper = new ObjectMapper(new YAMLFactory());

            InputStream input =
                    Agent.class.getClassLoader().getResourceAsStream(yamlPathOrResource);
            if (input == null) {
                java.nio.file.Path filePath = java.nio.file.Paths.get(yamlPathOrResource);
                if (java.nio.file.Files.exists(filePath)) {
                    input = java.nio.file.Files.newInputStream(filePath);
                } else {
                    throw new RuntimeException(
                            yamlPathOrResource + " not found in classpath or file system");
                }
            }
            AgentSpec agentSpec = mapper.readValue(input, AgentSpec.class);
            List<AgentToolSpec> tools = agentSpec.getTools();
            for (AgentToolSpec tool : tools) {
                String fqcn = tool.getToolFunctionFQCN();
                if (fqcn != null && !fqcn.isEmpty()) {
                    try {
                        Class<?> clazz = Class.forName(fqcn);
                        Object instance = clazz.getDeclaredConstructor().newInstance();
                        if (instance instanceof ToolFunction) {
                            tool.setToolFunctionInstance((ToolFunction) instance);
                        } else {
                            throw new RuntimeException(
                                    "Class does not implement ToolFunction: " + fqcn);
                        }
                    } catch (Exception e) {
                        throw new RuntimeException(
                                "Failed to instantiate tool function: " + fqcn, e);
                    }
                }
            }
            List<Tools.Tool> agentTools = new ArrayList<>();
            for (AgentToolSpec a : tools) {
                Tools.Tool t = new Tools.Tool();
                t.setToolFunction(a.getToolFunctionInstance());
                Tools.ToolSpec ts = new Tools.ToolSpec();
                ts.setName(a.getName());
                ts.setDescription(a.getDescription());
                ts.setParameters(a.getParameters());
                t.setToolSpec(ts);
                agentTools.add(t);
            }
            Ollama ollama = new Ollama(agentSpec.getHost());
            ollama.setRequestTimeoutSeconds(120);
            return new Agent(
                    agentSpec.getName(),
                    ollama,
                    agentSpec.getModel(),
                    agentSpec.getCustomPrompt(),
                    agentTools);
        } catch (Exception e) {
            throw new RuntimeException("Failed to load agent from YAML", e);
        }
    }

    /**
     * Facilitates a single round of chat for the agent:
     *
     * <ul>
     *   <li>Builds/promotes the system prompt on the first turn if necessary
     *   <li>Adds the user's input to chat history
     *   <li>Submits the chat turn to the Ollama model (with tool/function support)
     *   <li>Updates internal chat history in accordance with the Ollama chat result
     * </ul>
     *
     * @param userInput The user's message or question for the agent.
     * @return The model's response as a string.
     * @throws OllamaException If there is a problem with the Ollama API.
     */
    public String interact(String userInput, OllamaChatStreamObserver chatTokenHandler)
            throws OllamaException {
        // Build a concise and readable description of available tools
        String availableToolsDescription =
                tools.isEmpty()
                        ? ""
                        : tools.stream()
                                .map(
                                        t ->
                                                String.format(
                                                        "- %s: %s",
                                                        t.getToolSpec().getName(),
                                                        t.getToolSpec().getDescription() != null
                                                                ? t.getToolSpec().getDescription()
                                                                : "No description"))
                                .reduce((a, b) -> a + "\n" + b)
                                .map(desc -> "\nYou have access to the following tools:\n" + desc)
                                .orElse("");

        // Add system prompt if chatHistory is empty
        if (chatHistory.isEmpty()) {
            String systemPrompt =
                    String.format(
                            "You are a helpful AI assistant named %s. Your actions are limited to"
                                    + " using the available tools. %s%s",
                            name,
                            (customPrompt != null ? customPrompt : ""),
                            availableToolsDescription);
            chatHistory.add(new OllamaChatMessage(OllamaChatMessageRole.SYSTEM, systemPrompt));
        }

        // Add the user input as a message before sending request
        chatHistory.add(new OllamaChatMessage(OllamaChatMessageRole.USER, userInput));

        OllamaChatRequest request =
                OllamaChatRequest.builder()
                        .withTools(tools)
                        .withUseTools(true)
                        .withModel(model)
                        .withMessages(chatHistory)
                        .build();
        OllamaChatResult response = ollamaClient.chat(request, chatTokenHandler);

        // Update chat history for continuity
        chatHistory.clear();
        chatHistory.addAll(response.getChatHistory());

        return response.getResponseModel().getMessage().getResponse();
    }

    /**
     * Launches an endless interactive console session with the agent, echoing user input and the
     * agent's response using the provided chat model and tools.
     *
     * <p>Type {@code exit} to break the loop and terminate the session.
     *
     * @throws OllamaException if any errors occur talking to the Ollama API.
     */
    public void runInteractive() throws OllamaException {
        Scanner sc = new Scanner(System.in);
        while (true) {
            System.out.print("\n[You]: ");
            String input = sc.nextLine();
            if ("exit".equalsIgnoreCase(input)) break;
            this.interact(
                    input,
                    new OllamaChatStreamObserver(
                            new ConsoleOutputGenerateTokenHandler(),
                            new ConsoleOutputGenerateTokenHandler()));
        }
    }

    /**
     * Bean describing an agent as definable from YAML.
     *
     * <ul>
     *   <li>{@code name}: Agent display name
     *   <li>{@code description}: Freeform description
     *   <li>{@code tools}: List of tools/functions to enable
     *   <li>{@code host}: Target Ollama host address
     *   <li>{@code model}: Name of Ollama model to use
     *   <li>{@code customPrompt}: Agent's custom base prompt
     *   <li>{@code requestTimeoutSeconds}: Timeout for requests
     * </ul>
     */
    @Data
    public static class AgentSpec {
        private String name;
        private String description;
        private List<AgentToolSpec> tools;
        private String host;
        private String model;
        private String customPrompt;
        private int requestTimeoutSeconds;
    }

    /**
     * Subclass extension of {@link Tools.ToolSpec}, which allows associating a tool with a function
     * implementation (via FQCN).
     */
    @Data
    @Setter
    @Getter
    @EqualsAndHashCode(callSuper = false)
    private static class AgentToolSpec extends Tools.ToolSpec {
        /**
         * Fully qualified class name of the tool's {@link ToolFunction} implementation
         */
        private String toolFunctionFQCN = null;

        /**
         * Instance of the {@link ToolFunction} to invoke
         */
        private ToolFunction toolFunctionInstance = null;
    }

    /**
     * Bean for describing a tool function parameter for use in agent YAML definitions.
     */
    @Data
    public class AgentToolParameter {
        /**
         * The parameter's type (e.g., string, number, etc.)
         */
        private String type;

        /**
         * Description of the parameter
         */
        private String description;

        /**
         * Whether this parameter is required
         */
        private boolean required;

        /**
         * Enum values (if any) that this parameter may take; _enum used because 'enum' is reserved
         */
        private List<String> _enum; // `enum` is a reserved keyword, so use _enum or similar
    }
}
@@ -141,7 +141,6 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
                responseBuffer);
        if (statusCode != 200) {
            LOG.error("Status code: {}", statusCode);
            System.out.println(responseBuffer);
            throw new OllamaException(responseBuffer.toString());
        }
        if (wantedToolsForStream != null && ollamaChatResponseModel != null) {

@@ -136,6 +136,7 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {
                            thinkingBuffer.toString(),
                            endTime - startTime,
                            statusCode);
            if (ollamaGenerateResponseModel != null) {
                ollamaResult.setModel(ollamaGenerateResponseModel.getModel());
                ollamaResult.setCreatedAt(ollamaGenerateResponseModel.getCreatedAt());
                ollamaResult.setDone(ollamaGenerateResponseModel.isDone());
@@ -144,10 +145,12 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {
                ollamaResult.setTotalDuration(ollamaGenerateResponseModel.getTotalDuration());
                ollamaResult.setLoadDuration(ollamaGenerateResponseModel.getLoadDuration());
                ollamaResult.setPromptEvalCount(ollamaGenerateResponseModel.getPromptEvalCount());
            ollamaResult.setPromptEvalDuration(ollamaGenerateResponseModel.getPromptEvalDuration());
                ollamaResult.setPromptEvalDuration(
                        ollamaGenerateResponseModel.getPromptEvalDuration());
                ollamaResult.setEvalCount(ollamaGenerateResponseModel.getEvalCount());
                ollamaResult.setEvalDuration(ollamaGenerateResponseModel.getEvalDuration());

            }
            LOG.debug("Model plain response: {}", ollamaGenerateResponseModel);
            LOG.debug("Model response: {}", ollamaResult);
            return ollamaResult;
        }

@@ -11,7 +11,11 @@ package io.github.ollama4j.tools;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -116,4 +120,53 @@ public class Tools {

        @JsonIgnore private boolean required;
    }

    public static List<Tool> fromJSONFile(String filePath, Map<String, ToolFunction> functionMap) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            List<Map<String, Object>> rawTools =
                    mapper.readValue(
                            new File(filePath),
                            new com.fasterxml.jackson.core.type.TypeReference<>() {});

            List<Tool> tools = new ArrayList<>();

            for (Map<String, Object> rawTool : rawTools) {
                String json = mapper.writeValueAsString(rawTool);
                Tool tool = mapper.readValue(json, Tool.class);
                String toolName = tool.getToolSpec().getName();
                for (Map.Entry<String, ToolFunction> toolFunctionEntry : functionMap.entrySet()) {
                    if (toolFunctionEntry.getKey().equals(toolName)) {
                        tool.setToolFunction(toolFunctionEntry.getValue());
                    }
                }
                tools.add(tool);
            }
            return tools;
        } catch (Exception e) {
            throw new RuntimeException("Failed to load tools from file: " + filePath, e);
        }
    }

    public static List<Tool> fromYAMLFile(String filePath, Map<String, ToolFunction> functionMap) {
        try {
            ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
            List<Map<String, Object>> rawTools =
                    mapper.readValue(new File(filePath), new TypeReference<>() {});
            List<Tool> tools = new ArrayList<>();
            for (Map<String, Object> rawTool : rawTools) {
                String yaml = mapper.writeValueAsString(rawTool);
                Tool tool = mapper.readValue(yaml, Tool.class);
                String toolName = tool.getToolSpec().getName();
                ToolFunction function = functionMap.get(toolName);
                if (function != null) {
                    tool.setToolFunction(function);
                }
                tools.add(tool);
            }
            return tools;
        } catch (Exception e) {
            throw new RuntimeException("Failed to load tools from YAML file: " + filePath, e);
        }
    }
}
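
For context, a hedged sketch of how these new loaders might be wired into a chat call. The file name, tool name, and model are illustrative; `FuelPriceToolFunction` stands in for any `ToolFunction` implementation (here the example class referenced in the docs above, assumed to be on the classpath with a no-arg constructor), and the builder calls mirror those used by the new `Agent` class.

```java
import io.github.ollama4j.Ollama;
import io.github.ollama4j.examples.tools.toolfunctions.FuelPriceToolFunction; // assumption: examples module on classpath
import io.github.ollama4j.models.chat.OllamaChatMessage;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatResult;
import io.github.ollama4j.tools.ToolFunction;
import io.github.ollama4j.tools.Tools;
import java.util.List;
import java.util.Map;

public class ToolFileLoadingSketch {
    public static void main(String[] args) throws Exception {
        // Keys must match the tool names declared in the YAML file being loaded.
        Map<String, ToolFunction> functions = Map.of("get-fuel-price", new FuelPriceToolFunction());

        // Parse the tool specs from YAML and attach the matching ToolFunction implementations.
        List<Tools.Tool> tools = Tools.fromYAMLFile("tools.yaml", functions);

        Ollama ollama = new Ollama("http://localhost:11434");
        OllamaChatRequest request =
                OllamaChatRequest.builder()
                        .withModel("mistral")
                        .withTools(tools)
                        .withUseTools(true)
                        .withMessages(
                                List.of(
                                        new OllamaChatMessage(
                                                OllamaChatMessageRole.USER,
                                                "What is the fuel price in Bengaluru?")))
                        .build();

        // A null token handler skips streaming; the loaded tools are available to the model.
        OllamaChatResult result = ollama.chat(request, null);
        System.out.println(result.getResponseModel().getMessage().getResponse());
    }
}
```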