diff --git a/docs/docs/apis-generate/chat-with-tools.md b/docs/docs/apis-generate/chat-with-tools.md
new file mode 100644
index 0000000..577c26e
--- /dev/null
+++ b/docs/docs/apis-generate/chat-with-tools.md
@@ -0,0 +1,69 @@
+---
+sidebar_position: 8
+---
+
+import CodeEmbed from '@site/src/components/CodeEmbed';
+
+# Chat with Tools
+
+### Using Tools in Chat
+
+If you want a natural back-and-forth chat experience with tools, you can integrate tools directly into
+the `chat()` method instead of using the `generateWithTools()` method. This lets you register tools that are
+invoked automatically during the conversation between the user and the assistant, creating a more interactive
+experience.
+
+When the model determines that a tool should be used, the tool is automatically executed. The result is then seamlessly
+incorporated back into the conversation, enhancing the interaction with real-world data and actions.
+
+The following example demonstrates a simple tool registered with the `OllamaAPI` and then used within a chat
+session. The tool invocation and response handling are managed internally by the API.
+
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/ChatWithTools.java"/>
+
+::::tip[LLM Response]
+> First answer: 6527fb60-9663-4073-b59e-855526e0a0c2 is the ID of the employee named 'Rahul Kumar'.
+>
+> Second answer:  Kumar is the last name of the employee named 'Rahul Kumar'.
+::::
+
+This tool calling can also be done using the streaming API.
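+
+As a rough sketch of the flow: register a tool specification with the client, then chat as usual. The snippet below assumes a `Tools.ToolSpecification` has already been built (as in the Generate with Tools examples) and that explicit registration goes through `registerTool()`; refer to the embedded example above for the exact API and model name.
+
+```java
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.models.chat.OllamaChatMessageRole;
+import io.github.ollama4j.models.chat.OllamaChatRequest;
+import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
+import io.github.ollama4j.models.chat.OllamaChatResult;
+import io.github.ollama4j.tools.Tools;
+
+public class ChatWithToolsSketch {
+
+    // The tool specification is assumed to be built elsewhere (see Generate with Tools)
+    public static void askWithTool(OllamaAPI ollamaAPI, Tools.ToolSpecification employeeFinderTool) throws Exception {
+        ollamaAPI.registerTool(employeeFinderTool);
+
+        OllamaChatRequest request = OllamaChatRequestBuilder.getInstance("llama3.1") // model name is illustrative
+                .withMessage(OllamaChatMessageRole.USER, "Give me the ID of the employee named 'Rahul Kumar'.")
+                .build();
+
+        // The registered tool is invoked automatically when the model asks for it,
+        // and its result is folded back into the assistant's reply
+        OllamaChatResult chatResult = ollamaAPI.chat(request);
+        System.out.println(chatResult.getResponseModel().getMessage().getContent());
+    }
+}
+```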
+
+### Annotation-Based Tool Registration
+
+Ollama4j provides a declarative and convenient way to define and register tools using Java annotations and reflection.
+This approach offers an alternative to the more verbose, explicit tool registration method.
+
+To use a method as a tool within a chat call, follow these steps (a minimal sketch follows the list):
+
+* **Annotate the Tool Method:**
+    * Use the `@ToolSpec` annotation to mark a method as a tool. This annotation describes the tool's purpose.
+    * Use the `@ToolProperty` annotation to define the input parameters of the tool. The following data types are
+      currently supported:
+        * `java.lang.String`
+        * `java.lang.Integer`
+        * `java.lang.Boolean`
+        * `java.math.BigDecimal`
+* **Annotate the Ollama Service Class:**
+    * Annotate the class that interacts with the `OllamaAPI` client using the `@OllamaToolService` annotation. Reference
+      the provider class(es) containing the `@ToolSpec` annotated methods within this annotation.
+* **Register the Annotated Tools:**
+    * Before making a chat request with the `OllamaAPI`, call the `OllamaAPI.registerAnnotatedTools()` method. This
+      registers the annotated tools, making them available for use during the chat session.
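+
+Here is a minimal sketch of the pattern. The import paths and annotation attribute names (`providers`, `name`, `desc`) are indicative only; see the embedded `GlobalConstantGenerator` example below for the exact signatures.
+
+```java
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.tools.annotations.OllamaToolService;
+import io.github.ollama4j.tools.annotations.ToolProperty;
+import io.github.ollama4j.tools.annotations.ToolSpec;
+
+// Provider class: its @ToolSpec-annotated methods become tools
+class ConstantProvider {
+
+    @ToolSpec(desc = "Returns an important constant rounded to the requested number of decimal places.")
+    public static String importantConstant(
+            @ToolProperty(name = "digits", desc = "Number of decimal places to keep") Integer digits) {
+        // Toy implementation: round Euler's number
+        return BigDecimal.valueOf(Math.E).setScale(digits, RoundingMode.HALF_UP).toPlainString();
+    }
+}
+
+// Service class that talks to the OllamaAPI client and references the provider class
+@OllamaToolService(providers = {ConstantProvider.class})
+class ConstantToolService {
+
+    void ask(OllamaAPI ollamaAPI) throws Exception {
+        ollamaAPI.registerAnnotatedTools(); // picks up the annotated methods via reflection
+        // ...build an OllamaChatRequest and call ollamaAPI.chat(...) as usual
+    }
+}
+```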
+
+Let's try an example. Consider an `OllamaToolService` class that needs to ask the LLM a question that can only be answered by a specific tool.
+This tool is implemented in a `GlobalConstantGenerator` class. The following code exposes an annotated method as a tool:
+
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/annotated/GlobalConstantGenerator.java"/>
+
+The annotated method can then be used as a tool in the chat session:
+
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/annotated/AnnotatedToolCallingExample.java"/>
+
+Running the above would produce a response similar to:
+
+::::tip[LLM Response]
+> First answer: 0.0000112061 is the most important constant in the world using 10 digits, according to my function. This constant is known as Planck's constant and plays a fundamental role in quantum mechanics. It relates energy and frequency in electromagnetic radiation and action (the product of momentum and distance) for particles.
+>
+> Second answer: 3-digit constant: 8.001
+::::
diff --git a/docs/docs/apis-generate/chat.md b/docs/docs/apis-generate/chat.md
index d5aabe9..d5a147c 100644
--- a/docs/docs/apis-generate/chat.md
+++ b/docs/docs/apis-generate/chat.md
@@ -9,262 +9,93 @@ import CodeEmbed from '@site/src/components/CodeEmbed';
 This API lets you create a conversation with LLMs. Using this API enables you to ask questions to the model including
 information using the history of already asked questions and the respective answers.
 
+### Create a new conversation and use chat history to augment follow-up questions
 
-
-## Create a new conversation and use chat history to augment follow up questions
-
-```java
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.models.chat.OllamaChatMessageRole;
-import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
-import io.github.ollama4j.models.chat.OllamaChatRequest;
-import io.github.ollama4j.models.chat.OllamaChatResult;
-import io.github.ollama4j.types.OllamaModelType;
-
-public class Main {
-
-    public static void main(String[] args) {
-
-        String host = "http://localhost:11434/";
-
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
-        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
-
-        // create first user question
-        OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
-                .build();
-
-        // start conversation with model
-        OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
-
-        System.out.println("First answer: " + chatResult.getResponseModel().getMessage().getContent());
-
-        // create next userQuestion
-        requestModel = builder.withMessages(chatResult.getChatHistory()).withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?").build();
-
-        // "continue" conversation with model
-        chatResult = ollamaAPI.chat(requestModel);
-
-        System.out.println("Second answer: " + chatResult.getResponseModel().getMessage().getContent());
-
-        System.out.println("Chat History: " + chatResult.getChatHistory());
-    }
-}
-
-```
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/ChatExample.java" />
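+
+The essential pattern in the embedded example is: send the first user message, then pass `getChatHistory()` back into the builder for the follow-up question. A condensed sketch (the model name is illustrative):
+
+```java
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.models.chat.OllamaChatMessageRole;
+import io.github.ollama4j.models.chat.OllamaChatRequest;
+import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
+import io.github.ollama4j.models.chat.OllamaChatResult;
+
+public class ChatHistorySketch {
+
+    public static void main(String[] args) throws Exception {
+        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
+        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("llama3.1");
+
+        // First user question
+        OllamaChatRequest request = builder
+                .withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
+                .build();
+        OllamaChatResult chatResult = ollamaAPI.chat(request);
+        System.out.println("First answer: " + chatResult.getResponseModel().getMessage().getContent());
+
+        // Follow-up question augmented with the history of the first exchange
+        request = builder
+                .withMessages(chatResult.getChatHistory())
+                .withMessage(OllamaChatMessageRole.USER, "And what is the second largest city?")
+                .build();
+        chatResult = ollamaAPI.chat(request);
+        System.out.println("Second answer: " + chatResult.getResponseModel().getMessage().getContent());
+    }
+}
+```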
 
 You will get a response similar to:
 
-> First answer: Should be Paris!
+::::tip[LLM Response]
+
+> First answer: The capital of France is Paris.
 >
-> Second answer: Marseille.
+> Second answer: The second-largest city in France is Marseille.
 >
 > Chat History:
 
 ```json
-[
-  {
-    "role": "user",
-    "content": "What is the capital of France?",
-    "images": []
-  },
-  {
-    "role": "assistant",
-    "content": "Should be Paris!",
-    "images": []
-  },
-  {
-    "role": "user",
-    "content": "And what is the second largest city?",
-    "images": []
-  },
-  {
-    "role": "assistant",
-    "content": "Marseille.",
-    "images": []
-  }
-]
+[{
+  "role" : "user",
+  "content" : "What is the capital of France?",
+  "images" : null,
+  "tool_calls" : [ ]
+}, {
+  "role" : "assistant",
+  "content" : "The capital of France is Paris.",
+  "images" : null,
+  "tool_calls" : null
+}, {
+  "role" : "user",
+  "content" : "And what is the second largest city?",
+  "images" : null,
+  "tool_calls" : [ ]
+}, {
+  "role" : "assistant",
+  "content" : "The second-largest city in France is Marseille.",
+  "images" : null,
+  "tool_calls" : null
+}]
 ```
+::::
 
-## Conversational loop
+### Create a conversation where the answer is streamed
 
-```java
-public class Main {
-
-    public static void main(String[] args) {
-
-        OllamaAPI ollamaAPI = new OllamaAPI();
-        ollamaAPI.setRequestTimeoutSeconds(60);
-
-        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("<your-model>");
-
-        OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "<your-first-message>").build();
-        OllamaChatResult initialChatResult = ollamaAPI.chat(requestModel);
-        System.out.println(initialChatResult.getResponse());
-
-        List<OllamaChatMessage> history = initialChatResult.getChatHistory();
-
-        while (true) {
-            OllamaChatResult chatResult = ollamaAPI.chat(builder.withMessages(history).withMessage(OllamaChatMessageRole.USER, "<your-new-message").build());
-            System.out.println(chatResult.getResponse());
-            history = chatResult.getChatHistory();
-        }
-    }
-}
-```
-
-## Create a conversation where the answer is streamed
-
-```java
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.models.chat.OllamaChatMessageRole;
-import io.github.ollama4j.models.chat.OllamaChatRequest;
-import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
-import io.github.ollama4j.models.chat.OllamaChatResult;
-import io.github.ollama4j.models.generate.OllamaStreamHandler;
-
-
-public class Main {
-
-    public static void main(String[] args) {
-
-        String host = "http://localhost:11434/";
-
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
-        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
-        OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER,
-                        "What is the capital of France? And what's France's connection with Mona Lisa?")
-                .build();
-
-        // define a handler (Consumer<String>)
-        OllamaStreamHandler streamHandler = (s) -> {
-            System.out.println(s);
-        };
-
-        OllamaChatResult chatResult = ollamaAPI.chat(requestModel, streamHandler);
-    }
-}
-```
-
-You will get a response similar to:
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/ChatStreamingWithTokenConcatenationExample.java" />
 
+::::tip[LLM Response]
+>
 > The
+>
 > The capital
+>
 > The capital of
+>
 > The capital of France
+>
 > The capital of France is
+>
 > The capital of France is Paris
+>
 > The capital of France is Paris.
+>
+::::
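+
+Under the hood, the streaming variant simply passes an `OllamaStreamHandler` as a second argument to `chat()`; each invocation of the handler receives the partial response accumulated so far. A condensed sketch (the model name is illustrative):
+
+```java
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.models.chat.OllamaChatMessageRole;
+import io.github.ollama4j.models.chat.OllamaChatRequest;
+import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
+import io.github.ollama4j.models.generate.OllamaStreamHandler;
+
+public class ChatStreamingSketch {
+
+    public static void main(String[] args) throws Exception {
+        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
+        OllamaChatRequest request = OllamaChatRequestBuilder.getInstance("llama3.1")
+                .withMessage(OllamaChatMessageRole.USER,
+                        "What is the capital of France? And what's France's connection with Mona Lisa?")
+                .build();
+
+        // Called with the partial response generated so far
+        OllamaStreamHandler streamHandler = (s) -> System.out.println(s);
+
+        ollamaAPI.chat(request, streamHandler);
+    }
+}
+```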
 
-## Use a simple Console Output Stream Handler
+### Use a simple Console Output Stream Handler
 
-```java
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.impl.ConsoleOutputStreamHandler;
-import io.github.ollama4j.models.chat.OllamaChatMessageRole;
-import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
-import io.github.ollama4j.models.chat.OllamaChatRequest;
-import io.github.ollama4j.models.generate.OllamaStreamHandler;
-import io.github.ollama4j.types.OllamaModelType;
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/ConsoleOutputStreamHandlerExample.java" />
 
-public class Main {
-    public static void main(String[] args) throws Exception {
-        String host = "http://localhost:11434/";
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
+### Use a Stream Handler to receive tokens as they are generated
 
-        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
-        OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.USER, "List all cricket world cup teams of 2019. Name the teams!")
-                .build();
-        OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
-        ollamaAPI.chat(requestModel, streamHandler);
-    }
-}
-```
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/ChatStreamingExample.java" />
 
-## Create a new conversation with individual system prompt
+### Create a new conversation with a custom system prompt
 
-```java
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.models.chat.OllamaChatMessageRole;
-import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
-import io.github.ollama4j.models.chat.OllamaChatRequest;
-import io.github.ollama4j.models.chat.OllamaChatResult;
-import io.github.ollama4j.types.OllamaModelType;
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/ChatWithCustomSystemPrompt.java" />
+
+You will get a response similar to:
+
+::::tip[LLM Response]
+> Shhh!
+::::
 
 
-public class Main {
+### Create a conversation about an image (requires a vision model)
 
-    public static void main(String[] args) {
-
-        String host = "http://localhost:11434/";
-
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
-        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
-
-        // create request with system-prompt (overriding the model defaults) and user question
-        OllamaChatRequest requestModel = builder.withMessage(OllamaChatMessageRole.SYSTEM, "You are a silent bot that only says 'NI'. Do not say anything else under any circumstances!")
-                .withMessage(OllamaChatMessageRole.USER, "What is the capital of France? And what's France's connection with Mona Lisa?")
-                .build();
-
-        // start conversation with model
-        OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
-
-        System.out.println(chatResult.getResponseModel());
-    }
-}
-
-```
-
-You will get a response similar to:
-
-> NI.
-
-## Create a conversation about an image (requires model with image recognition skills)
-
-```java
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.models.chat.OllamaChatMessageRole;
-import io.github.ollama4j.models.chat.OllamaChatRequest;
-import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
-import io.github.ollama4j.models.chat.OllamaChatResult;
-import io.github.ollama4j.types.OllamaModelType;
-
-import java.io.File;
-import java.util.List;
-
-public class Main {
-
-    public static void main(String[] args) {
-
-        String host = "http://localhost:11434/";
-
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
-        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAVA);
-
-        // Load Image from File and attach to user message (alternatively images could also be added via URL)
-        OllamaChatRequest requestModel =
-                builder.withMessage(OllamaChatMessageRole.USER, "What's in the picture?",
-                        List.of(
-                                new File("/path/to/image"))).build();
-
-        OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
-        System.out.println("First answer: " + chatResult.getResponseModel());
-
-        builder.reset();
-
-        // Use history to ask further questions about the image or assistant answer
-        requestModel =
-                builder.withMessages(chatResult.getChatHistory())
-                        .withMessage(OllamaChatMessageRole.USER, "What's the dogs breed?").build();
-
-        chatResult = ollamaAPI.chat(requestModel);
-        System.out.println("Second answer: " + chatResult.getResponseModel());
-    }
-}
-```
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/ChatWithImage.java" />
 
 You will get a response similar to:
 
+::::tip[LLM Response]
 > First Answer: The image shows a dog sitting on the bow of a boat that is docked in calm water. The boat has two
 > levels, with the lower level containing seating and what appears to be an engine cover. The dog seems relaxed and
 > comfortable on the boat, looking out over the water. The background suggests it might be late afternoon or early
@@ -274,5 +105,4 @@ You will get a response similar to:
 > appears to be medium-sized with a short coat and a brown coloration, which might suggest that it is a Golden Retriever
 > or a similar breed. Without more details like ear shape and tail length, it's not possible to identify the exact breed
 > confidently.
-
-<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/ChatExample.java" />
\ No newline at end of file
+::::
diff --git a/docs/docs/apis-generate/custom-roles.md b/docs/docs/apis-generate/custom-roles.md
index c735827..44df8b1 100644
--- a/docs/docs/apis-generate/custom-roles.md
+++ b/docs/docs/apis-generate/custom-roles.md
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 8
+sidebar_position: 9
 ---
 
 # Custom Roles
diff --git a/docs/docs/apis-generate/generate-async.md b/docs/docs/apis-generate/generate-async.md
index 1b8b47c..1f2ca9e 100644
--- a/docs/docs/apis-generate/generate-async.md
+++ b/docs/docs/apis-generate/generate-async.md
@@ -2,7 +2,9 @@
 sidebar_position: 2
 ---
 
-# Generate - Async
+import CodeEmbed from '@site/src/components/CodeEmbed';
+
+# Generate (Async)
 
 This API lets you ask questions to the LLMs in an asynchronous way.
 This is particularly helpful when you want to issue a generate request to the LLM and collect the response in the
@@ -11,38 +13,18 @@ background (such as threads) without blocking your code until the response arriv
 This API corresponds to
 the [completion](https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion) API.
 
-```java
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
-import io.github.ollama4j.types.OllamaModelType;
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/GenerateAsync.java" />
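+
+The embedded example polls an `OllamaAsyncResultStreamer` for tokens until the request completes; the core loop looks roughly like this:
+
+```java
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.models.response.OllamaAsyncResultStreamer;
+import io.github.ollama4j.types.OllamaModelType;
+
+public class GenerateAsyncSketch {
+
+    public static void main(String[] args) throws Exception {
+        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
+        ollamaAPI.setRequestTimeoutSeconds(60);
+
+        String prompt = "List all cricket world cup teams of 2019.";
+        OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync(OllamaModelType.LLAMA3, prompt, false);
+
+        // Poll for newly generated tokens; a smaller interval delivers tokens more frequently
+        int pollIntervalMilliseconds = 1000;
+        while (true) {
+            System.out.print(streamer.getStream().poll());
+            if (!streamer.isAlive()) {
+                break;
+            }
+            Thread.sleep(pollIntervalMilliseconds);
+        }
+
+        System.out.println("\nComplete response: " + streamer.getCompleteResponse());
+    }
+}
+```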
 
-public class Main {
+::::tip[LLM Response]
+Here are the participating teams in the 2019 ICC Cricket World Cup:
 
-    public static void main(String[] args) throws Exception {
-        String host = "http://localhost:11434/";
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
-        ollamaAPI.setRequestTimeoutSeconds(60);
-        String prompt = "List all cricket world cup teams of 2019.";
-        OllamaAsyncResultStreamer streamer = ollamaAPI.generateAsync(OllamaModelType.LLAMA3, prompt, false);
-
-        // Set the poll interval according to your needs. 
-        // Smaller the poll interval, more frequently you receive the tokens.
-        int pollIntervalMilliseconds = 1000;
-
-        while (true) {
-            String tokens = streamer.getStream().poll();
-            System.out.print(tokens);
-            if (!streamer.isAlive()) {
-                break;
-            }
-            Thread.sleep(pollIntervalMilliseconds);
-        }
-
-        System.out.println("\n------------------------");
-        System.out.println("Complete Response:");
-        System.out.println("------------------------");
-
-        System.out.println(streamer.getCompleteResponse());
-    }
-}
-```
\ No newline at end of file
+1. Australia
+2. Bangladesh
+3. India
+4. New Zealand
+5. Pakistan
+6. England
+7. South Africa
+8. West Indies (as a team)
+9. Afghanistan
+::::
\ No newline at end of file
diff --git a/docs/docs/apis-generate/generate-embeddings.md b/docs/docs/apis-generate/generate-embeddings.md
index ae2aaa1..1adcde9 100644
--- a/docs/docs/apis-generate/generate-embeddings.md
+++ b/docs/docs/apis-generate/generate-embeddings.md
@@ -1,112 +1,49 @@
 ---
-sidebar_position: 6
+sidebar_position: 5
 ---
 
+import CodeEmbed from '@site/src/components/CodeEmbed';
+
 # Generate Embeddings
 
 Generate embeddings from a model.
 
-Parameters:
+### Using `embed()`
 
-- `model`: name of model to generate embeddings from
-- `input`: text/s to generate embeddings for
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/GenerateEmbeddings.java" />
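+
+In short, `embed()` takes a model name and a list of inputs and returns an `OllamaEmbedResponseModel`; a minimal sketch:
+
+```java
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;
+
+import java.util.Arrays;
+
+public class EmbedSketch {
+
+    public static void main(String[] args) throws Exception {
+        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
+
+        OllamaEmbedResponseModel embeddings = ollamaAPI.embed("all-minilm",
+                Arrays.asList("Why is the sky blue?", "Why is the grass green?"));
+
+        System.out.println(embeddings);
+    }
+}
+```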
 
-```java
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.types.OllamaModelType;
-import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel;
-import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-public class Main {
-
-    public static void main(String[] args) {
-
-        String host = "http://localhost:11434/";
-
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
-
-        OllamaEmbedResponseModel embeddings = ollamaAPI.embed("all-minilm", Arrays.asList("Why is the sky blue?", "Why is the grass green?"));
-
-        System.out.println(embeddings);
-    }
-}
+::::tip[LLM Response]
+```json
+[[0.010000081, -0.0017487297, 0.050126992, 0.04694895, 0.055186987, 0.008570699, 0.10545243, -0.02591801, 0.1296789, 0.031844463, -0.044533115, -0.009081162, -4.7557743E-4, -0.06383077, -0.016083026, 0.04666039, -0.022107942, -0.15835331, -0.07281923, -0.061205965, -0.06593526, 0.054214016, -0.062174935, 0.038974375, -0.04570855, 0.05495598, -0.035383187, 0.012725615, 0.04252694, -0.00806814, -0.019041134, 0.061063103, 0.036943648, 0.013529706, -0.025880618, -0.04349401, 0.07276639, -0.048439376, 0.004148429, -0.029453786, -0.029147545, -0.03282039, -0.018276647, 0.0155515345, -0.011599436, 0.015321048, -0.009423502, 0.02589781, 0.095306225, -0.015580891, -0.024553236, 0.009203469, -0.07652067, 0.01593109, 0.049590923, 0.11590031, 9.564879E-4, -0.020308463, 0.09234688, 0.008461708, -0.057039093, 0.06883451, -0.07657848, 0.06934012, 0.09227977, -0.055535857, -0.05371766, 0.008418901, -0.06311155, -0.066414595, -0.025158273, 0.018782357, 0.061182138, -0.028296644, 0.0362281, 0.001123205, 0.060663134, -0.06755974, -0.008144066, -0.012715194, 0.031090235, -0.06392361, -0.07458864, 0.11904344, 0.012541205, 0.06530589, 0.014893975, 0.051452246, -0.0850448, 0.010324387, -0.007704823, -0.035547107, -0.115298286, -0.030618181, -0.08327795, 0.013764867, 0.05644683, -0.040965002, 0.042660262, 0.022258658, 0.046835635, -0.051371332, 0.030119192, 0.007202741, -0.004240163, -0.031205537, 0.077798784, 0.034248676, 0.06132544, 0.0074892035, -0.036300045, -0.08461072, 0.021714637, -0.019322984, -0.0398159, 0.054820538, -0.033751138, 0.018145457, -0.105456986, -0.050408557, -0.011556143, 0.037754424, 0.022083722, 0.08057535, 0.007969883, -0.016802859, -0.059379302, -7.2237E-33, 0.1354213, -0.011238707, 0.092283085, 0.03602569, 0.039765336, -0.054793786, -0.03515187, -0.0036293974, -0.019418892, -0.034990944, -0.005830097, -0.014649367, -0.024272997, -0.048353665, 0.04776005, -0.017107947, -0.06098698, 0.0058933506, -0.08300741, 0.084322065, -0.104518674, 0.04162716, -0.036671404, -0.008064532, -0.02820598, -0.043205056, 0.036074184, 0.07484778, 0.05651205, 0.011898618, 0.09834075, 0.104802914, -0.021922145, 0.04598351, -0.026300702, -0.050922275, -0.014775197, -0.0064015454, -0.08584967, 0.028555173, -0.05346807, 0.05654622, -0.059783902, 0.012294972, 0.06624266, -0.013547566, 0.038316876, -0.08873539, -0.057546746, 0.03204543, -0.03449219, 0.023742337, 0.014367529, -0.04160565, 0.06808427, 0.031186322, 0.06963124, -0.034979273, -0.0033514828, 0.049272913, -0.0133417705, -0.003452593, 0.050814334, 0.07870213, 0.037588608, -0.011567854, 0.038298655, 0.041919343, -0.012816205, -0.078975335, 0.009014773, 0.013231239, 0.024213182, 0.009769919, -0.010969022, -0.08174755, 0.026874617, -0.029649356, -0.004314064, 0.012965783, -0.03528545, -0.019647561, 0.055427335, -0.06122222, -0.054911185, 0.012418541, -0.019181116, -0.012523167, -0.015836857, -0.06933424, -0.044998724, -0.049169958, 0.048181616, -0.10435304, -0.1079065, 3.5844724E-33, -5.2857434E-4, -0.086338826, -0.087730855, 0.0071089785, -0.0075092614, -0.016718967, 0.045219034, 0.067585975, -0.042870898, 0.0863409, 0.045438178, 0.06795051, 0.009950505, -0.0029959748, 0.058502916, -0.035608236, 0.036216073, 0.066100344, -0.03785426, -0.062264763, -0.04450461, 0.07720427, 0.043325383, -0.021296863, -0.0217195, 0.062334213, -0.0391459, 0.028083341, -0.013057875, 0.051180184, -0.036750164, 0.054655746, -0.066471115, 0.022967137, 0.0047796182, 0.09052008, 0.005167651, -0.0830967, -0.055065937, 0.07320647, -0.11054101, -0.020116722, 0.11247867, -0.053230446, 
-0.057548687, -0.023836475, 0.056587286, 0.12725416, 0.036107734, -0.043944683, 0.017000921, -0.024768567, 0.07276523, 0.043141358, 0.08048159, -0.019533968, -0.03447826, 0.096703045, 0.051834024, 0.010554283, 0.04019631, 0.0020927596, -0.007590705, 0.0016895492, 0.014211798, 0.02047324, -0.023020415, 0.021562004, -0.00928612, -0.050591297, -0.01619291, -0.08997802, -0.060895078, 0.08100429, 0.0022806204, 0.041729365, 0.043731183, -0.025113516, -0.09526692, 0.08865304, -0.09853454, -0.0048426827, 0.035341848, 0.0143458955, -0.064700805, -0.07586452, 0.012436738, -0.05000536, -0.05567604, -0.056878153, -0.018541014, -0.0021473207, -0.0022214772, 0.035333972, -0.05470233, -1.4311088E-8, -0.00807994, 0.026629517, 0.002253397, 0.009933685, -0.02166608, -0.021526324, 0.11142737, 0.0047573056, 0.03775078, 0.0039694835, -0.066885866, -0.028193833, -0.044485897, 0.071223155, 0.018815499, -0.049034107, -0.10390887, -0.043636143, 0.010163606, 0.0418435, -0.013363032, -0.033802148, -0.025276663, -0.013619332, 0.0033778746, 0.033192083, -0.021926358, 0.022021232, 0.071396865, 0.020565767, 0.024445849, 0.035862394, -0.001007896, -0.061173376, -0.08546204, 0.0073751807, -0.038680665, 0.07989097, -0.025537722, -0.060529694, 0.060663767, 0.082347505, -0.056607824, 0.004820212, 0.045103956, 0.023633804, 0.043377202, 0.09108467, -0.051370483, -0.011107505, -0.06897708, 0.007159519, 0.072742105, -0.04338072, 0.025991833, -0.11408352, -0.009605889, 0.022043642, 0.02668666, 0.0038960192, 0.015961006, 0.0036130184, -0.020764133, 0.03348443], [-0.009868476, 0.060335685, 0.025288988, -0.0062160683, 0.07281043, 0.017217565, 0.090314455, -0.051715206, 0.09947815, 0.090584196, 0.0071719657, -0.019594174, -0.075078875, -0.017466826, 0.019347396, 0.040821016, -0.011118273, -0.05854725, -0.12543073, -0.048901077, -0.044018935, 0.031114545, 0.037799157, -0.031743918, -0.0910083, 0.06356124, -0.07640408, 0.08509329, 0.035593998, -0.07126983, 0.021175714, 0.11015013, 0.03325966, -0.02586855, -0.061687328, -0.026381517, 0.020523364, -0.054436196, 0.056599274, 0.032927252, -0.08997798, -0.057034135, 0.026899701, 0.07513233, -0.071349114, -0.004237693, 0.054284442, 0.026307901, 0.078129396, -0.048960682, 0.056613132, -0.04913771, -0.07579886, 0.0069060107, 0.0063763415, 0.036972668, 0.025060013, 0.02181742, 0.01020716, -0.040446986, -0.012050511, -0.0333741, -0.07564401, 0.07132756, -0.02063755, -0.06318399, -0.0013259775, -0.05526086, 0.009020493, -0.08710191, 0.020652942, 0.05299636, -0.009691467, -0.052739665, -0.06480449, 0.042018816, 0.044661146, 0.03273904, -0.01949503, 2.4002639E-4, 0.038351417, 0.050931647, 0.0046273004, 0.057359487, 0.046486866, 0.042649552, -0.017132936, 0.011823174, -0.056949086, -0.035009447, -0.019008413, -0.0074347625, -0.07384615, 0.04393877, -0.09906426, 0.041409962, -0.023977123, -0.12038461, 1.2660562E-4, -0.003843579, 0.05607605, -4.8949895E-4, 0.07111408, -0.036223337, -0.06402696, -0.009451222, -0.042215906, 0.0780337, -0.02371912, 0.007851906, -0.023734692, -0.018583676, -0.033396255, 0.077651344, -0.06062816, 0.053625435, 0.033917766, 0.012533888, -0.032672815, 0.029700326, -0.016981928, 0.0281869, -0.018111452, 0.06656012, -0.06950031, -0.017839137, -0.037340533, -6.835867E-33, -0.0055358508, -0.031647824, 0.048153512, -0.09928821, 0.093719974, -0.051801622, -0.036978923, -0.026406623, -0.037407376, -0.030371925, 0.0061302963, -0.0982039, -0.017523993, -0.07252802, 0.03850968, 0.008900545, -0.13075978, 0.021422677, -0.075673044, -0.01029629, -0.017209169, 0.05839448, 0.021881068, 
0.0015664459, 0.009339571, -0.0314211, 0.080039345, 0.030477382, 0.056920107, -0.012233044, 0.11584086, 0.0040954757, 0.017554974, 0.04336152, 0.029307723, -0.0068410966, -0.025106648, -0.026154919, 0.013292939, 0.121521436, -0.0045275204, -0.045257635, 0.04338726, -0.017685922, 0.06218186, -0.03933278, 0.01735304, 0.008501952, -0.026153479, 0.010192709, 0.023137128, 0.053122364, 0.0690913, 0.05310471, -2.4858408E-4, 0.0096703665, 0.08885624, -0.030429514, -0.09079217, -0.05130283, -0.07371132, 0.08618689, 0.0033454685, 0.010966452, -0.008318914, -0.011407435, -0.029051095, 0.06551204, 0.0054419613, 0.06888426, 0.04281082, -0.018798476, -0.016944146, -0.036808517, -0.006201687, -0.08704525, -0.008700292, -0.013294283, -0.0046790023, 0.042446434, -0.03576464, 0.017382197, -0.08722518, -0.051042743, -0.14942876, -0.0020754328, -0.026410934, 0.018064674, 0.021632154, -0.015141376, 0.003725257, -0.01553318, -0.01327707, -0.098052144, -0.011788692, 2.8246808E-33, -0.022846783, 0.008986764, -0.005797001, 0.066113144, 0.042615797, -0.039735403, 0.027211439, 0.032038726, -0.028711542, 0.04562696, -0.05573173, 0.09247299, -0.046873886, 0.080259465, 0.1185751, 0.048753165, -0.06736519, 0.10693395, 0.009406613, -0.051305998, -0.06782429, 0.015892766, -0.010205388, 0.044131745, -0.030017989, 0.022811258, -0.031549934, -0.022227649, -0.0023231048, -0.00993543, 0.00321092, -0.036961444, -0.112086535, 0.028652154, 0.030226015, 0.031845823, -0.017391989, -0.018336339, -0.036368545, 0.08388451, 0.0079623535, -0.023648826, 0.15845971, 0.03212625, -0.07040057, -0.03407339, -0.0154629275, 0.07972614, -0.062146895, 0.046385817, 0.045025956, 0.10424084, 0.029551638, 0.04803619, 0.09714898, -0.015759284, -0.06423742, 0.019712195, -0.09390805, -0.0027662653, 0.019718736, -0.027150705, -0.07877321, 0.068945184, 0.10764347, -0.033046924, -0.06445677, 0.016665269, 0.01923717, -0.022991046, -0.01895388, -0.018677948, -0.064167276, -0.032140236, -0.00278987, 0.05908604, 0.05976127, -0.044849344, -0.066649735, 0.043921255, -0.01992474, -0.0067276787, 0.047306404, -0.046795446, 0.034644924, -0.015249516, -0.039549574, 0.047282677, 0.052053083, 8.0526056E-4, 0.052853532, 0.0245618, -0.02940274, 0.034503356, 0.0131183695, -1.3674445E-8, -0.034011222, 0.0074544777, -0.010649118, 0.043190703, 0.014776448, -0.041673005, 0.10555256, -0.012789219, -0.0024177078, 0.04042407, -0.0813939, 0.033735543, -0.019764481, 0.07754665, 0.060485274, -0.01606053, -0.11340549, -0.04295919, 0.023188036, 0.074550346, -0.06057766, -0.045957167, -0.08755505, 0.053412285, -0.043960545, 0.029779192, 0.038598273, 0.016920617, 0.027635917, 0.0780365, 0.055601224, 0.052999303, -0.010710994, -0.029806744, -0.08503494, -0.01641341, 0.030428788, 0.06950053, -0.062027372, -0.12210805, 0.011878054, 0.072504126, -0.01782069, 0.0031250992, 0.014375631, -0.037944797, -0.052864, 0.060364977, -0.053158067, 0.018045388, -0.1042992, 0.010478333, 0.042992577, 0.04256209, -0.0030047095, -0.08532544, -0.03959884, -0.004523487, 0.013003125, -0.008466674, -0.029106006, -0.035763003, 0.059537675, -1.000059E-4]]
 ```
+::::
 
-Or, using the `OllamaEmbedRequestModel`:
+You could also use the `OllamaEmbedRequestModel` to specify options such as `seed`, `temperature`, etc., when generating embeddings.
 
-```java
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.types.OllamaModelType;
-import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel;
-import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-public class Main {
-
-    public static void main(String[] args) {
-
-        String host = "http://localhost:11434/";
-
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
-
-        OllamaEmbedResponseModel embeddings = ollamaAPI.embed(new OllamaEmbedRequestModel("all-minilm", Arrays.asList("Why is the sky blue?", "Why is the grass green?")));
-
-        System.out.println(embeddings);
-    }
-}
-```
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/GenerateEmbeddingsWithRequestModel.java" />
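+
+A condensed sketch of the request-model form; where exactly options such as `seed` or `temperature` are set on the request model may differ, so check the embedded example above:
+
+```java
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.models.embeddings.OllamaEmbedRequestModel;
+import io.github.ollama4j.models.embeddings.OllamaEmbedResponseModel;
+
+import java.util.Arrays;
+
+public class EmbedWithRequestModelSketch {
+
+    public static void main(String[] args) throws Exception {
+        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
+
+        OllamaEmbedRequestModel request = new OllamaEmbedRequestModel("all-minilm",
+                Arrays.asList("Why is the sky blue?", "Why is the grass green?"));
+        // Options such as seed or temperature would be applied to the request model here
+
+        OllamaEmbedResponseModel embeddings = ollamaAPI.embed(request);
+        System.out.println(embeddings);
+    }
+}
+```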
 
 You will get a response similar to:
 
+::::tip[LLM Response]
 ```json
-{
-    "model": "all-minilm",
-    "embeddings": [[-0.034674067, 0.030984823, 0.0067988685]],
-    "total_duration": 14173700,
-    "load_duration": 1198800,
-    "prompt_eval_count": 2
-}
-````
+[[0.010000081, -0.0017487297, 0.050126992, 0.04694895, 0.055186987, 0.008570699, 0.10545243, -0.02591801, 0.1296789, 0.031844463, -0.044533115, -0.009081162, -4.7557743E-4, -0.06383077, -0.016083026, 0.04666039, -0.022107942, -0.15835331, -0.07281923, -0.061205965, -0.06593526, 0.054214016, -0.062174935, 0.038974375, -0.04570855, 0.05495598, -0.035383187, 0.012725615, 0.04252694, -0.00806814, -0.019041134, 0.061063103, 0.036943648, 0.013529706, -0.025880618, -0.04349401, 0.07276639, -0.048439376, 0.004148429, -0.029453786, -0.029147545, -0.03282039, -0.018276647, 0.0155515345, -0.011599436, 0.015321048, -0.009423502, 0.02589781, 0.095306225, -0.015580891, -0.024553236, 0.009203469, -0.07652067, 0.01593109, 0.049590923, 0.11590031, 9.564879E-4, -0.020308463, 0.09234688, 0.008461708, -0.057039093, 0.06883451, -0.07657848, 0.06934012, 0.09227977, -0.055535857, -0.05371766, 0.008418901, -0.06311155, -0.066414595, -0.025158273, 0.018782357, 0.061182138, -0.028296644, 0.0362281, 0.001123205, 0.060663134, -0.06755974, -0.008144066, -0.012715194, 0.031090235, -0.06392361, -0.07458864, 0.11904344, 0.012541205, 0.06530589, 0.014893975, 0.051452246, -0.0850448, 0.010324387, -0.007704823, -0.035547107, -0.115298286, -0.030618181, -0.08327795, 0.013764867, 0.05644683, -0.040965002, 0.042660262, 0.022258658, 0.046835635, -0.051371332, 0.030119192, 0.007202741, -0.004240163, -0.031205537, 0.077798784, 0.034248676, 0.06132544, 0.0074892035, -0.036300045, -0.08461072, 0.021714637, -0.019322984, -0.0398159, 0.054820538, -0.033751138, 0.018145457, -0.105456986, -0.050408557, -0.011556143, 0.037754424, 0.022083722, 0.08057535, 0.007969883, -0.016802859, -0.059379302, -7.2237E-33, 0.1354213, -0.011238707, 0.092283085, 0.03602569, 0.039765336, -0.054793786, -0.03515187, -0.0036293974, -0.019418892, -0.034990944, -0.005830097, -0.014649367, -0.024272997, -0.048353665, 0.04776005, -0.017107947, -0.06098698, 0.0058933506, -0.08300741, 0.084322065, -0.104518674, 0.04162716, -0.036671404, -0.008064532, -0.02820598, -0.043205056, 0.036074184, 0.07484778, 0.05651205, 0.011898618, 0.09834075, 0.104802914, -0.021922145, 0.04598351, -0.026300702, -0.050922275, -0.014775197, -0.0064015454, -0.08584967, 0.028555173, -0.05346807, 0.05654622, -0.059783902, 0.012294972, 0.06624266, -0.013547566, 0.038316876, -0.08873539, -0.057546746, 0.03204543, -0.03449219, 0.023742337, 0.014367529, -0.04160565, 0.06808427, 0.031186322, 0.06963124, -0.034979273, -0.0033514828, 0.049272913, -0.0133417705, -0.003452593, 0.050814334, 0.07870213, 0.037588608, -0.011567854, 0.038298655, 0.041919343, -0.012816205, -0.078975335, 0.009014773, 0.013231239, 0.024213182, 0.009769919, -0.010969022, -0.08174755, 0.026874617, -0.029649356, -0.004314064, 0.012965783, -0.03528545, -0.019647561, 0.055427335, -0.06122222, -0.054911185, 0.012418541, -0.019181116, -0.012523167, -0.015836857, -0.06933424, -0.044998724, -0.049169958, 0.048181616, -0.10435304, -0.1079065, 3.5844724E-33, -5.2857434E-4, -0.086338826, -0.087730855, 0.0071089785, -0.0075092614, -0.016718967, 0.045219034, 0.067585975, -0.042870898, 0.0863409, 0.045438178, 0.06795051, 0.009950505, -0.0029959748, 0.058502916, -0.035608236, 0.036216073, 0.066100344, -0.03785426, -0.062264763, -0.04450461, 0.07720427, 0.043325383, -0.021296863, -0.0217195, 0.062334213, -0.0391459, 0.028083341, -0.013057875, 0.051180184, -0.036750164, 0.054655746, -0.066471115, 0.022967137, 0.0047796182, 0.09052008, 0.005167651, -0.0830967, -0.055065937, 0.07320647, -0.11054101, -0.020116722, 0.11247867, -0.053230446, 
-0.057548687, -0.023836475, 0.056587286, 0.12725416, 0.036107734, -0.043944683, 0.017000921, -0.024768567, 0.07276523, 0.043141358, 0.08048159, -0.019533968, -0.03447826, 0.096703045, 0.051834024, 0.010554283, 0.04019631, 0.0020927596, -0.007590705, 0.0016895492, 0.014211798, 0.02047324, -0.023020415, 0.021562004, -0.00928612, -0.050591297, -0.01619291, -0.08997802, -0.060895078, 0.08100429, 0.0022806204, 0.041729365, 0.043731183, -0.025113516, -0.09526692, 0.08865304, -0.09853454, -0.0048426827, 0.035341848, 0.0143458955, -0.064700805, -0.07586452, 0.012436738, -0.05000536, -0.05567604, -0.056878153, -0.018541014, -0.0021473207, -0.0022214772, 0.035333972, -0.05470233, -1.4311088E-8, -0.00807994, 0.026629517, 0.002253397, 0.009933685, -0.02166608, -0.021526324, 0.11142737, 0.0047573056, 0.03775078, 0.0039694835, -0.066885866, -0.028193833, -0.044485897, 0.071223155, 0.018815499, -0.049034107, -0.10390887, -0.043636143, 0.010163606, 0.0418435, -0.013363032, -0.033802148, -0.025276663, -0.013619332, 0.0033778746, 0.033192083, -0.021926358, 0.022021232, 0.071396865, 0.020565767, 0.024445849, 0.035862394, -0.001007896, -0.061173376, -0.08546204, 0.0073751807, -0.038680665, 0.07989097, -0.025537722, -0.060529694, 0.060663767, 0.082347505, -0.056607824, 0.004820212, 0.045103956, 0.023633804, 0.043377202, 0.09108467, -0.051370483, -0.011107505, -0.06897708, 0.007159519, 0.072742105, -0.04338072, 0.025991833, -0.11408352, -0.009605889, 0.022043642, 0.02668666, 0.0038960192, 0.015961006, 0.0036130184, -0.020764133, 0.03348443], [-0.009868476, 0.060335685, 0.025288988, -0.0062160683, 0.07281043, 0.017217565, 0.090314455, -0.051715206, 0.09947815, 0.090584196, 0.0071719657, -0.019594174, -0.075078875, -0.017466826, 0.019347396, 0.040821016, -0.011118273, -0.05854725, -0.12543073, -0.048901077, -0.044018935, 0.031114545, 0.037799157, -0.031743918, -0.0910083, 0.06356124, -0.07640408, 0.08509329, 0.035593998, -0.07126983, 0.021175714, 0.11015013, 0.03325966, -0.02586855, -0.061687328, -0.026381517, 0.020523364, -0.054436196, 0.056599274, 0.032927252, -0.08997798, -0.057034135, 0.026899701, 0.07513233, -0.071349114, -0.004237693, 0.054284442, 0.026307901, 0.078129396, -0.048960682, 0.056613132, -0.04913771, -0.07579886, 0.0069060107, 0.0063763415, 0.036972668, 0.025060013, 0.02181742, 0.01020716, -0.040446986, -0.012050511, -0.0333741, -0.07564401, 0.07132756, -0.02063755, -0.06318399, -0.0013259775, -0.05526086, 0.009020493, -0.08710191, 0.020652942, 0.05299636, -0.009691467, -0.052739665, -0.06480449, 0.042018816, 0.044661146, 0.03273904, -0.01949503, 2.4002639E-4, 0.038351417, 0.050931647, 0.0046273004, 0.057359487, 0.046486866, 0.042649552, -0.017132936, 0.011823174, -0.056949086, -0.035009447, -0.019008413, -0.0074347625, -0.07384615, 0.04393877, -0.09906426, 0.041409962, -0.023977123, -0.12038461, 1.2660562E-4, -0.003843579, 0.05607605, -4.8949895E-4, 0.07111408, -0.036223337, -0.06402696, -0.009451222, -0.042215906, 0.0780337, -0.02371912, 0.007851906, -0.023734692, -0.018583676, -0.033396255, 0.077651344, -0.06062816, 0.053625435, 0.033917766, 0.012533888, -0.032672815, 0.029700326, -0.016981928, 0.0281869, -0.018111452, 0.06656012, -0.06950031, -0.017839137, -0.037340533, -6.835867E-33, -0.0055358508, -0.031647824, 0.048153512, -0.09928821, 0.093719974, -0.051801622, -0.036978923, -0.026406623, -0.037407376, -0.030371925, 0.0061302963, -0.0982039, -0.017523993, -0.07252802, 0.03850968, 0.008900545, -0.13075978, 0.021422677, -0.075673044, -0.01029629, -0.017209169, 0.05839448, 0.021881068, 
0.0015664459, 0.009339571, -0.0314211, 0.080039345, 0.030477382, 0.056920107, -0.012233044, 0.11584086, 0.0040954757, 0.017554974, 0.04336152, 0.029307723, -0.0068410966, -0.025106648, -0.026154919, 0.013292939, 0.121521436, -0.0045275204, -0.045257635, 0.04338726, -0.017685922, 0.06218186, -0.03933278, 0.01735304, 0.008501952, -0.026153479, 0.010192709, 0.023137128, 0.053122364, 0.0690913, 0.05310471, -2.4858408E-4, 0.0096703665, 0.08885624, -0.030429514, -0.09079217, -0.05130283, -0.07371132, 0.08618689, 0.0033454685, 0.010966452, -0.008318914, -0.011407435, -0.029051095, 0.06551204, 0.0054419613, 0.06888426, 0.04281082, -0.018798476, -0.016944146, -0.036808517, -0.006201687, -0.08704525, -0.008700292, -0.013294283, -0.0046790023, 0.042446434, -0.03576464, 0.017382197, -0.08722518, -0.051042743, -0.14942876, -0.0020754328, -0.026410934, 0.018064674, 0.021632154, -0.015141376, 0.003725257, -0.01553318, -0.01327707, -0.098052144, -0.011788692, 2.8246808E-33, -0.022846783, 0.008986764, -0.005797001, 0.066113144, 0.042615797, -0.039735403, 0.027211439, 0.032038726, -0.028711542, 0.04562696, -0.05573173, 0.09247299, -0.046873886, 0.080259465, 0.1185751, 0.048753165, -0.06736519, 0.10693395, 0.009406613, -0.051305998, -0.06782429, 0.015892766, -0.010205388, 0.044131745, -0.030017989, 0.022811258, -0.031549934, -0.022227649, -0.0023231048, -0.00993543, 0.00321092, -0.036961444, -0.112086535, 0.028652154, 0.030226015, 0.031845823, -0.017391989, -0.018336339, -0.036368545, 0.08388451, 0.0079623535, -0.023648826, 0.15845971, 0.03212625, -0.07040057, -0.03407339, -0.0154629275, 0.07972614, -0.062146895, 0.046385817, 0.045025956, 0.10424084, 0.029551638, 0.04803619, 0.09714898, -0.015759284, -0.06423742, 0.019712195, -0.09390805, -0.0027662653, 0.019718736, -0.027150705, -0.07877321, 0.068945184, 0.10764347, -0.033046924, -0.06445677, 0.016665269, 0.01923717, -0.022991046, -0.01895388, -0.018677948, -0.064167276, -0.032140236, -0.00278987, 0.05908604, 0.05976127, -0.044849344, -0.066649735, 0.043921255, -0.01992474, -0.0067276787, 0.047306404, -0.046795446, 0.034644924, -0.015249516, -0.039549574, 0.047282677, 0.052053083, 8.0526056E-4, 0.052853532, 0.0245618, -0.02940274, 0.034503356, 0.0131183695, -1.3674445E-8, -0.034011222, 0.0074544777, -0.010649118, 0.043190703, 0.014776448, -0.041673005, 0.10555256, -0.012789219, -0.0024177078, 0.04042407, -0.0813939, 0.033735543, -0.019764481, 0.07754665, 0.060485274, -0.01606053, -0.11340549, -0.04295919, 0.023188036, 0.074550346, -0.06057766, -0.045957167, -0.08755505, 0.053412285, -0.043960545, 0.029779192, 0.038598273, 0.016920617, 0.027635917, 0.0780365, 0.055601224, 0.052999303, -0.010710994, -0.029806744, -0.08503494, -0.01641341, 0.030428788, 0.06950053, -0.062027372, -0.12210805, 0.011878054, 0.072504126, -0.01782069, 0.0031250992, 0.014375631, -0.037944797, -0.052864, 0.060364977, -0.053158067, 0.018045388, -0.1042992, 0.010478333, 0.042992577, 0.04256209, -0.0030047095, -0.08532544, -0.03959884, -0.004523487, 0.013003125, -0.008466674, -0.029106006, -0.035763003, 0.059537675, -1.000059E-4]]
+```
 
-:::note
+::::
+
+### Using `generateEmbeddings()`
+
+::::danger[NOTE]
 
 This is a deprecated API
 
-:::
+::::
 
-Parameters:
-
-- `model`: name of model to generate embeddings from
-- `prompt`: text to generate embeddings for
-
-```java
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.types.OllamaModelType;
-
-import java.util.List;
-
-public class Main {
-
-    public static void main(String[] args) {
-
-        String host = "http://localhost:11434/";
-
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
-
-        List<Double> embeddings = ollamaAPI.generateEmbeddings(OllamaModelType.LLAMA2,
-                "Here is an article about llamas...");
-
-        embeddings.forEach(System.out::println);
-    }
-}
-```
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/GenerateEmbeddingsOld.java" />
 
 You will get a response similar to:
 
+::::tip[LLM Response]
 ```javascript
  [
     0.5670403838157654,
@@ -115,4 +52,5 @@ You will get a response similar to:
     -0.2916173040866852,
     -0.8924556970596313
 ]
-```
\ No newline at end of file
+```
+::::
\ No newline at end of file
diff --git a/docs/docs/apis-generate/generate-with-image-files.md b/docs/docs/apis-generate/generate-with-image-files.md
index b26f9ba..5f2e8ea 100644
--- a/docs/docs/apis-generate/generate-with-image-files.md
+++ b/docs/docs/apis-generate/generate-with-image-files.md
@@ -1,8 +1,10 @@
 ---
-sidebar_position: 4
+sidebar_position: 3
 ---
 
-# Generate - With Image Files
+import CodeEmbed from '@site/src/components/CodeEmbed';
+
+# Generate with Image Files
 
 This API lets you ask questions along with the image files to the LLMs.
 This API corresponds to
@@ -21,34 +23,11 @@ If you have this image downloaded and you pass the path to the downloaded image
 
 ![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg)
 
-```java
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.models.response.OllamaResult;
-import io.github.ollama4j.types.OllamaModelType;
-import io.github.ollama4j.utils.OptionsBuilder;
-
-import java.io.File;
-import java.util.List;
-
-public class Main {
-
-    public static void main(String[] args) {
-        String host = "http://localhost:11434/";
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
-        ollamaAPI.setRequestTimeoutSeconds(10);
-
-        OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA,
-                "What's in this image?",
-                List.of(
-                        new File("/path/to/image")),
-                new OptionsBuilder().build()
-        );
-        System.out.println(result.getResponse());
-    }
-}
-```
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/GenerateWithImageFile.java" />
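+
+The call itself is a single `generateWithImageFiles()` invocation with the model, the prompt, the image files, and an `Options` object; condensed:
+
+```java
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.models.response.OllamaResult;
+import io.github.ollama4j.types.OllamaModelType;
+import io.github.ollama4j.utils.OptionsBuilder;
+
+import java.io.File;
+import java.util.List;
+
+public class GenerateWithImageFileSketch {
+
+    public static void main(String[] args) throws Exception {
+        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
+        ollamaAPI.setRequestTimeoutSeconds(10);
+
+        OllamaResult result = ollamaAPI.generateWithImageFiles(OllamaModelType.LLAVA,
+                "What's in this image?",
+                List.of(new File("/path/to/image")),
+                new OptionsBuilder().build());
+        System.out.println(result.getResponse());
+    }
+}
+```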
 
 You will get a response similar to:
 
+::::tip[LLM Response]
 > This image features a white boat with brown cushions, where a dog is sitting on the back of the boat. The dog seems to
-> be enjoying its time outdoors, perhaps on a lake.
\ No newline at end of file
+> be enjoying its time outdoors, perhaps on a lake.
+::::
\ No newline at end of file
diff --git a/docs/docs/apis-generate/generate-with-image-urls.md b/docs/docs/apis-generate/generate-with-image-urls.md
index cf9e755..f047032 100644
--- a/docs/docs/apis-generate/generate-with-image-urls.md
+++ b/docs/docs/apis-generate/generate-with-image-urls.md
@@ -1,8 +1,10 @@
 ---
-sidebar_position: 5
+sidebar_position: 4
 ---
 
-# Generate - With Image URLs
+import CodeEmbed from '@site/src/components/CodeEmbed';
+
+# Generate with Image URLs
 
 This API lets you ask questions along with the image files to the LLMs.
 This API corresponds to
@@ -21,33 +23,11 @@ Passing the link of this image the following code:
 
 ![Img](https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg)
 
-```java
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.models.response.OllamaResult;
-import io.github.ollama4j.types.OllamaModelType;
-import io.github.ollama4j.utils.OptionsBuilder;
-
-import java.util.List;
-
-public class Main {
-
-    public static void main(String[] args) {
-        String host = "http://localhost:11434/";
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
-        ollamaAPI.setRequestTimeoutSeconds(10);
-
-        OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA,
-                "What's in this image?",
-                List.of(
-                        "https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
-                new OptionsBuilder().build()
-        );
-        System.out.println(result.getResponse());
-    }
-}
-```
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/GenerateWithImageURL.java" />
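+
+Likewise, the URL variant is a single `generateWithImageURLs()` call; condensed:
+
+```java
+import io.github.ollama4j.OllamaAPI;
+import io.github.ollama4j.models.response.OllamaResult;
+import io.github.ollama4j.types.OllamaModelType;
+import io.github.ollama4j.utils.OptionsBuilder;
+
+import java.util.List;
+
+public class GenerateWithImageURLSketch {
+
+    public static void main(String[] args) throws Exception {
+        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");
+        ollamaAPI.setRequestTimeoutSeconds(10);
+
+        OllamaResult result = ollamaAPI.generateWithImageURLs(OllamaModelType.LLAVA,
+                "What's in this image?",
+                List.of("https://t3.ftcdn.net/jpg/02/96/63/80/360_F_296638053_0gUVA4WVBKceGsIr7LNqRWSnkusi07dq.jpg"),
+                new OptionsBuilder().build());
+        System.out.println(result.getResponse());
+    }
+}
+```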
 
 You will get a response similar to:
 
+::::tip[LLM Response]
 > This image features a white boat with brown cushions, where a dog is sitting on the back of the boat. The dog seems to
-> be enjoying its time outdoors, perhaps on a lake.
\ No newline at end of file
+> be enjoying its time outdoors, perhaps on a lake.
+::::
\ No newline at end of file
diff --git a/docs/docs/apis-generate/generate-with-tools.md b/docs/docs/apis-generate/generate-with-tools.md
index f0722e3..d25a5fc 100644
--- a/docs/docs/apis-generate/generate-with-tools.md
+++ b/docs/docs/apis-generate/generate-with-tools.md
@@ -1,10 +1,12 @@
 ---
-sidebar_position: 3
+sidebar_position: 6
 ---
 
-# Generate - With Tools
+import CodeEmbed from '@site/src/components/CodeEmbed';
 
-This API lets you perform [function calling](https://docs.mistral.ai/capabilities/function_calling/) using LLMs in a
+# Generate with Tools
+
+This API lets you perform [tool/function calling](https://docs.mistral.ai/capabilities/function_calling/) using LLMs in a
 synchronous way.
 This API corresponds to
 the [generate](https://github.com/ollama/ollama/blob/main/docs/api.md#request-raw-mode) API with `raw` mode.
@@ -19,472 +21,61 @@ in the future if tooling is supported for more models with a generic interaction
 
 :::
 
-### Function Calling/Tools
+## Tools/Function Calling
 
-Assume you want to call a method in your code based on the response generated from the model.
+Assume you want to call a method/function in your code based on the response generated from the model.
 For instance, let's say that based on a user's question, you'd want to identify a transaction and get the details of the
 transaction from your database and respond to the user with the transaction details.
 
 You could do that with ease with the `function calling` capabilities of the models by registering your `tools`.
 
-### Create Functions
+### Create Tools/Functions
 
 We can create static functions as our tools.
 
 This function takes the arguments `location` and `fuelType` and performs an operation with these arguments and returns
 fuel price value.
 
-```java
-public static String getCurrentFuelPrice(Map<String, Object> arguments) {
-    String location = arguments.get("location").toString();
-    String fuelType = arguments.get("fuelType").toString();
-    return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
-}
-```
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/tools/FuelPriceTool.java"/>
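+
+In sketch form, such a tool is just a static method that receives the model-supplied arguments as a `Map<String, Object>` and returns a `String`:
+
+```java
+public static String getCurrentFuelPrice(Map<String, Object> arguments) {
+    String location = arguments.get("location").toString();
+    String fuelType = arguments.get("fuelType").toString();
+    // A real implementation would look the price up; this sketch returns a canned answer
+    return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
+}
+```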
 
 This function takes the argument `city` and performs an operation with the argument and returns the weather for a
 location.
 
-```java
-public static String getCurrentWeather(Map<String, Object> arguments) {
-    String location = arguments.get("city").toString();
-    return "Currently " + location + "'s weather is nice.";
-}
-```
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/tools/WeatherTool.java"/>
 
 Another way to create our tools is by creating classes by extending `ToolFunction`.
 
 This function takes the argument `employee-name` and performs an operation with the argument and returns employee
 details.
 
-```java
-class DBQueryFunction implements ToolFunction {
-    @Override
-    public Object apply(Map<String, Object> arguments) {
-        if (arguments == null || arguments.isEmpty() || arguments.get("employee-name") == null || arguments.get("employee-address") == null || arguments.get("employee-phone") == null) {
-            throw new RuntimeException("Tool was called but the model failed to provide all the required arguments.");
-        }
-        // perform DB operations here
-        return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
-    }
-}
-```
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/tools/DBQueryFunction.java"/>
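+
+In sketch form, such a class implements `ToolFunction` and overrides `apply()`:
+
+```java
+class DBQueryFunction implements ToolFunction {
+    @Override
+    public Object apply(Map<String, Object> arguments) {
+        if (arguments == null || arguments.isEmpty() || arguments.get("employee-name") == null) {
+            throw new RuntimeException("Tool was called but the model failed to provide all the required arguments.");
+        }
+        // Perform the actual database lookup here
+        return String.format("Employee Details {ID: %s, Name: %s}", UUID.randomUUID(), arguments.get("employee-name"));
+    }
+}
+```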
 
 ### Define Tool Specifications
 
 Lets define a sample tool specification called **Fuel Price Tool** for getting the current fuel price.
 
 - Specify the function `name`, `description`, and `required` properties (`location` and `fuelType`).
-- Associate the `getCurrentFuelPrice` function you defined earlier with `SampleTools::getCurrentFuelPrice`.
+- Associate the `getCurrentFuelPrice` function you defined earlier.
 
-```java
-Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
-        .functionName("current-fuel-price")
-        .functionDescription("Get current fuel price")
-        .toolFunction(SampleTools::getCurrentFuelPrice)
-        .toolPrompt(
-                Tools.PromptFuncDefinition.builder()
-                        .type("prompt")
-                        .function(
-                                Tools.PromptFuncDefinition.PromptFuncSpec.builder()
-                                        .name("get-location-fuel-info")
-                                        .description("Get location and fuel type details")
-                                        .parameters(
-                                                Tools.PromptFuncDefinition.Parameters.builder()
-                                                        .type("object")
-                                                        .properties(
-                                                                Map.of(
-                                                                        "location", Tools.PromptFuncDefinition.Property.builder()
-                                                                                .type("string")
-                                                                                .description("The city, e.g. New Delhi, India")
-                                                                                .required(true)
-                                                                                .build(),
-                                                                        "fuelType", Tools.PromptFuncDefinition.Property.builder()
-                                                                                .type("string")
-                                                                                .description("The fuel type.")
-                                                                                .enumValues(Arrays.asList("petrol", "diesel"))
-                                                                                .required(true)
-                                                                                .build()
-                                                                )
-                                                        )
-                                                        .required(java.util.List.of("location", "fuelType"))
-                                                        .build()
-                                        )
-                                        .build()
-                        )
-                        .build()
-        ).build();
-```
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/toolspecs/FuelPriceToolSpec.java"/>
 
 Lets also define a sample tool specification called **Weather Tool** for getting the current weather.
 
 - Specify the function `name`, `description`, and `required` property (`city`).
-- Associate the `getCurrentWeather` function you defined earlier with `SampleTools::getCurrentWeather`.
+- Associate the `getCurrentWeather` function you defined earlier.
 
-```java
-Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
-        .functionName("current-weather")
-        .functionDescription("Get current weather")
-        .toolFunction(SampleTools::getCurrentWeather)
-        .toolPrompt(
-                Tools.PromptFuncDefinition.builder()
-                        .type("prompt")
-                        .function(
-                                Tools.PromptFuncDefinition.PromptFuncSpec.builder()
-                                        .name("get-location-weather-info")
-                                        .description("Get location details")
-                                        .parameters(
-                                                Tools.PromptFuncDefinition.Parameters.builder()
-                                                        .type("object")
-                                                        .properties(
-                                                                Map.of(
-                                                                        "city", Tools.PromptFuncDefinition.Property.builder()
-                                                                                .type("string")
-                                                                                .description("The city, e.g. New Delhi, India")
-                                                                                .required(true)
-                                                                                .build()
-                                                                )
-                                                        )
-                                                        .required(java.util.List.of("city"))
-                                                        .build()
-                                        )
-                                        .build()
-                        )
-                        .build()
-        ).build();
-```
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/toolspecs/WeatherToolSpec.java"/>
 
 Let's also define a sample tool specification called **DBQueryFunction** for getting employee details from the database.
 
 - Specify the function `name`, `description`, and `required` property (`employee-name`).
 - Associate the `DBQueryFunction` implementation of `ToolFunction` you defined earlier by passing `new DBQueryFunction()`.
 
-```java
-Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
-        .functionName("get-employee-details")
-        .functionDescription("Get employee details from the database")
-        .toolFunction(new DBQueryFunction())
-        .toolPrompt(
-                Tools.PromptFuncDefinition.builder()
-                        .type("prompt")
-                        .function(
-                                Tools.PromptFuncDefinition.PromptFuncSpec.builder()
-                                        .name("get-employee-details")
-                                        .description("Get employee details from the database")
-                                        .parameters(
-                                                Tools.PromptFuncDefinition.Parameters.builder()
-                                                        .type("object")
-                                                        .properties(
-                                                                Map.of(
-                                                                        "employee-name", Tools.PromptFuncDefinition.Property.builder()
-                                                                                .type("string")
-                                                                                .description("The name of the employee, e.g. John Doe")
-                                                                                .required(true)
-                                                                                .build(),
-                                                                        "employee-address", Tools.PromptFuncDefinition.Property.builder()
-                                                                                .type("string")
-                                                                                .description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India")
-                                                                                .required(true)
-                                                                                .build(),
-                                                                        "employee-phone", Tools.PromptFuncDefinition.Property.builder()
-                                                                                .type("string")
-                                                                                .description("The phone number of the employee. Always return a random value. e.g. 9911002233")
-                                                                                .required(true)
-                                                                                .build()
-                                                                )
-                                                        )
-                                                        .required(java.util.List.of("employee-name", "employee-address", "employee-phone"))
-                                                        .build()
-                                        )
-                                        .build()
-                        )
-                        .build()
-        )
-        .build();
-```
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/toolcalling/toolspecs/DatabaseQueryToolSpec.java"/>
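+
+Unlike the two method-reference tools above, this tool is backed by a class implementing the `ToolFunction` interface. A minimal sketch (the fabricated employee record is illustrative; a real implementation would query your database):
+
+```java
+import io.github.ollama4j.tools.ToolFunction;
+
+import java.util.Map;
+import java.util.UUID;
+
+class DBQueryFunction implements ToolFunction {
+    @Override
+    public Object apply(Map<String, Object> arguments) {
+        // Perform the actual database lookup here; this sketch returns a made-up record.
+        return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}",
+                UUID.randomUUID(),
+                arguments.get("employee-name"),
+                arguments.get("employee-address"),
+                arguments.get("employee-phone"));
+    }
+}
+```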
 
-### Register the Tools
+Now, put it all together by registering the tools with the `OllamaAPI` and prompting the model with them.
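+
+Condensed, the flow is: register each `Tools.ToolSpecification` with the client, build a prompt that advertises the relevant tools, and call `generateWithTools`. A rough sketch, assuming `ollamaAPI`, `model`, and the three specifications from the previous sections are in scope (the full, runnable version is embedded below):
+
+```java
+// Make the tool specifications known to the client.
+ollamaAPI.registerTool(fuelPriceToolSpecification);
+ollamaAPI.registerTool(weatherToolSpecification);
+ollamaAPI.registerTool(databaseQueryToolSpecification);
+
+// Build a prompt that includes the tool definitions alongside the question.
+String prompt = new Tools.PromptBuilder()
+        .withToolSpecification(fuelPriceToolSpecification)
+        .withToolSpecification(weatherToolSpecification)
+        .withPrompt("What is the petrol price in Bengaluru?")
+        .build();
+
+// Generate with tools and print the result of each tool the model invoked.
+OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, new OptionsBuilder().build());
+for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
+    System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
+}
+```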
 
-Register the defined tools (`fuel price` and `weather`) with the OllamaAPI.
-
-```shell
-ollamaAPI.registerTool(fuelPriceToolSpecification);
-ollamaAPI.registerTool(weatherToolSpecification);
-ollamaAPI.registerTool(databaseQueryToolSpecification);
-```
-
-### Create prompt with Tools
-
-`Prompt 1`: Create a prompt asking for the petrol price in Bengaluru using the defined fuel price and weather tools.
-
-```shell
-String prompt1 = new Tools.PromptBuilder()
-                .withToolSpecification(fuelPriceToolSpecification)
-                .withToolSpecification(weatherToolSpecification)
-                .withPrompt("What is the petrol price in Bengaluru?")
-                .build();
-OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt1, new OptionsBuilder().build());
-for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
-    System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
-}
-```
-
-Now, fire away your question to the model.
-
-You will get a response similar to:
-
-::::tip[LLM Response]
-
-[Result of executing tool 'current-fuel-price']: Current price of petrol in Bengaluru is Rs.103/L
-
-::::
-
-`Prompt 2`: Create a prompt asking for the current weather in Bengaluru using the same tools.
-
-```shell
-String prompt2 = new Tools.PromptBuilder()
-                .withToolSpecification(fuelPriceToolSpecification)
-                .withToolSpecification(weatherToolSpecification)
-                .withPrompt("What is the current weather in Bengaluru?")
-                .build();
-OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt2, new OptionsBuilder().build());
-for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
-    System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
-}
-```
-
-Again, fire away your question to the model.
-
-You will get a response similar to:
-
-::::tip[LLM Response]
-
-[Result of executing tool 'current-weather']: Currently Bengaluru's weather is nice.
-
-::::
-
-`Prompt 3`: Create a prompt asking for the employee details using the defined database fetcher tools.
-
-```shell
-String prompt3 = new Tools.PromptBuilder()
-                .withToolSpecification(fuelPriceToolSpecification)
-                .withToolSpecification(weatherToolSpecification)
-                .withToolSpecification(databaseQueryToolSpecification)
-                .withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
-                .build();
-OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt3, new OptionsBuilder().build());
-for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
-    System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
-}
-```
-
-Again, fire away your question to the model.
-
-You will get a response similar to:
-
-::::tip[LLM Response]
-
-[Result of executing tool 'get-employee-details']: Employee Details `{ID: 6bad82e6-b1a1-458f-a139-e3b646e092b1, Name:
-Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
-
-::::
-
-### Full Example
-
-```java
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.exceptions.OllamaBaseException;
-import io.github.ollama4j.exceptions.ToolInvocationException;
-import io.github.ollama4j.tools.OllamaToolsResult;
-import io.github.ollama4j.tools.ToolFunction;
-import io.github.ollama4j.tools.Tools;
-import io.github.ollama4j.utils.OptionsBuilder;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Map;
-import java.util.UUID;
-
-public class FunctionCallingWithMistralExample {
-    public static void main(String[] args) throws Exception {
-        String host = "http://localhost:11434/";
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
-        ollamaAPI.setRequestTimeoutSeconds(60);
-
-        String model = "mistral";
-
-        Tools.ToolSpecification fuelPriceToolSpecification = Tools.ToolSpecification.builder()
-                .functionName("current-fuel-price")
-                .functionDescription("Get current fuel price")
-                .toolFunction(SampleTools::getCurrentFuelPrice)
-                .toolPrompt(
-                        Tools.PromptFuncDefinition.builder()
-                                .type("prompt")
-                                .function(
-                                        Tools.PromptFuncDefinition.PromptFuncSpec.builder()
-                                                .name("get-location-fuel-info")
-                                                .description("Get location and fuel type details")
-                                                .parameters(
-                                                        Tools.PromptFuncDefinition.Parameters.builder()
-                                                                .type("object")
-                                                                .properties(
-                                                                        Map.of(
-                                                                                "location", Tools.PromptFuncDefinition.Property.builder()
-                                                                                        .type("string")
-                                                                                        .description("The city, e.g. New Delhi, India")
-                                                                                        .required(true)
-                                                                                        .build(),
-                                                                                "fuelType", Tools.PromptFuncDefinition.Property.builder()
-                                                                                        .type("string")
-                                                                                        .description("The fuel type.")
-                                                                                        .enumValues(Arrays.asList("petrol", "diesel"))
-                                                                                        .required(true)
-                                                                                        .build()
-                                                                        )
-                                                                )
-                                                                .required(java.util.List.of("location", "fuelType"))
-                                                                .build()
-                                                )
-                                                .build()
-                                )
-                                .build()
-                ).build();
-
-        Tools.ToolSpecification weatherToolSpecification = Tools.ToolSpecification.builder()
-                .functionName("current-weather")
-                .functionDescription("Get current weather")
-                .toolFunction(SampleTools::getCurrentWeather)
-                .toolPrompt(
-                        Tools.PromptFuncDefinition.builder()
-                                .type("prompt")
-                                .function(
-                                        Tools.PromptFuncDefinition.PromptFuncSpec.builder()
-                                                .name("get-location-weather-info")
-                                                .description("Get location details")
-                                                .parameters(
-                                                        Tools.PromptFuncDefinition.Parameters.builder()
-                                                                .type("object")
-                                                                .properties(
-                                                                        Map.of(
-                                                                                "city", Tools.PromptFuncDefinition.Property.builder()
-                                                                                        .type("string")
-                                                                                        .description("The city, e.g. New Delhi, India")
-                                                                                        .required(true)
-                                                                                        .build()
-                                                                        )
-                                                                )
-                                                                .required(java.util.List.of("city"))
-                                                                .build()
-                                                )
-                                                .build()
-                                )
-                                .build()
-                ).build();
-
-        Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
-                .functionName("get-employee-details")
-                .functionDescription("Get employee details from the database")
-                .toolFunction(new DBQueryFunction())
-                .toolPrompt(
-                        Tools.PromptFuncDefinition.builder()
-                                .type("prompt")
-                                .function(
-                                        Tools.PromptFuncDefinition.PromptFuncSpec.builder()
-                                                .name("get-employee-details")
-                                                .description("Get employee details from the database")
-                                                .parameters(
-                                                        Tools.PromptFuncDefinition.Parameters.builder()
-                                                                .type("object")
-                                                                .properties(
-                                                                        Map.of(
-                                                                                "employee-name", Tools.PromptFuncDefinition.Property.builder()
-                                                                                        .type("string")
-                                                                                        .description("The name of the employee, e.g. John Doe")
-                                                                                        .required(true)
-                                                                                        .build(),
-                                                                                "employee-address", Tools.PromptFuncDefinition.Property.builder()
-                                                                                        .type("string")
-                                                                                        .description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India")
-                                                                                        .required(true)
-                                                                                        .build(),
-                                                                                "employee-phone", Tools.PromptFuncDefinition.Property.builder()
-                                                                                        .type("string")
-                                                                                        .description("The phone number of the employee. Always return a random value. e.g. 9911002233")
-                                                                                        .required(true)
-                                                                                        .build()
-                                                                        )
-                                                                )
-                                                                .required(java.util.List.of("employee-name", "employee-address", "employee-phone"))
-                                                                .build()
-                                                )
-                                                .build()
-                                )
-                                .build()
-                )
-                .build();
-
-        ollamaAPI.registerTool(fuelPriceToolSpecification);
-        ollamaAPI.registerTool(weatherToolSpecification);
-        ollamaAPI.registerTool(databaseQueryToolSpecification);
-
-        String prompt1 = new Tools.PromptBuilder()
-                .withToolSpecification(fuelPriceToolSpecification)
-                .withToolSpecification(weatherToolSpecification)
-                .withPrompt("What is the petrol price in Bengaluru?")
-                .build();
-        ask(ollamaAPI, model, prompt1);
-
-        String prompt2 = new Tools.PromptBuilder()
-                .withToolSpecification(fuelPriceToolSpecification)
-                .withToolSpecification(weatherToolSpecification)
-                .withPrompt("What is the current weather in Bengaluru?")
-                .build();
-        ask(ollamaAPI, model, prompt2);
-
-        String prompt3 = new Tools.PromptBuilder()
-                .withToolSpecification(fuelPriceToolSpecification)
-                .withToolSpecification(weatherToolSpecification)
-                .withToolSpecification(databaseQueryToolSpecification)
-                .withPrompt("Give me the details of the employee named 'Rahul Kumar'?")
-                .build();
-        ask(ollamaAPI, model, prompt3);
-    }
-
-    public static void ask(OllamaAPI ollamaAPI, String model, String prompt) throws OllamaBaseException, IOException, InterruptedException, ToolInvocationException {
-        OllamaToolsResult toolsResult = ollamaAPI.generateWithTools(model, prompt, new OptionsBuilder().build());
-        for (OllamaToolsResult.ToolResult r : toolsResult.getToolResults()) {
-            System.out.printf("[Result of executing tool '%s']: %s%n", r.getFunctionName(), r.getResult().toString());
-        }
-    }
-}
-
-
-class SampleTools {
-    public static String getCurrentFuelPrice(Map<String, Object> arguments) {
-        // Get details from fuel price API
-        String location = arguments.get("location").toString();
-        String fuelType = arguments.get("fuelType").toString();
-        return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
-    }
-
-    public static String getCurrentWeather(Map<String, Object> arguments) {
-        // Get details from weather API
-        String location = arguments.get("city").toString();
-        return "Currently " + location + "'s weather is nice.";
-    }
-}
-
-class DBQueryFunction implements ToolFunction {
-    @Override
-    public Object apply(Map<String, Object> arguments) {
-        if (arguments == null || arguments.isEmpty() || arguments.get("employee-name") == null || arguments.get("employee-address") == null || arguments.get("employee-phone") == null) {
-            throw new RuntimeException("Tool was called but the model failed to provide all the required arguments.");
-        }
-        // perform DB operations here
-        return String.format("Employee Details {ID: %s, Name: %s, Address: %s, Phone: %s}", UUID.randomUUID(), arguments.get("employee-name").toString(), arguments.get("employee-address").toString(), arguments.get("employee-phone").toString());
-    }
-}
-```
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/MultiToolRegistryExample.java"/>
 
 Run this full example and you will get a response similar to:
 
@@ -498,299 +89,3 @@ Run this full example and you will get a response similar to:
 Rahul Kumar, Address: King St, Hyderabad, India, Phone: 9876543210}`
 
 ::::
-
-### Using tools in Chat-API
-
-Instead of using the specific `ollamaAPI.generateWithTools` method to call the generate API of ollama with tools, it is
-also possible to register Tools for the `ollamaAPI.chat` methods. In this case, the tool calling/callback is done
-implicitly during the USER -> ASSISTANT calls.
-
-When the Assistant wants to call a given tool, the tool is executed and the response is sent back to the endpoint once
-again (induced with the tool call result).
-
-#### Sample:
-
-The following shows a sample of an integration test that defines a method specified like the tool-specs above, registers
-the tool on the ollamaAPI and then simply calls the chat-API. All intermediate tool calling is wrapped inside the api
-call.
-
-```java
-public static void main(String[] args) {
-        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
-        ollamaAPI.setVerbose(true);
-        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("llama3.2:1b");
-
-        final Tools.ToolSpecification databaseQueryToolSpecification = Tools.ToolSpecification.builder()
-                .functionName("get-employee-details")
-                .functionDescription("Get employee details from the database")
-                .toolPrompt(
-                        Tools.PromptFuncDefinition.builder().type("function").function(
-                                Tools.PromptFuncDefinition.PromptFuncSpec.builder()
-                                        .name("get-employee-details")
-                                        .description("Get employee details from the database")
-                                        .parameters(
-                                                Tools.PromptFuncDefinition.Parameters.builder()
-                                                        .type("object")
-                                                        .properties(
-                                                                new Tools.PropsBuilder()
-                                                                        .withProperty("employee-name", Tools.PromptFuncDefinition.Property.builder().type("string").description("The name of the employee, e.g. John Doe").required(true).build())
-                                                                        .withProperty("employee-address", Tools.PromptFuncDefinition.Property.builder().type("string").description("The address of the employee, Always return a random value. e.g. Roy St, Bengaluru, India").required(true).build())
-                                                                        .withProperty("employee-phone", Tools.PromptFuncDefinition.Property.builder().type("string").description("The phone number of the employee. Always return a random value. e.g. 9911002233").required(true).build())
-                                                                        .build()
-                                                        )
-                                                        .required(List.of("employee-name"))
-                                                        .build()
-                                        ).build()
-                        ).build()
-                )
-                .toolFunction(new DBQueryFunction())
-                .build();
-
-        ollamaAPI.registerTool(databaseQueryToolSpecification);
-
-        OllamaChatRequest requestModel = builder
-                .withMessage(OllamaChatMessageRole.USER,
-                        "Give me the ID of the employee named 'Rahul Kumar'?")
-                .build();
-
-        OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
-}
-```
-
-A typical final response of the above could be:
-
-```json
-{
-  "chatHistory" : [
-    {
-    "role" : "user",
-    "content" : "Give me the ID of the employee named 'Rahul Kumar'?",
-    "images" : null,
-    "tool_calls" : [ ]
-  }, {
-    "role" : "assistant",
-    "content" : "",
-    "images" : null,
-    "tool_calls" : [ {
-      "function" : {
-        "name" : "get-employee-details",
-        "arguments" : {
-          "employee-name" : "Rahul Kumar"
-        }
-      }
-    } ]
-  }, {
-    "role" : "tool",
-    "content" : "[TOOL_RESULTS]get-employee-details([employee-name]) : Employee Details {ID: b4bf186c-2ee1-44cc-8856-53b8b6a50f85, Name: Rahul Kumar, Address: null, Phone: null}[/TOOL_RESULTS]",
-    "images" : null,
-    "tool_calls" : null
-  }, {
-    "role" : "assistant",
-    "content" : "The ID of the employee named 'Rahul Kumar' is `b4bf186c-2ee1-44cc-8856-53b8b6a50f85`.",
-    "images" : null,
-    "tool_calls" : null
-  } ],
-  "responseModel" : {
-    "model" : "llama3.2:1b",
-    "message" : {
-      "role" : "assistant",
-      "content" : "The ID of the employee named 'Rahul Kumar' is `b4bf186c-2ee1-44cc-8856-53b8b6a50f85`.",
-      "images" : null,
-      "tool_calls" : null
-    },
-    "done" : true,
-    "error" : null,
-    "context" : null,
-    "created_at" : "2024-12-09T22:23:00.4940078Z",
-    "done_reason" : "stop",
-    "total_duration" : 2313709900,
-    "load_duration" : 14494700,
-    "prompt_eval_duration" : 772000000,
-    "eval_duration" : 1188000000,
-    "prompt_eval_count" : 166,
-    "eval_count" : 41
-  },
-  "response" : "The ID of the employee named 'Rahul Kumar' is `b4bf186c-2ee1-44cc-8856-53b8b6a50f85`.",
-  "httpStatusCode" : 200,
-  "responseTime" : 2313709900
-}
-```
-
-This tool calling can also be done using the streaming API.
-
-### Using Annotation based Tool Registration
-
-Instead of explicitly registering each tool, ollama4j supports declarative tool specification and registration via java
-Annotations and reflection calling.
-
-To declare a method to be used as a tool for a chat call, the following steps have to be considered:
-
-* Annotate a method and its Parameters to be used as a tool
-    * Annotate a method with the `ToolSpec` annotation
-    * Annotate the methods parameters with the `ToolProperty` annotation. Only the following datatypes are supported for now:
-        * `java.lang.String`
-        * `java.lang.Integer`
-        * `java.lang.Boolean`
-        * `java.math.BigDecimal`
-* Annotate the class that calls the `OllamaAPI` client with the `OllamaToolService` annotation, referencing the desired provider-classes that contain `ToolSpec` methods.
-* Before calling the `OllamaAPI` chat request, call the method `OllamaAPI.registerAnnotatedTools()` method to add tools to the chat.
-
-#### Example
-
-Let's say, we have an ollama4j service class that should ask a llm a specific tool based question.
-
-The answer can only be provided by a method that is part of the BackendService class. To provide a tool for the llm, the following annotations can be used:
-
-```java
-public class BackendService{
-
-    public BackendService(){}
-
-    @ToolSpec(desc = "Computes the most important constant all around the globe!")
-    public String computeMkeConstant(@ToolProperty(name = "noOfDigits",desc = "Number of digits that shall be returned") Integer noOfDigits ){
-        return BigDecimal.valueOf((long)(Math.random()*1000000L),noOfDigits).toString();
-    }
-}
-```
-
-The caller API can then be written as:
-```java
-import io.github.ollama4j.tools.annotations.OllamaToolService;
-
-@OllamaToolService(providers = BackendService.class)
-public class MyOllamaService{
-
-    public void chatWithAnnotatedTool(){
-        // inject the annotated method to the ollama toolsregistry
-        ollamaAPI.registerAnnotatedTools();
-
-        OllamaChatRequest requestModel = builder
-                .withMessage(OllamaChatMessageRole.USER,
-                        "Compute the most important constant in the world using 5 digits")
-                .build();
-
-        OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
-    }
-
-}
-```
-
-Or, if one needs to provide an object instance directly:
-```java
-public class MyOllamaService{
-
-    public void chatWithAnnotatedTool(){
-        ollamaAPI.registerAnnotatedTools(new BackendService());
-        OllamaChatRequest requestModel = builder
-                .withMessage(OllamaChatMessageRole.USER,
-                        "Compute the most important constant in the world using 5 digits")
-                .build();
-
-        OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
-    }
-
-}
-```
-
-The request should be the following:
-
-```json
-{
-  "model" : "llama3.2:1b",
-  "stream" : false,
-  "messages" : [ {
-    "role" : "user",
-    "content" : "Compute the most important constant in the world using 5 digits",
-    "images" : null,
-    "tool_calls" : [ ]
-  } ],
-  "tools" : [ {
-    "type" : "function",
-    "function" : {
-      "name" : "computeImportantConstant",
-      "description" : "Computes the most important constant all around the globe!",
-      "parameters" : {
-        "type" : "object",
-        "properties" : {
-          "noOfDigits" : {
-            "type" : "java.lang.Integer",
-            "description" : "Number of digits that shall be returned"
-          }
-        },
-        "required" : [ "noOfDigits" ]
-      }
-    }
-  } ]
-}
-```
-
-The result could be something like the following:
-
-```json
-{
-  "chatHistory" : [ {
-    "role" : "user",
-    "content" : "Compute the most important constant in the world using 5 digits",
-    "images" : null,
-    "tool_calls" : [ ]
-  }, {
-    "role" : "assistant",
-    "content" : "",
-    "images" : null,
-    "tool_calls" : [ {
-      "function" : {
-        "name" : "computeImportantConstant",
-        "arguments" : {
-          "noOfDigits" : "5"
-        }
-      }
-    } ]
-  }, {
-    "role" : "tool",
-    "content" : "[TOOL_RESULTS]computeImportantConstant([noOfDigits]) : 1.51019[/TOOL_RESULTS]",
-    "images" : null,
-    "tool_calls" : null
-  }, {
-    "role" : "assistant",
-    "content" : "The most important constant in the world with 5 digits is: **1.51019**",
-    "images" : null,
-    "tool_calls" : null
-  } ],
-  "responseModel" : {
-    "model" : "llama3.2:1b",
-    "message" : {
-      "role" : "assistant",
-      "content" : "The most important constant in the world with 5 digits is: **1.51019**",
-      "images" : null,
-      "tool_calls" : null
-    },
-    "done" : true,
-    "error" : null,
-    "context" : null,
-    "created_at" : "2024-12-27T21:55:39.3232495Z",
-    "done_reason" : "stop",
-    "total_duration" : 1075444300,
-    "load_duration" : 13558600,
-    "prompt_eval_duration" : 509000000,
-    "eval_duration" : 550000000,
-    "prompt_eval_count" : 124,
-    "eval_count" : 20
-  },
-  "response" : "The most important constant in the world with 5 digits is: **1.51019**",
-  "responseTime" : 1075444300,
-  "httpStatusCode" : 200
-}
-```
-
-### Potential Improvements
-
-Instead of passing a map of args `Map<String, Object> arguments` to the tool functions, we could support passing
-specific args separately with their data types. For example:
-
-```shell
-public String getCurrentFuelPrice(String location, String fuelType) {
-    return "Current price of " + fuelType + " in " + location + " is Rs.103/L";
-}
-```
-
-Updating async/chat APIs with support for tool-based generation.
diff --git a/docs/docs/apis-generate/generate.md b/docs/docs/apis-generate/generate.md
index 0fbed37..f8f438b 100644
--- a/docs/docs/apis-generate/generate.md
+++ b/docs/docs/apis-generate/generate.md
@@ -4,7 +4,7 @@ sidebar_position: 1
 
 import CodeEmbed from '@site/src/components/CodeEmbed';
 
-# Generate - Sync
+# Generate (Sync)
 
 This API lets you ask questions to the LLMs in a synchronous way.
 This API corresponds to
@@ -15,21 +15,24 @@ with [extra parameters](https://github.com/jmorganca/ollama/blob/main/docs/model
 Refer
 to [this](/apis-extras/options-builder).
 
-## Try asking a question about the model
+### Try asking a question about the model
 
 <CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/Generate.java" />
 
 You will get a response similar to:
 
+::::tip[LLM Response]
 > I am a large language model created by Alibaba Cloud. My purpose is to assist users in generating text, answering
 > questions, and completing tasks. I aim to be user-friendly and easy to understand for everyone who interacts with me.
-
-## Try asking a question, receiving the answer streamed
+::::
+
+### Try asking a question, receiving the answer streamed
 
 <CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/GenerateStreamingWithTokenConcatenation.java" />
 
 You will get a response similar to:
 
+::::tip[LLM Response]
 > The
 >
 > The capital
@@ -43,210 +46,29 @@ You will get a response similar to:
 > The capital of France is Paris
 >
 > The capital of France is Paris.
-
-## Try asking a question from general topics
-
-```java
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.models.response.OllamaResult;
-import io.github.ollama4j.types.OllamaModelType;
-import io.github.ollama4j.utils.OptionsBuilder;
-
-public class Main {
-
-    public static void main(String[] args) {
-
-        String host = "http://localhost:11434/";
-
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
-
-        String prompt = "List all cricket world cup teams of 2019.";
-
-        OllamaResult result =
-                ollamaAPI.generate(OllamaModelType.LLAMA2, prompt, new OptionsBuilder().build());
-
-        System.out.println(result.getResponse());
-    }
-}
-
-```
-
-You'd then get a response from the model:
-
-> The 2019 ICC Cricket World Cup was held in England and Wales from May 30 to July 14, 2019. The
-> following teams
-> participated in the tournament:
->
-> 1. Afghanistan
-> 2. Australia
-> 3. Bangladesh
-> 4. England
-> 5. India
-> 6. New Zealand
-> 7. Pakistan
-> 8. South Africa
-> 9. Sri Lanka
-> 10. West Indies
->
-> These teams competed in a round-robin format, with the top four teams advancing to the
-> semi-finals. The tournament was
-> won by the England cricket team, who defeated New Zealand in the final.
-
-## Try asking for a Database query for your data schema
-
-```java
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.models.response.OllamaResult;
-import io.github.ollama4j.types.OllamaModelType;
-import io.github.ollama4j.utils.OptionsBuilder;
-import io.github.ollama4j.utils.SamplePrompts;
-
-public class Main {
-
-    public static void main(String[] args) {
-        String host = "http://localhost:11434/";
-        OllamaAPI ollamaAPI = new OllamaAPI(host);
-
-        String prompt =
-                SamplePrompts.getSampleDatabasePromptWithQuestion(
-                        "List all customer names who have bought one or more products");
-        OllamaResult result =
-                ollamaAPI.generate(OllamaModelType.SQLCODER, prompt, new OptionsBuilder().build());
-        System.out.println(result.getResponse());
-    }
-}
-
-```
-
-_Note: Here I've used
-a [sample prompt](https://github.com/ollama4j/ollama4j/blob/main/src/main/resources/sample-db-prompt-template.txt)
-containing a database schema from within this library for demonstration purposes._
-
-You'd then get a response from the model:
-
-```sql
-SELECT customers.name
-FROM sales
-         JOIN customers ON sales.customer_id = customers.customer_id
-GROUP BY customers.name;
-```
+::::
 
 ## Generate structured output
 
 ### With response as a `Map`
 
-```java
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/StructuredOutput.java" />
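+
+At its core, the embedded example builds a `format` map describing the expected JSON schema and passes it as the third argument to `generate`. A rough sketch, assuming an `OllamaAPI` instance named `api` (the model name and prompt are illustrative):
+
+```java
+// Describe the JSON shape the model must produce.
+Map<String, Object> format = new HashMap<>();
+format.put("type", "object");
+format.put("properties", Map.of(
+        "age", Map.of("type", "integer"),
+        "available", Map.of("type", "boolean")));
+format.put("required", Arrays.asList("age", "available"));
+
+String prompt = "Ollama is 22 years old and is busy saving the world. Respond using JSON";
+OllamaResult result = api.generate("qwen2.5:0.5b", prompt, format);
+System.out.println(result.getResponse());
+```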
 
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.utils.Utilities;
-import io.github.ollama4j.models.chat.OllamaChatMessageRole;
-import io.github.ollama4j.models.chat.OllamaChatRequest;
-import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
-import io.github.ollama4j.models.chat.OllamaChatResult;
-import io.github.ollama4j.models.response.OllamaResult;
-import io.github.ollama4j.types.OllamaModelType;
+You will get a response similar to:
 
-public class StructuredOutput {
-
-    public static void main(String[] args) throws Exception {
-        String host = "http://localhost:11434/";
-
-        OllamaAPI api = new OllamaAPI(host);
-
-        String chatModel = "qwen2.5:0.5b";
-        api.pullModel(chatModel);
-
-        String prompt = "Ollama is 22 years old and is busy saving the world. Respond using JSON";
-        Map<String, Object> format = new HashMap<>();
-        format.put("type", "object");
-        format.put("properties", new HashMap<String, Object>() {
-            {
-                put("age", new HashMap<String, Object>() {
-                    {
-                        put("type", "integer");
-                    }
-                });
-                put("available", new HashMap<String, Object>() {
-                    {
-                        put("type", "boolean");
-                    }
-                });
-            }
-        });
-        format.put("required", Arrays.asList("age", "available"));
-
-        OllamaResult result = api.generate(chatModel, prompt, format);
-        System.out.println(result);
-    }
+::::tip[LLM Response]
+```json
+{
+    "available": true,
+    "age": 22
 }
 ```
+::::
 
 ### With response mapped to a specified class type
 
-```java
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
+<CodeEmbed src="https://raw.githubusercontent.com/ollama4j/ollama4j-examples/refs/heads/main/src/main/java/io/github/ollama4j/examples/StructuredOutputMappedToObject.java" />
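+
+Mapping to a class works by declaring a small POJO whose fields match the requested schema. A rough sketch, mirroring the `Person` type used in the embedded example (which generates its accessors with Lombok):
+
+```java
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+// POJO matching the requested JSON schema; Lombok generates the accessors.
+@Data
+@AllArgsConstructor
+@NoArgsConstructor
+class Person {
+    private int age;
+    private boolean available;
+}
+```
+
+The generated `OllamaResult` is then converted with `Person person = result.as(Person.class);`, which yields output like: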
 
-import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.utils.Utilities;
-import lombok.AllArgsConstructor;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-import io.github.ollama4j.models.chat.OllamaChatMessageRole;
-import io.github.ollama4j.models.chat.OllamaChatRequest;
-import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
-import io.github.ollama4j.models.chat.OllamaChatResult;
-import io.github.ollama4j.models.response.OllamaResult;
-import io.github.ollama4j.types.OllamaModelType;
-
-public class StructuredOutput {
-
-    public static void main(String[] args) throws Exception {
-        String host = Utilities.getFromConfig("host");
-
-        OllamaAPI api = new OllamaAPI(host);
-
-        int age = 28;
-        boolean available = false;
-
-        String prompt = "Batman is " + age + " years old and is " + (available ? "available" : "not available")
-                + " because he is busy saving Gotham City. Respond using JSON";
-
-        Map<String, Object> format = new HashMap<>();
-        format.put("type", "object");
-        format.put("properties", new HashMap<String, Object>() {
-            {
-                put("age", new HashMap<String, Object>() {
-                    {
-                        put("type", "integer");
-                    }
-                });
-                put("available", new HashMap<String, Object>() {
-                    {
-                        put("type", "boolean");
-                    }
-                });
-            }
-        });
-        format.put("required", Arrays.asList("age", "available"));
-
-        OllamaResult result = api.generate(CHAT_MODEL_QWEN_SMALL, prompt, format);
-
-        Person person = result.as(Person.class);
-        System.out.println(person.getAge());
-        System.out.println(person.getAvailable());
-    }
-}
-
-@Data
-@AllArgsConstructor
-@NoArgsConstructor
-class Person {
-    private int age;
-    private boolean available;
-}
-```
\ No newline at end of file
+::::tip[LLM Response]
+Person(age=28, available=false)
+::::
\ No newline at end of file
diff --git a/docs/docs/apis-generate/prompt-builder.md b/docs/docs/apis-generate/prompt-builder.md
index b947dca..dfbd6a8 100644
--- a/docs/docs/apis-generate/prompt-builder.md
+++ b/docs/docs/apis-generate/prompt-builder.md
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 6
+sidebar_position: 10
 ---
 
 # Prompt Builder