Mirror of https://github.com/amithkoujalgi/ollama4j.git
Synced 2025-11-03 18:10:42 +01:00

Compare commits

62 Commits
78a5eedc8f
364f961ee2
b21aa6add2
ec4abd1c2d
9900ae92fb
fa20daf6e5
44949c0559
e88711a017
32169ded18
4b2d566fd9
fb4b7a7ce5
18f27775b0
cb462ad05a
1eec22ca1a
c1f3c51f88
7dd556293f
ee50131ce4
2cd47dbfaa
e5296c1067
0f00f05e3d
976a3b82e5
ba26d620c4
e45246a767
7336668f0c
11701fb222
b1ec12c4e9
d0b0a0fc97
20774fca6b
9c46b510d8
9d887b60a8
63d4de4e24
a10692e2f1
b0c152a42e
f44767e023
aadef0a57c
777ee7ffe0
dcf1d0bdbc
13b7111a42
09442d37a3
1e66bdb07f
b423090db9
a32d94efbf
31f8302849
6487756764
abb76ad867
cf4e7a96e8
0f414f71a3
2b700fdad8
06c5daa253
91aab6cbd1
f38a00ebdc
0f73ea75ab
8fe869afdb
2d274c4f5b
713a3239a4
a9e7958d44
f38e84053f
7eb16b7ba0
5a3889d8ee
e9621f054d
b41b62220c
c89440cbca
README.md (12 changed lines)
@@ -67,7 +67,7 @@ In your Maven project, add this dependency:
 <dependency>
     <groupId>io.github.amithkoujalgi</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.47</version>
+    <version>1.0.57</version>
 </dependency>
 ```
 
@@ -125,15 +125,15 @@ Actions CI workflow.
 - [x] Update request body creation with Java objects
 - [ ] Async APIs for images
 - [ ] Add custom headers to requests
-- [ ] Add additional params for `ask` APIs such as:
+- [x] Add additional params for `ask` APIs such as:
     - [x] `options`: additional model parameters for the Modelfile such as `temperature` -
       Supported [params](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
-    - [ ] `system`: system prompt to (overrides what is defined in the Modelfile)
-    - [ ] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile)
-    - [ ] `context`: the context parameter returned from a previous request, which can be used to keep a
+    - [x] `system`: system prompt to (overrides what is defined in the Modelfile)
+    - [x] `template`: the full prompt or prompt template (overrides what is defined in the Modelfile)
+    - [x] `context`: the context parameter returned from a previous request, which can be used to keep a
       short
       conversational memory
-    - [ ] `stream`: Add support for streaming responses from the model
+    - [x] `stream`: Add support for streaming responses from the model
 - [ ] Add test cases
 - [ ] Handle exceptions better (maybe throw more appropriate exceptions)

@@ -1,6 +1,6 @@
 {
   "label": "APIs - Extras",
-  "position": 10,
+  "position": 4,
   "link": {
     "type": "generated-index",
     "description": "Details of APIs to handle bunch of extra stuff."

@@ -1,6 +1,6 @@
 {
-  "label": "APIs - Ask",
-  "position": 10,
+  "label": "APIs - Generate",
+  "position": 3,
   "link": {
     "type": "generated-index",
     "description": "Details of APIs to interact with LLMs."
@@ -69,6 +69,41 @@ You will get a response similar to:
   } ]
 ```
 
+## Create a conversation where the answer is streamed
+
+```java
+public class Main {
+
+    public static void main(String[] args) {
+
+        String host = "http://localhost:11434/";
+
+        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
+        OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
+                "What is the capital of France? And what's France's connection with Mona Lisa?")
+            .build();
+
+        // define a handler (Consumer<String>)
+        OllamaStreamHandler streamHandler = (s) -> {
+           System.out.println(s);
+        };
+
+        OllamaChatResult chatResult = ollamaAPI.chat(requestModel, streamHandler);
+    }
+}
+```
+You will get a response similar to:
+
+> The
+> The capital
+> The capital of
+> The capital of France
+> The capital of France is 
+> The capital of France is Paris
+> The capital of France is Paris.
+
+
 ## Create a new conversation with individual system prompt
 ```java
 public class Main {
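The snippet added in this hunk references a `config.getModel()` helper that is not defined on this page. A minimal self-contained sketch of the same streamed chat call is shown below; the host URL, the model name `llama2`, and the exact import locations are assumptions for illustration, based on the package layout seen elsewhere in this compare.

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;

public class StreamedChatExample {

    public static void main(String[] args) throws Exception {
        // host and model name are placeholders, not taken from the diff
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/");

        OllamaChatRequestModel requestModel = OllamaChatRequestBuilder.getInstance("llama2")
                .withMessage(OllamaChatMessageRole.USER, "Why is the sky blue?")
                .build();

        // each callback receives the message streamed so far
        OllamaStreamHandler streamHandler = System.out::println;

        OllamaChatResult chatResult = ollamaAPI.chat(requestModel, streamHandler);
    }
}
```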
@@ -41,6 +41,41 @@ You will get a response similar to:
 > require
 > natural language understanding and generation capabilities.
 
+## Try asking a question, receiving the answer streamed
+
+```java
+public class Main {
+
+    public static void main(String[] args) {
+
+        String host = "http://localhost:11434/";
+
+        OllamaAPI ollamaAPI = new OllamaAPI(host);
+        // define a stream handler (Consumer<String>)
+        OllamaStreamHandler streamHandler = (s) -> {
+           System.out.println(s);
+        };
+
+        // Should be called using seperate thread to gain non blocking streaming effect.
+        OllamaResult result = ollamaAPI.generate(config.getModel(),
+          "What is the capital of France? And what's France's connection with Mona Lisa?",
+          new OptionsBuilder().build(), streamHandler);
+
+        System.out.println("Full response: " + result.getResponse());
+    }
+}
+```
+You will get a response similar to:
+
+> The
+> The capital
+> The capital of
+> The capital of France
+> The capital of France is 
+> The capital of France is Paris
+> The capital of France is Paris.
+> Full response: The capital of France is Paris.
+
 ## Try asking a question from general topics.
 
 ```java
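The comment in the added snippet notes that the call should run on a separate thread to get a non-blocking streaming effect. One possible sketch of that, using a `CompletableFuture`, follows; the host, the model name, and the import locations of `OllamaResult` and `OptionsBuilder` are assumptions rather than part of the diff.

```java
import java.util.concurrent.CompletableFuture;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class NonBlockingGenerateExample {

    public static void main(String[] args) {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/"); // assumed host
        OllamaStreamHandler streamHandler = System.out::println;        // prints partial responses

        // run the blocking generate call off the main thread
        CompletableFuture<OllamaResult> future = CompletableFuture.supplyAsync(() -> {
            try {
                return ollamaAPI.generate("llama2", "Why is the sky blue?",
                        new OptionsBuilder().build(), streamHandler);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        });

        System.out.println("Full response: " + future.join().getResponse());
    }
}
```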
@@ -42,7 +42,7 @@ public class AskPhi {
                        .addSeparator()
                        .add("How do I read a file in Go and print its contents to stdout?");
 
-        OllamaResult response = ollamaAPI.generate(model, promptBuilder.build());
+        OllamaResult response = ollamaAPI.generate(model, promptBuilder.build(), new OptionsBuilder().build());
         System.out.println(response.getResponse());
     }
 }
@@ -1,6 +1,6 @@
 {
   "label": "APIs - Model Management",
-  "position": 4,
+  "position": 2,
   "link": {
     "type": "generated-index",
     "description": "Details of APIs to manage LLMs."

pom.xml (12 changed lines)
@@ -4,7 +4,7 @@
 
     <groupId>io.github.amithkoujalgi</groupId>
     <artifactId>ollama4j</artifactId>
-    <version>1.0.52</version>
+    <version>1.0.64</version>
 
     <name>Ollama4j</name>
     <description>Java library for interacting with Ollama API.</description>
@@ -39,7 +39,7 @@
         <connection>scm:git:git@github.com:amithkoujalgi/ollama4j.git</connection>
         <developerConnection>scm:git:https://github.com/amithkoujalgi/ollama4j.git</developerConnection>
         <url>https://github.com/amithkoujalgi/ollama4j</url>
-        <tag>v1.0.52</tag>
+        <tag>v1.0.64</tag>
     </scm>
 
     <build>
@@ -99,7 +99,7 @@
                 <configuration>
                     <skipTests>${skipUnitTests}</skipTests>
                     <includes>
-                        <include>**/unittests/*.java</include>
+                        <include>**/unittests/**/*.java</include>
                     </includes>
                 </configuration>
             </plugin>
@@ -174,6 +174,12 @@
             <version>4.1.0</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.json</groupId>
+            <artifactId>json</artifactId>
+            <version>20240205</version>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
     <distributionManagement>

@@ -6,9 +6,11 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessage;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
+import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel;
+import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
 import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest;
 import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest;
-import io.github.amithkoujalgi.ollama4j.core.models.request.ModelEmbeddingsRequest;
 import io.github.amithkoujalgi.ollama4j.core.models.request.ModelRequest;
 import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaChatEndpointCaller;
 import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaGenerateEndpointCaller;
@@ -312,8 +314,18 @@ public class OllamaAPI {
    */
   public List<Double> generateEmbeddings(String model, String prompt)
       throws IOException, InterruptedException, OllamaBaseException {
+        return generateEmbeddings(new OllamaEmbeddingsRequestModel(model, prompt));
+  }
+
+    /**
+   * Generate embeddings using a {@link OllamaEmbeddingsRequestModel}.
+   *
+   * @param modelRequest request for '/api/embeddings' endpoint
+   * @return embeddings
+   */
+  public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException{
     URI uri = URI.create(this.host + "/api/embeddings");
-    String jsonData = new ModelEmbeddingsRequest(model, prompt).toString();
+    String jsonData = modelRequest.toString();
     HttpClient httpClient = HttpClient.newHttpClient();
     HttpRequest.Builder requestBuilder =
         getRequestBuilderDefault(uri)
@@ -324,8 +336,8 @@ public class OllamaAPI {
     int statusCode = response.statusCode();
     String responseBody = response.body();
     if (statusCode == 200) {
-      EmbeddingResponse embeddingResponse =
-          Utils.getObjectMapper().readValue(responseBody, EmbeddingResponse.class);
+      OllamaEmbeddingResponseModel embeddingResponse =
+          Utils.getObjectMapper().readValue(responseBody, OllamaEmbeddingResponseModel.class);
       return embeddingResponse.getEmbedding();
     } else {
       throw new OllamaBaseException(statusCode + " - " + responseBody);
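A minimal sketch of calling the new request-model overload introduced in the hunks above; the host and the model name are placeholders for illustration, and the two-argument constructor is the same one used internally by the existing String-based overload.

```java
import java.util.List;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;

public class EmbeddingsExample {

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/"); // assumed host

        // build the request object directly instead of passing model and prompt strings
        OllamaEmbeddingsRequestModel request =
                new OllamaEmbeddingsRequestModel("nomic-embed-text", "Here is an article about llamas...");

        List<Double> embedding = ollamaAPI.generateEmbeddings(request);
        System.out.println("Vector length: " + embedding.size());
    }
}
```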
@@ -341,13 +353,24 @@ public class OllamaAPI {
    * @param options the Options object - <a
    *     href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More
    *     details on the options</a>
+   * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
    * @return OllamaResult that includes response text and time taken for response
    */
+  public OllamaResult generate(String model, String prompt, Options options, OllamaStreamHandler streamHandler)
+      throws OllamaBaseException, IOException, InterruptedException {
+    OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
+    ollamaRequestModel.setOptions(options.getOptionsMap());
+    return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
+  }
+
+  /**
+   * Convenience method to call Ollama API without streaming responses.
+   *
+   * Uses {@link #generate(String, String, Options, OllamaStreamHandler)}
+   */
   public OllamaResult generate(String model, String prompt, Options options)
   throws OllamaBaseException, IOException, InterruptedException {
-    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
-    ollamaRequestModel.setOptions(options.getOptionsMap());
-    return generateSyncForOllamaRequestModel(ollamaRequestModel);
+    return generate(model, prompt, options, null);
   }
 
   /**
@@ -360,7 +383,7 @@ public class OllamaAPI {
    * @return the ollama async result callback handle
    */
   public OllamaAsyncResultCallback generateAsync(String model, String prompt) {
-    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt);
+    OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
 
     URI uri = URI.create(this.host + "/api/generate");
     OllamaAsyncResultCallback ollamaAsyncResultCallback =
@@ -380,20 +403,32 @@ public class OllamaAPI {
    * @param options the Options object - <a
    *     href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More
    *     details on the options</a>
+   * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
    * @return OllamaResult that includes response text and time taken for response
    */
   public OllamaResult generateWithImageFiles(
-      String model, String prompt, List<File> imageFiles, Options options)
+      String model, String prompt, List<File> imageFiles, Options options, OllamaStreamHandler streamHandler)
       throws OllamaBaseException, IOException, InterruptedException {
     List<String> images = new ArrayList<>();
     for (File imageFile : imageFiles) {
       images.add(encodeFileToBase64(imageFile));
     }
-    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
+    OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
     ollamaRequestModel.setOptions(options.getOptionsMap());
-    return generateSyncForOllamaRequestModel(ollamaRequestModel);
+    return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
   }
 
+   /**
+   * Convenience method to call Ollama API without streaming responses.
+   *
+   * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)}
+   */
+  public OllamaResult generateWithImageFiles(
+    String model, String prompt, List<File> imageFiles, Options options)
+    throws OllamaBaseException, IOException, InterruptedException{
+      return generateWithImageFiles(model, prompt, imageFiles, options, null);
+  }
+
   /**
    * With one or more image URLs, ask a question to a model running on Ollama server. This is a
    * sync/blocking call.
@@ -404,18 +439,30 @@ public class OllamaAPI {
    * @param options the Options object - <a
    *     href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values">More
    *     details on the options</a>
+   * @param streamHandler optional callback consumer that will be applied every time a streamed response is received. If not set, the stream parameter of the request is set to false.
    * @return OllamaResult that includes response text and time taken for response
    */
   public OllamaResult generateWithImageURLs(
-      String model, String prompt, List<String> imageURLs, Options options)
+      String model, String prompt, List<String> imageURLs, Options options, OllamaStreamHandler streamHandler)
       throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
     List<String> images = new ArrayList<>();
     for (String imageURL : imageURLs) {
       images.add(encodeByteArrayToBase64(Utils.loadImageBytesFromUrl(imageURL)));
     }
-    OllamaRequestModel ollamaRequestModel = new OllamaRequestModel(model, prompt, images);
+    OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
     ollamaRequestModel.setOptions(options.getOptionsMap());
-    return generateSyncForOllamaRequestModel(ollamaRequestModel);
+    return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
+  }
+
+  /**
+   * Convenience method to call Ollama API without streaming responses.
+   *
+   * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)}
+   */
+  public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs,
+      Options options)
+      throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
+    return generateWithImageURLs(model, prompt, imageURLs, options, null);
   }
 
 
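A short sketch of the new streaming overload for image prompts added in the two hunks above; the image file path, the model name `llava`, and the host are placeholders. The URL-based variant gains the same `OllamaStreamHandler` parameter and would be used analogously.

```java
import java.io.File;
import java.util.List;

import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class ImagePromptExample {

    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434/"); // assumed host
        OllamaStreamHandler streamHandler = System.out::println;        // prints partial responses

        // model name and image path are placeholders for illustration
        OllamaResult result = ollamaAPI.generateWithImageFiles(
                "llava",
                "What is in this picture?",
                List.of(new File("/path/to/image.jpg")),
                new OptionsBuilder().build(),
                streamHandler);

        System.out.println("Full response: " + result.getResponse());
    }
}
```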
@@ -448,12 +495,31 @@
  * @throws InterruptedException in case the server is not reachable or network issues happen
   */
  public OllamaChatResult chat(OllamaChatRequestModel request)  throws OllamaBaseException, IOException, InterruptedException{
-    OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
-    //TODO: implement async way
-    if(request.isStream()){
-      throw new UnsupportedOperationException("Streamed chat responses are not implemented yet");
+    return chat(request, null);
+  }
+
+  /**
+   * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
+   *
+   * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
+   *
+   * @param request request object to be sent to the server
+   * @param streamHandler callback handler to handle the last message from stream (caution: all previous messages from stream will be concatenated)
+   * @return the chat result
+  * @throws OllamaBaseException any response code than 200 has been returned
+  * @throws IOException in case the responseStream can not be read
+  * @throws InterruptedException in case the server is not reachable or network issues happen
+   */
+  public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler)  throws OllamaBaseException, IOException, InterruptedException{
+    OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
+    OllamaResult result;
+    if(streamHandler != null){
+      request.setStream(true);
+      result = requestCaller.call(request, streamHandler);
+    }
+    else {
+     result = requestCaller.callSync(request);
     }
-    OllamaResult result = requestCaller.generateSync(request);
     return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages());
   }
 
@@ -467,10 +533,19 @@ public class OllamaAPI {
     return Base64.getEncoder().encodeToString(bytes);
   }
 
-  private OllamaResult generateSyncForOllamaRequestModel(OllamaRequestModel ollamaRequestModel)
+  private OllamaResult generateSyncForOllamaRequestModel(
+      OllamaGenerateRequestModel ollamaRequestModel, OllamaStreamHandler streamHandler)
       throws OllamaBaseException, IOException, InterruptedException {
-        OllamaGenerateEndpointCaller requestCaller = new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
-        return requestCaller.generateSync(ollamaRequestModel);
+    OllamaGenerateEndpointCaller requestCaller =
+        new OllamaGenerateEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
+    OllamaResult result;
+    if (streamHandler != null) {
+      ollamaRequestModel.setStream(true);
+      result = requestCaller.call(ollamaRequestModel, streamHandler);
+    } else {
+      result = requestCaller.callSync(ollamaRequestModel);
+    }
+    return result;
   }
 
   /**

@@ -0,0 +1,7 @@
+package io.github.amithkoujalgi.ollama4j.core;
+
+import java.util.function.Consumer;
+
+public interface OllamaStreamHandler extends Consumer<String>{
+    void accept(String message);
+}
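Because the new interface extends `Consumer<String>` and, per the `chat` javadoc earlier in this compare, each callback receives the concatenation of everything streamed so far, a handler that prints only the newly appended text could look like the sketch below (an illustration, not part of the diff).

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;

/** Prints only the text appended since the previous callback. */
public class DeltaPrintingStreamHandler implements OllamaStreamHandler {

    private int printedLength = 0;

    @Override
    public void accept(String messageSoFar) {
        // messageSoFar holds the whole response streamed so far, so print just the new suffix
        System.out.print(messageSoFar.substring(printedLength));
        printedLength = messageSoFar.length();
        System.out.flush();
    }
}
```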
@@ -1,6 +1,8 @@
 package io.github.amithkoujalgi.ollama4j.core.models;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 import lombok.Data;
 
 @Data
@@ -34,4 +36,13 @@ public class Model {
     return name.split(":")[1];
   }
 
+    @Override
+  public String toString() {
+    try {
+      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
+    } catch (JsonProcessingException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
 }

@@ -2,7 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models;
 
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Map;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 import lombok.Data;
 
 @Data
@@ -16,5 +17,14 @@ public class ModelDetail {
   private String parameters;
   private String template;
   private String system;
-  private Map<String, String> details;
+  private ModelMeta details;
+
+    @Override
+  public String toString() {
+    try {
+      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
+    } catch (JsonProcessingException e) {
+      throw new RuntimeException(e);
+    }
+  }
 }

@@ -2,6 +2,8 @@ package io.github.amithkoujalgi.ollama4j.core.models;
 
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 import lombok.Data;
 
 @Data
@@ -21,4 +23,13 @@ public class ModelMeta {
 
   @JsonProperty("quantization_level")
   private String quantizationLevel;
+
+    @Override
+  public String toString() {
+    try {
+      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
+    } catch (JsonProcessingException e) {
+      throw new RuntimeException(e);
+    }
+  }
 }

@@ -1,6 +1,8 @@
 package io.github.amithkoujalgi.ollama4j.core.models;
 
 import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
 import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -22,7 +24,7 @@ import lombok.Getter;
 @SuppressWarnings("unused")
 public class OllamaAsyncResultCallback extends Thread {
   private final HttpRequest.Builder requestBuilder;
-  private final OllamaRequestModel ollamaRequestModel;
+  private final OllamaGenerateRequestModel ollamaRequestModel;
   private final Queue<String> queue = new LinkedList<>();
   private String result;
   private boolean isDone;
@@ -47,7 +49,7 @@ public class OllamaAsyncResultCallback extends Thread {
 
   public OllamaAsyncResultCallback(
       HttpRequest.Builder requestBuilder,
-      OllamaRequestModel ollamaRequestModel,
+      OllamaGenerateRequestModel ollamaRequestModel,
       long requestTimeoutSeconds) {
     this.requestBuilder = requestBuilder;
     this.ollamaRequestModel = ollamaRequestModel;
@@ -87,8 +89,8 @@ public class OllamaAsyncResultCallback extends Thread {
             queue.add(ollamaResponseModel.getError());
             responseBuffer.append(ollamaResponseModel.getError());
           } else {
-            OllamaResponseModel ollamaResponseModel =
-                Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
+            OllamaGenerateResponseModel ollamaResponseModel =
+                Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
             queue.add(ollamaResponseModel.getResponse());
             if (!ollamaResponseModel.isDone()) {
               responseBuffer.append(ollamaResponseModel.getResponse());

@@ -0,0 +1,35 @@
+package io.github.amithkoujalgi.ollama4j.core.models;
+
+import java.util.Map;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+
+import io.github.amithkoujalgi.ollama4j.core.utils.BooleanToJsonFormatFlagSerializer;
+import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+import lombok.Data;
+
+@Data
+@JsonInclude(JsonInclude.Include.NON_NULL)
+public abstract class OllamaCommonRequestModel {
+
+  protected String model;
+  @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)
+  @JsonProperty(value = "format")
+  protected Boolean returnFormatJson;
+  protected Map<String, Object> options;
+  protected String template;
+  protected boolean stream;
+  @JsonProperty(value = "keep_alive")
+  protected String keepAlive;
+
+
+  public String toString() {
+    try {
+      return Utils.getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
+    } catch (JsonProcessingException e) {
+      throw new RuntimeException(e);
+    }
+  }
+}
@@ -1,39 +0,0 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
-
-import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-
-import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
-
-import java.util.List;
-import java.util.Map;
-import lombok.Data;
-
-@Data
-public class OllamaRequestModel implements OllamaRequestBody{
-
-  private String model;
-  private String prompt;
-  private List<String> images;
-  private Map<String, Object> options;
-
-  public OllamaRequestModel(String model, String prompt) {
-    this.model = model;
-    this.prompt = prompt;
-  }
-
-  public OllamaRequestModel(String model, String prompt, List<String> images) {
-    this.model = model;
-    this.prompt = prompt;
-    this.images = images;
-  }
-
-  public String toString() {
-    try {
-      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
-    } catch (JsonProcessingException e) {
-      throw new RuntimeException(e);
-    }
-  }
-}
@@ -83,12 +83,12 @@ public class OllamaChatRequestBuilder {
     }
 
     public OllamaChatRequestBuilder withOptions(Options options){
-        this.request.setOptions(options);
+        this.request.setOptions(options.getOptionsMap());
         return this;
     }
 
-    public OllamaChatRequestBuilder withFormat(String format){
-        this.request.setFormat(format);
+    public OllamaChatRequestBuilder withGetJsonResponse(){
+        this.request.setReturnFormatJson(true);
         return this;
     }
 
@@ -1,47 +1,39 @@
 package io.github.amithkoujalgi.ollama4j.core.models.chat;
 
 import java.util.List;
+import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-
 import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
-import io.github.amithkoujalgi.ollama4j.core.utils.Options;
 
-import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
-
-import lombok.AllArgsConstructor;
-import lombok.Data;
-import lombok.NonNull;
-import lombok.RequiredArgsConstructor;
+import lombok.Getter;
+import lombok.Setter;
 
 /**
  * Defines a Request to use against the ollama /api/chat endpoint.
  *
- * @see <a
- *     href="https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate
+ * @see <a href=
+ *      "https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion">Generate
  *      Chat Completion</a>
  */
-@Data
-@AllArgsConstructor
-@RequiredArgsConstructor
-public class OllamaChatRequestModel implements OllamaRequestBody {
+@Getter
+@Setter
+public class OllamaChatRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody {
 
-  @NonNull private String model;
+  private List<OllamaChatMessage> messages;
 
-  @NonNull private List<OllamaChatMessage> messages;
+  public OllamaChatRequestModel() {}
 
-  private String format;
-  private Options options;
-  private String template;
-  private boolean stream;
-  private String keepAlive;
+  public OllamaChatRequestModel(String model, List<OllamaChatMessage> messages) {
+    this.model = model;
+    this.messages = messages;
+  }
 
   @Override
-  public String toString() {
-    try {
-      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
-    } catch (JsonProcessingException e) {
-      throw new RuntimeException(e);
+  public boolean equals(Object o) {
+    if (!(o instanceof OllamaChatRequestModel)) {
+      return false;
     }
+
+    return this.toString().equals(o.toString());
   }
 
 }

@@ -0,0 +1,31 @@
+package io.github.amithkoujalgi.ollama4j.core.models.chat;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
+
+public class OllamaChatStreamObserver {
+
+    private OllamaStreamHandler streamHandler;
+
+    private List<OllamaChatResponseModel> responseParts = new ArrayList<>();
+
+    private String message = "";
+
+    public OllamaChatStreamObserver(OllamaStreamHandler streamHandler) {
+        this.streamHandler = streamHandler;
+    }
+
+    public void notify(OllamaChatResponseModel currentResponsePart){
+        responseParts.add(currentResponsePart);
+        handleCurrentResponsePart(currentResponsePart);
+    }
+
+    protected void handleCurrentResponsePart(OllamaChatResponseModel currentResponsePart){
+        message = message + currentResponsePart.getMessage().getContent();
+        streamHandler.accept(message);
+    }
+
+
+}
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
+package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 
@@ -7,7 +7,7 @@ import lombok.Data;
 
 @SuppressWarnings("unused")
 @Data
-public class EmbeddingResponse {
+public class OllamaEmbeddingResponseModel {
     @JsonProperty("embedding")
     private List<Double> embedding;
 }
@@ -0,0 +1,31 @@
+package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
+
+import io.github.amithkoujalgi.ollama4j.core.utils.Options;
+
+public class OllamaEmbeddingsRequestBuilder {
+
+    private OllamaEmbeddingsRequestBuilder(String model, String prompt){
+        request = new OllamaEmbeddingsRequestModel(model, prompt);
+    }
+
+    private OllamaEmbeddingsRequestModel request;
+
+    public static OllamaEmbeddingsRequestBuilder getInstance(String model, String prompt){
+        return new OllamaEmbeddingsRequestBuilder(model, prompt);
+    }
+
+    public OllamaEmbeddingsRequestModel build(){
+        return request;
+    }
+
+    public OllamaEmbeddingsRequestBuilder withOptions(Options options){
+        this.request.setOptions(options.getOptionsMap());
+        return this;
+    }
+
+    public OllamaEmbeddingsRequestBuilder withKeepAlive(String keepAlive){
+        this.request.setKeepAlive(keepAlive);
+        return this;
+    }
+
+}
@@ -0,0 +1,33 @@
+package io.github.amithkoujalgi.ollama4j.core.models.embeddings;
+
+import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
+import java.util.Map;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+import lombok.NonNull;
+import lombok.RequiredArgsConstructor;
+
+@Data
+@RequiredArgsConstructor
+@NoArgsConstructor
+public class OllamaEmbeddingsRequestModel {
+  @NonNull
+  private String model;
+  @NonNull
+  private String prompt;
+
+  protected Map<String, Object> options;
+  @JsonProperty(value = "keep_alive")
+  private String keepAlive;
+
+  @Override
+  public String toString() {
+    try {
+      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
+    } catch (JsonProcessingException e) {
+      throw new RuntimeException(e);
+    }
+  }
+}
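The embeddings builder and request model above are meant to be used together. A rough usage sketch, assuming a locally reachable Ollama server; the host URL, model name, prompt, and keep-alive value are all illustrative:

// Hypothetical host and model; generateEmbeddings(request) returning List<Double>
// matches the testEmbedding case added further below in this change set.
OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder
    .getInstance("nomic-embed-text", "What is the capital of France?")
    .withKeepAlive("5m") // optional; serialized as keep_alive
    .build();
List<Double> embeddings = ollamaAPI.generateEmbeddings(request);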
@@ -0,0 +1,55 @@
+package io.github.amithkoujalgi.ollama4j.core.models.generate;
+
+import io.github.amithkoujalgi.ollama4j.core.utils.Options;
+
+/**
+ * Helper class for creating {@link io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel}
+ * objects using the builder-pattern.
+ */
+public class OllamaGenerateRequestBuilder {
+
+    private OllamaGenerateRequestBuilder(String model, String prompt){
+        request = new OllamaGenerateRequestModel(model, prompt);
+    }
+
+    private OllamaGenerateRequestModel request;
+
+    public static OllamaGenerateRequestBuilder getInstance(String model){
+        return new OllamaGenerateRequestBuilder(model,"");
+    }
+
+    public OllamaGenerateRequestModel build(){
+        return request;
+    }
+
+    public OllamaGenerateRequestBuilder withPrompt(String prompt){
+        request.setPrompt(prompt);
+        return this;
+    }
+
+    public OllamaGenerateRequestBuilder withGetJsonResponse(){
+        this.request.setReturnFormatJson(true);
+        return this;
+    }
+
+    public OllamaGenerateRequestBuilder withOptions(Options options){
+        this.request.setOptions(options.getOptionsMap());
+        return this;
+    }
+
+    public OllamaGenerateRequestBuilder withTemplate(String template){
+        this.request.setTemplate(template);
+        return this;
+    }
+
+    public OllamaGenerateRequestBuilder withStreaming(){
+        this.request.setStream(true);
+        return this;
+    }
+
+    public OllamaGenerateRequestBuilder withKeepAlive(String keepAlive){
+        this.request.setKeepAlive(keepAlive);
+        return this;
+    }
+
+}
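A rough sketch of the generate request builder above; OllamaModelType.LLAMA2 and the prompt are used purely as examples, and OptionsBuilder comes from the existing utils package:

OllamaGenerateRequestModel request = OllamaGenerateRequestBuilder
    .getInstance(OllamaModelType.LLAMA2)
    .withPrompt("Why is the sky blue?")
    .withOptions(new OptionsBuilder().build())
    .withStreaming() // sets stream = true on the request
    .build();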
@@ -0,0 +1,46 @@
+package io.github.amithkoujalgi.ollama4j.core.models.generate;
+
+
+import io.github.amithkoujalgi.ollama4j.core.models.OllamaCommonRequestModel;
+import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
+
+import java.util.List;
+
+import lombok.Getter;
+import lombok.Setter;
+
+@Getter
+@Setter
+public class OllamaGenerateRequestModel extends OllamaCommonRequestModel implements OllamaRequestBody{
+
+  private String prompt;
+  private List<String> images;
+
+  private String system;
+  private String context;
+  private boolean raw;
+
+  public OllamaGenerateRequestModel() {
+  }
+
+  public OllamaGenerateRequestModel(String model, String prompt) {
+    this.model = model;
+    this.prompt = prompt;
+  }
+
+  public OllamaGenerateRequestModel(String model, String prompt, List<String> images) {
+    this.model = model;
+    this.prompt = prompt;
+    this.images = images;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (!(o instanceof OllamaGenerateRequestModel)) {
+      return false;
+    }
+
+    return this.toString().equals(o.toString());
+  }
+
+}
@@ -1,4 +1,4 @@
-package io.github.amithkoujalgi.ollama4j.core.models;
+package io.github.amithkoujalgi.ollama4j.core.models.generate;
 
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
@@ -8,7 +8,7 @@ import lombok.Data;
 
 @Data
 @JsonIgnoreProperties(ignoreUnknown = true)
-public class OllamaResponseModel {
+public class OllamaGenerateResponseModel {
     private String model;
     private @JsonProperty("created_at") String createdAt;
     private String response;
@@ -0,0 +1,31 @@
+package io.github.amithkoujalgi.ollama4j.core.models.generate;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
+
+public class OllamaGenerateStreamObserver {
+
+    private OllamaStreamHandler streamHandler;
+
+    private List<OllamaGenerateResponseModel> responseParts = new ArrayList<>();
+
+    private String message = "";
+
+    public OllamaGenerateStreamObserver(OllamaStreamHandler streamHandler) {
+        this.streamHandler = streamHandler;
+    }
+
+    public void notify(OllamaGenerateResponseModel currentResponsePart){
+        responseParts.add(currentResponsePart);
+        handleCurrentResponsePart(currentResponsePart);
+    }
+
+    protected void handleCurrentResponsePart(OllamaGenerateResponseModel currentResponsePart){
+        message = message + currentResponsePart.getResponse();
+        streamHandler.accept(message);
+    }
+
+
+}
@@ -1,23 +0,0 @@
-package io.github.amithkoujalgi.ollama4j.core.models.request;
-
-import static io.github.amithkoujalgi.ollama4j.core.utils.Utils.getObjectMapper;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import lombok.AllArgsConstructor;
-import lombok.Data;
-
-@Data
-@AllArgsConstructor
-public class ModelEmbeddingsRequest {
-  private String model;
-  private String prompt;
-
-  @Override
-  public String toString() {
-    try {
-      return getObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
-    } catch (JsonProcessingException e) {
-      throw new RuntimeException(e);
-    }
-  }
-}
@@ -1,12 +1,19 @@
 package io.github.amithkoujalgi.ollama4j.core.models.request;
 
+import java.io.IOException;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 
+import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
+import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
 import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
+import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResponseModel;
+import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatStreamObserver;
+import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
 import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 
 /**
@@ -16,6 +23,8 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{
 
     private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class);
 
+    private OllamaChatStreamObserver streamObserver;
+
     public OllamaChatEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
         super(host, basicAuth, requestTimeoutSeconds, verbose);
     }
@@ -30,6 +39,9 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{
         try {
            OllamaChatResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
            responseBuffer.append(ollamaResponseModel.getMessage().getContent());
+           if(streamObserver != null) {
+               streamObserver.notify(ollamaResponseModel);
+           }
            return ollamaResponseModel.isDone();
         } catch (JsonProcessingException e) {
            LOG.error("Error parsing the Ollama chat response!",e);
@@ -37,7 +49,11 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller{
         }
     }
 
+    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
+            throws OllamaBaseException, IOException, InterruptedException {
+        streamObserver = new OllamaChatStreamObserver(streamHandler);
+        return super.callSync(body);
+    }
 
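With the new call(body, streamHandler) overload, a streamed chat looks roughly like the sketch below, mirroring the testChatWithStream case added further down. The model name is only an example, and ollamaAPI is assumed to be an already constructed OllamaAPI instance:

OllamaChatRequestModel requestModel = OllamaChatRequestBuilder.getInstance("llama2")
    .withMessage(OllamaChatMessageRole.USER, "What is the capital of France?")
    .build();
// The handler receives the accumulated response text after each streamed part.
OllamaChatResult chatResult = ollamaAPI.chat(requestModel, (s) -> System.out.println(s));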
@@ -56,7 +56,7 @@ public abstract class OllamaEndpointCaller {
     * @throws IOException in case the responseStream can not be read
     * @throws InterruptedException in case the server is not reachable or network issues happen
     */
-    public OllamaResult generateSync(OllamaRequestBody body)  throws OllamaBaseException, IOException, InterruptedException{
+    public OllamaResult callSync(OllamaRequestBody body)  throws OllamaBaseException, IOException, InterruptedException{
 
         // Create Request
     long startTime = System.currentTimeMillis();
@@ -1,18 +1,25 @@
 package io.github.amithkoujalgi.ollama4j.core.models.request;
 
+import java.io.IOException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
+import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
+import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
 import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaResponseModel;
+import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateResponseModel;
+import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateStreamObserver;
+import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
 import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
 
 public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{
 
     private static final Logger LOG = LoggerFactory.getLogger(OllamaGenerateEndpointCaller.class);
 
+    private OllamaGenerateStreamObserver streamObserver;
+
     public OllamaGenerateEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
         super(host, basicAuth, requestTimeoutSeconds, verbose);
     }
@@ -25,8 +32,11 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{
     @Override
     protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
                 try {
-                    OllamaResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaResponseModel.class);
+                    OllamaGenerateResponseModel ollamaResponseModel = Utils.getObjectMapper().readValue(line, OllamaGenerateResponseModel.class);
                    responseBuffer.append(ollamaResponseModel.getResponse());
+                    if(streamObserver != null) {
+                        streamObserver.notify(ollamaResponseModel);
+                    }
                    return ollamaResponseModel.isDone();
                 } catch (JsonProcessingException e) {
                    LOG.error("Error parsing the Ollama chat response!",e);
@@ -34,7 +44,11 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller{
                 }
     }
 
-    
+    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
+        throws OllamaBaseException, IOException, InterruptedException {
+    streamObserver = new OllamaGenerateStreamObserver(streamHandler);
+    return super.callSync(body);
+    }
     
     
 }
@@ -8,57 +8,72 @@ package io.github.amithkoujalgi.ollama4j.core.types;
  */
 @SuppressWarnings("ALL")
 public class OllamaModelType {
+  public static final String GEMMA = "gemma";
   public static final String LLAMA2 = "llama2";
   public static final String MISTRAL = "mistral";
-  public static final String LLAVA = "llava";
   public static final String MIXTRAL = "mixtral";
-  public static final String STARLING_LM = "starling-lm";
+  public static final String LLAVA = "llava";
   public static final String NEURAL_CHAT = "neural-chat";
   public static final String CODELLAMA = "codellama";
-  public static final String LLAMA2_UNCENSORED = "llama2-uncensored";
   public static final String DOLPHIN_MIXTRAL = "dolphin-mixtral";
+  public static final String MISTRAL_OPENORCA = "mistral-openorca";
+  public static final String LLAMA2_UNCENSORED = "llama2-uncensored";
+  public static final String PHI = "phi";
   public static final String ORCA_MINI = "orca-mini";
+  public static final String DEEPSEEK_CODER = "deepseek-coder";
+  public static final String DOLPHIN_MISTRAL = "dolphin-mistral";
   public static final String VICUNA = "vicuna";
   public static final String WIZARD_VICUNA_UNCENSORED = "wizard-vicuna-uncensored";
-  public static final String PHIND_CODELLAMA = "phind-codellama";
-  public static final String PHI = "phi";
   public static final String ZEPHYR = "zephyr";
+  public static final String OPENHERMES = "openhermes";
+  public static final String QWEN = "qwen";
   public static final String WIZARDCODER = "wizardcoder";
-  public static final String MISTRAL_OPENORCA = "mistral-openorca";
-  public static final String NOUS_HERMES = "nous-hermes";
-  public static final String DEEPSEEK_CODER = "deepseek-coder";
-  public static final String WIZARD_MATH = "wizard-math";
   public static final String LLAMA2_CHINESE = "llama2-chinese";
-  public static final String FALCON = "falcon";
-  public static final String ORCA2 = "orca2";
-  public static final String STABLE_BELUGA = "stable-beluga";
-  public static final String CODEUP = "codeup";
-  public static final String EVERYTHINGLM = "everythinglm";
-  public static final String MEDLLAMA2 = "medllama2";
-  public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored";
-  public static final String STARCODER = "starcoder";
-  public static final String DOLPHIN22_MISTRAL = "dolphin2.2-mistral";
+  public static final String TINYLLAMA = "tinyllama";
+  public static final String PHIND_CODELLAMA = "phind-codellama";
   public static final String OPENCHAT = "openchat";
-  public static final String WIZARD_VICUNA = "wizard-vicuna";
-  public static final String OPENHERMES25_MISTRAL = "openhermes2.5-mistral";
-  public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2";
+  public static final String ORCA2 = "orca2";
+  public static final String FALCON = "falcon";
+  public static final String WIZARD_MATH = "wizard-math";
+  public static final String TINYDOLPHIN = "tinydolphin";
+  public static final String NOUS_HERMES = "nous-hermes";
   public static final String YI = "yi";
-  public static final String YARN_MISTRAL = "yarn-mistral";
-  public static final String SAMANTHA_MISTRAL = "samantha-mistral";
-  public static final String SQLCODER = "sqlcoder";
-  public static final String YARN_LLAMA2 = "yarn-llama2";
-  public static final String MEDITRON = "meditron";
-  public static final String STABLELM_ZEPHYR = "stablelm-zephyr";
-  public static final String OPENHERMES2_MISTRAL = "openhermes2-mistral";
-  public static final String DEEPSEEK_LLM = "deepseek-llm";
-  public static final String MISTRALLITE = "mistrallite";
-  public static final String DOLPHIN21_MISTRAL = "dolphin2.1-mistral";
-  public static final String WIZARDLM = "wizardlm";
-  public static final String CODEBOOGA = "codebooga";
-  public static final String MAGICODER = "magicoder";
-  public static final String GOLIATH = "goliath";
-  public static final String NEXUSRAVEN = "nexusraven";
-  public static final String ALFRED = "alfred";
-  public static final String XWINLM = "xwinlm";
+  public static final String DOLPHIN_PHI = "dolphin-phi";
+  public static final String STARLING_LM = "starling-lm";
+  public static final String STARCODER = "starcoder";
+  public static final String CODEUP = "codeup";
+  public static final String MEDLLAMA2 = "medllama2";
+  public static final String STABLE_CODE = "stable-code";
+  public static final String WIZARDLM_UNCENSORED = "wizardlm-uncensored";
   public static final String BAKLLAVA = "bakllava";
+  public static final String EVERYTHINGLM = "everythinglm";
+  public static final String SOLAR = "solar";
+  public static final String STABLE_BELUGA = "stable-beluga";
+  public static final String SQLCODER = "sqlcoder";
+  public static final String YARN_MISTRAL = "yarn-mistral";
+  public static final String NOUS_HERMES2_MIXTRAL = "nous-hermes2-mixtral";
+  public static final String SAMANTHA_MISTRAL = "samantha-mistral";
+  public static final String STABLELM_ZEPHYR = "stablelm-zephyr";
+  public static final String MEDITRON = "meditron";
+  public static final String WIZARD_VICUNA = "wizard-vicuna";
+  public static final String STABLELM2 = "stablelm2";
+  public static final String MAGICODER = "magicoder";
+  public static final String YARN_LLAMA2 = "yarn-llama2";
+  public static final String NOUS_HERMES2 = "nous-hermes2";
+  public static final String DEEPSEEK_LLM = "deepseek-llm";
+  public static final String LLAMA_PRO = "llama-pro";
+  public static final String OPEN_ORCA_PLATYPUS2 = "open-orca-platypus2";
+  public static final String CODEBOOGA = "codebooga";
+  public static final String MISTRALLITE = "mistrallite";
+  public static final String NEXUSRAVEN = "nexusraven";
+  public static final String GOLIATH = "goliath";
+  public static final String NOMIC_EMBED_TEXT = "nomic-embed-text";
+  public static final String NOTUX = "notux";
+  public static final String ALFRED = "alfred";
+  public static final String MEGADOLPHIN = "megadolphin";
+  public static final String WIZARDLM = "wizardlm";
+  public static final String XWINLM = "xwinlm";
+  public static final String NOTUS = "notus";
+  public static final String DUCKDB_NSQL = "duckdb-nsql";
+  public static final String ALL_MINILM = "all-minilm";
 }
@@ -0,0 +1,21 @@
+package io.github.amithkoujalgi.ollama4j.core.utils;
+
+import java.io.IOException;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.databind.JsonSerializer;
+import com.fasterxml.jackson.databind.SerializerProvider;
+
+public class BooleanToJsonFormatFlagSerializer extends JsonSerializer<Boolean>{
+
+    @Override
+    public void serialize(Boolean value, JsonGenerator gen, SerializerProvider serializers) throws IOException {
+            gen.writeString("json");
+    }
+
+    @Override
+    public boolean isEmpty(SerializerProvider provider,Boolean value){
+        return !value;
+    }
+
+}
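The serializer above writes the literal string "json" whenever the wrapped Boolean is true, and its isEmpty override lets Jackson drop the field entirely when it is false. A hypothetical illustration of how such a serializer is typically attached to a request field (the actual model class that wires it up is outside this excerpt):

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;

class FormatFlagExample {
    @JsonProperty("format")
    @JsonSerialize(using = BooleanToJsonFormatFlagSerializer.class)
    @JsonInclude(JsonInclude.Include.NON_EMPTY) // consults isEmpty(), so false omits the field
    private Boolean returnFormatJson = true;    // serialized as "format": "json"
}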
@@ -1,8 +1,6 @@
 package io.github.amithkoujalgi.ollama4j.core.utils;
 
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.io.ObjectOutputStream;
 import java.util.Base64;
 import java.util.Collection;
 
@@ -20,11 +18,4 @@ public class FileToBase64Serializer extends JsonSerializer<Collection<byte[]>> {
         }
         jsonGenerator.writeEndArray();
     }
-
-    public static byte[] serialize(Object obj) throws IOException {
-        ByteArrayOutputStream out = new ByteArrayOutputStream();
-        ObjectOutputStream os = new ObjectOutputStream(out);
-        os.writeObject(obj);
-        return out.toByteArray();
-    }
 }
@@ -4,11 +4,14 @@ import static org.junit.jupiter.api.Assertions.*;
 
 import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
 import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
+import io.github.amithkoujalgi.ollama4j.core.models.ModelDetail;
 import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
 import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
+import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
+import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
 import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;
 import java.io.File;
 import java.io.IOException;
@@ -23,8 +26,13 @@ import lombok.Data;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Order;
 import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 class TestRealAPIs {
 
+  private static final Logger LOG = LoggerFactory.getLogger(TestRealAPIs.class);
+
   OllamaAPI ollamaAPI;
   Config config;
 
@@ -55,7 +63,7 @@ class TestRealAPIs {
     } catch (HttpConnectTimeoutException e) {
       fail(e.getMessage());
     } catch (Exception e) {
-      throw new RuntimeException(e);
+      fail(e);
     }
   }
 
@@ -67,7 +75,7 @@ class TestRealAPIs {
       assertNotNull(ollamaAPI.listModels());
       ollamaAPI.listModels().forEach(System.out::println);
     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
-      throw new RuntimeException(e);
+      fail(e);
     }
   }
 
@@ -82,7 +90,20 @@ class TestRealAPIs {
               .anyMatch(model -> model.getModel().equalsIgnoreCase(config.getModel()));
       assertTrue(found);
     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
-      throw new RuntimeException(e);
+      fail(e);
+    }
+  }
+
+  @Test
+  @Order(3)
+  void testListDtails() {
+    testEndpointReachability();
+    try {
+      ModelDetail modelDetails = ollamaAPI.getModelDetails(config.getModel());
+      assertNotNull(modelDetails);
+      System.out.println(modelDetails);
+    } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
+      fail(e);
     }
   }
 
@@ -100,7 +121,33 @@ class TestRealAPIs {
       assertNotNull(result.getResponse());
       assertFalse(result.getResponse().isEmpty());
     } catch (IOException | OllamaBaseException | InterruptedException e) {
-      throw new RuntimeException(e);
+      fail(e);
+    }
+  }
+
+  @Test
+  @Order(3)
+  void testAskModelWithDefaultOptionsStreamed() {
+    testEndpointReachability();
+    try {
+
+      StringBuffer sb = new StringBuffer("");
+
+      OllamaResult result = ollamaAPI.generate(config.getModel(),
+          "What is the capital of France? And what's France's connection with Mona Lisa?",
+          new OptionsBuilder().build(), (s) -> {
+            LOG.info(s);
+            String substring = s.substring(sb.toString().length(), s.length());
+            LOG.info(substring);
+            sb.append(substring);
+          });
+
+      assertNotNull(result);
+      assertNotNull(result.getResponse());
+      assertFalse(result.getResponse().isEmpty());
+      assertEquals(sb.toString().trim(), result.getResponse().trim());
+    } catch (IOException | OllamaBaseException | InterruptedException e) {
+      fail(e);
     }
   }
 
@@ -118,7 +165,7 @@ class TestRealAPIs {
       assertNotNull(result.getResponse());
       assertFalse(result.getResponse().isEmpty());
     } catch (IOException | OllamaBaseException | InterruptedException e) {
-      throw new RuntimeException(e);
+      fail(e);
     }
   }
 
@@ -138,7 +185,7 @@ class TestRealAPIs {
       assertFalse(chatResult.getResponse().isBlank());
       assertEquals(4,chatResult.getChatHistory().size());
     } catch (IOException | OllamaBaseException | InterruptedException e) {
-      throw new RuntimeException(e);
+      fail(e);
     }
   }
 
@@ -160,7 +207,32 @@ class TestRealAPIs {
      assertTrue(chatResult.getResponse().startsWith("NI"));
      assertEquals(3, chatResult.getChatHistory().size());
    } catch (IOException | OllamaBaseException | InterruptedException e) {
-      throw new RuntimeException(e);
+      fail(e);
+    }
+  }
+
+  @Test
+  @Order(3)
+  void testChatWithStream() {
+    testEndpointReachability();
+    try {
+      OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(config.getModel());
+      OllamaChatRequestModel requestModel = builder.withMessage(OllamaChatMessageRole.USER,
+              "What is the capital of France? And what's France's connection with Mona Lisa?")
+          .build();
+
+      StringBuffer sb = new StringBuffer("");
+
+      OllamaChatResult chatResult = ollamaAPI.chat(requestModel,(s) -> {
+        LOG.info(s);
+        String substring = s.substring(sb.toString().length(), s.length());
+        LOG.info(substring);
+        sb.append(substring);
+      });
+      assertNotNull(chatResult);
+      assertEquals(sb.toString().trim(), chatResult.getResponse().trim());
+    } catch (IOException | OllamaBaseException | InterruptedException e) {
+      fail(e);
     }
   }
 
@@ -191,7 +263,7 @@ class TestRealAPIs {
 
 
     } catch (IOException | OllamaBaseException | InterruptedException e) {
-      throw new RuntimeException(e);
+      fail(e);
     }
   }
 
@@ -208,7 +280,7 @@ class TestRealAPIs {
       OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
       assertNotNull(chatResult);
     } catch (IOException | OllamaBaseException | InterruptedException e) {
-      throw new RuntimeException(e);
+      fail(e);
     }
   }
 
@@ -228,7 +300,31 @@ class TestRealAPIs {
       assertNotNull(result.getResponse());
       assertFalse(result.getResponse().isEmpty());
     } catch (IOException | OllamaBaseException | InterruptedException e) {
-      throw new RuntimeException(e);
+      fail(e);
+    }
+  }
+
+  @Test
+  @Order(3)
+  void testAskModelWithOptionsAndImageFilesStreamed() {
+    testEndpointReachability();
+    File imageFile = getImageFileFromClasspath("dog-on-a-boat.jpg");
+    try {
+      StringBuffer sb = new StringBuffer("");
+
+      OllamaResult result = ollamaAPI.generateWithImageFiles(config.getImageModel(),
+          "What is in this image?", List.of(imageFile), new OptionsBuilder().build(), (s) -> {
+            LOG.info(s);
+            String substring = s.substring(sb.toString().length(), s.length());
+            LOG.info(substring);
+            sb.append(substring);
+          });
+      assertNotNull(result);
+      assertNotNull(result.getResponse());
+      assertFalse(result.getResponse().isEmpty());
+      assertEquals(sb.toString().trim(), result.getResponse().trim());
+    } catch (IOException | OllamaBaseException | InterruptedException e) {
+      fail(e);
     }
   }
 
@@ -248,7 +344,24 @@ class TestRealAPIs {
       assertNotNull(result.getResponse());
       assertFalse(result.getResponse().isEmpty());
     } catch (IOException | OllamaBaseException | InterruptedException | URISyntaxException e) {
-      throw new RuntimeException(e);
+      fail(e);
+    }
+  }
+
+  @Test
+  @Order(3)
+  public void testEmbedding() {
+    testEndpointReachability();
+    try {
+      OllamaEmbeddingsRequestModel request = OllamaEmbeddingsRequestBuilder
+          .getInstance(config.getModel(), "What is the capital of France?").build();
+
+      List<Double> embeddings = ollamaAPI.generateEmbeddings(request);
+
+      assertNotNull(embeddings);
+      assertFalse(embeddings.isEmpty());
+    } catch (IOException | OllamaBaseException | InterruptedException e) {
+      fail(e);
     }
   }
 }
@@ -0,0 +1,35 @@
+package io.github.amithkoujalgi.ollama4j.unittests.jackson;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+
+public abstract class AbstractRequestSerializationTest<T> {
+
+    protected ObjectMapper mapper = Utils.getObjectMapper();
+
+    protected String serializeRequest(T req) {
+        try {
+            return mapper.writeValueAsString(req);
+        } catch (JsonProcessingException e) {
+            fail("Could not serialize request!", e);
+            return null;
+        }
+    }
+
+    protected T deserializeRequest(String jsonRequest, Class<T> requestClass) {
+        try {
+            return mapper.readValue(jsonRequest, requestClass);
+        } catch (JsonProcessingException e) {
+            fail("Could not deserialize jsonRequest!", e);
+            return null;
+        }
+    }
+
+    protected void assertEqualsAfterUnmarshalling(T unmarshalledRequest,
+        T req) {
+        assertEquals(req, unmarshalledRequest);
+    }
+}
@@ -0,0 +1,113 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.io.File;
import java.util.List;

import org.json.JSONObject;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class TestChatRequestSerialization extends AbstractRequestSerializationTest<OllamaChatRequestModel>{

    private OllamaChatRequestBuilder builder;

    @BeforeEach
    public void init() {
        builder = OllamaChatRequestBuilder.getInstance("DummyModel");
    }

    @Test
    public void testRequestOnlyMandatoryFields() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt").build();
        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
    }

    @Test
    public void testRequestMultipleMessages() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.SYSTEM, "System prompt")
        .withMessage(OllamaChatMessageRole.USER, "Some prompt")
        .build();
        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
    }

    @Test
    public void testRequestWithMessageAndImage() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt",
                List.of(new File("src/test/resources/dog-on-a-boat.jpg"))).build();
        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaChatRequestModel.class), req);
    }

    @Test
    public void testRequestWithOptions() {
        OptionsBuilder b = new OptionsBuilder();
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
            .withOptions(b.setMirostat(1).build())
            .withOptions(b.setTemperature(1L).build())
            .withOptions(b.setMirostatEta(1L).build())
            .withOptions(b.setMirostatTau(1L).build())
            .withOptions(b.setNumGpu(1).build())
            .withOptions(b.setSeed(1).build())
            .withOptions(b.setTopK(1).build())
            .withOptions(b.setTopP(1).build())
            .build();

        String jsonRequest = serializeRequest(req);
        OllamaChatRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaChatRequestModel.class);
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
        assertEquals(1.0, deserializeRequest.getOptions().get("temperature"));
        assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_eta"));
        assertEquals(1.0, deserializeRequest.getOptions().get("mirostat_tau"));
        assertEquals(1, deserializeRequest.getOptions().get("num_gpu"));
        assertEquals(1, deserializeRequest.getOptions().get("seed"));
        assertEquals(1, deserializeRequest.getOptions().get("top_k"));
        assertEquals(1.0, deserializeRequest.getOptions().get("top_p"));
    }

    @Test
    public void testWithJsonFormat() {
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, "Some prompt")
                .withGetJsonResponse().build();

        String jsonRequest = serializeRequest(req);
        // no jackson deserialization as format property is not boolean ==> omit as deserialization
        // of request is never used in real code anyways
        JSONObject jsonObject = new JSONObject(jsonRequest);
        String requestFormatProperty = jsonObject.getString("format");
        assertEquals("json", requestFormatProperty);
    }

    @Test
    public void testWithTemplate() {
        OllamaChatRequestModel req = builder.withTemplate("System Template")
            .build();
        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaChatRequestModel.class), req);
    }

    @Test
    public void testWithStreaming() {
        OllamaChatRequestModel req = builder.withStreaming().build();
        String jsonRequest = serializeRequest(req);
        assertEquals(deserializeRequest(jsonRequest, OllamaChatRequestModel.class).isStream(), true);
    }

    @Test
    public void testWithKeepAlive() {
        String expectedKeepAlive = "5m";
        OllamaChatRequestModel req = builder.withKeepAlive(expectedKeepAlive)
            .build();
        String jsonRequest = serializeRequest(req);
        assertEquals(deserializeRequest(jsonRequest, OllamaChatRequestModel.class).getKeepAlive(), expectedKeepAlive);
    }
}
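For context, a minimal sketch of the serialize/deserialize round trip these tests assert, using the library's shared ObjectMapper. The standalone class and the model name ("llama2") are assumptions for illustration and are not part of this diff.

```java
import com.fasterxml.jackson.databind.ObjectMapper;

import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.Utils;

public class ChatRequestRoundTrip {
    public static void main(String[] args) throws Exception {
        // build a chat request the same way the tests above do
        OllamaChatRequestModel req = OllamaChatRequestBuilder.getInstance("llama2") // model name is an assumption
                .withMessage(OllamaChatMessageRole.USER, "Why is the sky blue?")
                .build();

        // the same mapper AbstractRequestSerializationTest uses
        ObjectMapper mapper = Utils.getObjectMapper();
        String json = mapper.writeValueAsString(req);                        // serialize
        OllamaChatRequestModel back =
                mapper.readValue(json, OllamaChatRequestModel.class);        // deserialize

        System.out.println(json);
        System.out.println("Round trip preserves equality: " + req.equals(back));
    }
}
```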
@@ -0,0 +1,37 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;

import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class TestEmbeddingsRequestSerialization extends AbstractRequestSerializationTest<OllamaEmbeddingsRequestModel>{

    private OllamaEmbeddingsRequestBuilder builder;

    @BeforeEach
    public void init() {
        builder = OllamaEmbeddingsRequestBuilder.getInstance("DummyModel","DummyPrompt");
    }

    @Test
    public void testRequestOnlyMandatoryFields() {
        OllamaEmbeddingsRequestModel req = builder.build();
        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class), req);
    }

    @Test
    public void testRequestWithOptions() {
        OptionsBuilder b = new OptionsBuilder();
        OllamaEmbeddingsRequestModel req = builder
                .withOptions(b.setMirostat(1).build()).build();

        String jsonRequest = serializeRequest(req);
        OllamaEmbeddingsRequestModel deserializeRequest = deserializeRequest(jsonRequest,OllamaEmbeddingsRequestModel.class);
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
    }
}
@@ -0,0 +1,56 @@
package io.github.amithkoujalgi.ollama4j.unittests.jackson;

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.json.JSONObject;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;


import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class TestGenerateRequestSerialization extends AbstractRequestSerializationTest<OllamaGenerateRequestModel>{

    private OllamaGenerateRequestBuilder builder;

    @BeforeEach
    public void init() {
        builder = OllamaGenerateRequestBuilder.getInstance("DummyModel");
    }

    @Test
    public void testRequestOnlyMandatoryFields() {
        OllamaGenerateRequestModel req = builder.withPrompt("Some prompt").build();

        String jsonRequest = serializeRequest(req);
        assertEqualsAfterUnmarshalling(deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class), req);
    }

    @Test
    public void testRequestWithOptions() {
        OptionsBuilder b = new OptionsBuilder();
        OllamaGenerateRequestModel req =
                builder.withPrompt("Some prompt").withOptions(b.setMirostat(1).build()).build();

        String jsonRequest = serializeRequest(req);
        OllamaGenerateRequestModel deserializeRequest = deserializeRequest(jsonRequest, OllamaGenerateRequestModel.class);
        assertEqualsAfterUnmarshalling(deserializeRequest, req);
        assertEquals(1, deserializeRequest.getOptions().get("mirostat"));
    }

    @Test
    public void testWithJsonFormat() {
        OllamaGenerateRequestModel req =
                builder.withPrompt("Some prompt").withGetJsonResponse().build();

        String jsonRequest = serializeRequest(req);
        // no jackson deserialization as format property is not boolean ==> omit as deserialization
        // of request is never used in real code anyways
        JSONObject jsonObject = new JSONObject(jsonRequest);
        String requestFormatProperty = jsonObject.getString("format");
        assertEquals("json", requestFormatProperty);
    }

}